hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71cd35b8a5ccd91aeb1a0d8c570e794aa9da303 | 403 | py | Python | LeetCode/0069 _ Sqrt(x).py | Achyut-sudo/PythonAlgorithms | 21fb6522510fde7a0877b19a8cedd4665938a4df | [
"MIT"
] | 144 | 2020-09-13T22:54:57.000Z | 2022-02-24T21:54:25.000Z | LeetCode/0069 _ Sqrt(x).py | Achyut-sudo/PythonAlgorithms | 21fb6522510fde7a0877b19a8cedd4665938a4df | [
"MIT"
] | 587 | 2020-05-06T18:55:07.000Z | 2021-09-20T13:14:53.000Z | LeetCode/0069 _ Sqrt(x).py | Achyut-sudo/PythonAlgorithms | 21fb6522510fde7a0877b19a8cedd4665938a4df | [
"MIT"
class Solution(object):
    """LeetCode 69, Sqrt(x): integer (floor) square root."""

    def mySqrt(self, x):
        """Return floor(sqrt(x)) for a non-negative integer x.

        Binary search over candidate roots; O(log x) time, O(1) space.
        """
        if x < 2:
            # 0 and 1 are their own integer square roots.
            return x
        lo, hi = 1, x // 2  # for x >= 2, sqrt(x) <= x // 2
        best = 1
        while lo <= hi:
            mid = (lo + hi) // 2
            square = mid * mid
            if square == x:
                return mid
            if square < x:
                best = mid  # mid is a valid lower bound for the root
                lo = mid + 1
            else:
                hi = mid - 1
        return best
def mySqrt(self, x):
    """Floor of sqrt(x) for non-negative integer x, by binary search."""
    if x < 2:
        return x
    low, high = 0, x
    while low <= high:
        mid = low + (high - low) // 2
        sq = mid * mid
        if sq == x:
            return mid
        elif sq < x:
            low = mid + 1
        else:
            high = mid - 1
    # On exit, high is the largest integer whose square is below x.
    return high
f71cd3983554ca85caddf54de589eb916e0b3596 | 2,924 | py | Python | nova/virt/baremetal/vif_driver.py | SnabbCo/nova | d156d7fdf241569da2c27ae02ec88e6ef448f7e2 | [
"Apache-2.0"
] | 2 | 2016-04-19T08:20:39.000Z | 2021-10-03T16:00:37.000Z | nova/virt/baremetal/vif_driver.py | SnabbCo/nova | d156d7fdf241569da2c27ae02ec88e6ef448f7e2 | [
"Apache-2.0"
] | 9 | 2015-05-20T11:20:17.000Z | 2017-07-27T08:21:33.000Z | nova/virt/baremetal/vif_driver.py | SnabbCo/nova | d156d7fdf241569da2c27ae02ec88e6ef448f7e2 | [
"Apache-2.0"
] | 13 | 2015-05-05T09:34:04.000Z | 2017-11-08T02:03:46.000Z | # Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova import context
from nova import exception
from nova.i18n import _
from nova.openstack.common import log as logging
from nova.virt.baremetal import db as bmdb
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class BareMetalVIFDriver(object):
    """VIF driver for bare-metal nodes.

    Maps a Nova virtual interface (VIF) onto a free physical interface
    (PIF) recorded for the node in the baremetal database, and releases
    that mapping again on unplug.
    """

    def _after_plug(self, instance, vif, pif):
        # Subclass hook, invoked after a PIF has been associated with a VIF.
        pass

    def _after_unplug(self, instance, vif, pif):
        # Subclass hook, invoked after a PIF association has been cleared.
        pass

    def plug(self, instance, vif):
        """Associate `vif` with a free physical interface of the node.

        :param instance: instance dict owning the virtual interface.
        :param vif: virtual interface dict; its 'id' is recorded on the
            chosen physical interface in the baremetal DB.
        :raises exception.NovaException: if the node has no free PIF.
        """
        LOG.debug("plug: instance_uuid=%(uuid)s vif=%(vif)s",
                  {'uuid': instance['uuid'], 'vif': vif})
        vif_uuid = vif['id']
        ctx = context.get_admin_context()
        node = bmdb.bm_node_get_by_instance_uuid(ctx, instance['uuid'])

        # TODO(deva): optimize this database query
        #             this is just searching for a free physical interface
        pifs = bmdb.bm_interface_get_all_by_bm_node_id(ctx, node['id'])
        for pif in pifs:
            if not pif['vif_uuid']:
                bmdb.bm_interface_set_vif_uuid(ctx, pif['id'], vif_uuid)
                LOG.debug("pif:%(id)s is plugged (vif_uuid=%(vif_uuid)s)",
                          {'id': pif['id'], 'vif_uuid': vif_uuid})
                self._after_plug(instance, vif, pif)
                return

        # NOTE(deva): should this really be raising an exception
        #             when there are no physical interfaces left?
        raise exception.NovaException(_(
            "Baremetal node: %(id)s has no available physical interface"
            " for virtual interface %(vif_uuid)s")
            % {'id': node['id'], 'vif_uuid': vif_uuid})

    def unplug(self, instance, vif):
        """Clear the PIF association for `vif`, if one exists.

        Logs a warning (rather than raising) when no PIF is associated,
        so unplug stays best-effort.
        """
        LOG.debug("unplug: instance_uuid=%(uuid)s vif=%(vif)s",
                  {'uuid': instance['uuid'], 'vif': vif})
        vif_uuid = vif['id']
        ctx = context.get_admin_context()
        try:
            pif = bmdb.bm_interface_get_by_vif_uuid(ctx, vif_uuid)
            bmdb.bm_interface_set_vif_uuid(ctx, pif['id'], None)
            LOG.debug("pif:%(id)s is unplugged (vif_uuid=%(vif_uuid)s)",
                      {'id': pif['id'], 'vif_uuid': vif_uuid})
            self._after_unplug(instance, vif, pif)
        except exception.NovaException:
            # warning() instead of the deprecated warn() alias; pass the
            # argument lazily to the logger instead of eager "%" formatting.
            LOG.warning(_("no pif for vif_uuid=%s"), vif_uuid)
| 38.986667 | 78 | 0.619699 |
from oslo.config import cfg
from nova import context
from nova import exception
from nova.i18n import _
from nova.openstack.common import log as logging
from nova.virt.baremetal import db as bmdb
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class BareMetalVIFDriver(object):
    """Associates virtual interfaces with a bare-metal node's physical NICs."""

    def _after_plug(self, instance, vif, pif):
        pass

    def _after_unplug(self, instance, vif, pif):
        pass

    def plug(self, instance, vif):
        """Claim the first free physical interface on the node for this VIF."""
        LOG.debug("plug: instance_uuid=%(uuid)s vif=%(vif)s",
                  {'uuid': instance['uuid'], 'vif': vif})
        vif_uuid = vif['id']
        admin_ctx = context.get_admin_context()
        node = bmdb.bm_node_get_by_instance_uuid(admin_ctx, instance['uuid'])
        candidates = bmdb.bm_interface_get_all_by_bm_node_id(
            admin_ctx, node['id'])
        free = next((p for p in candidates if not p['vif_uuid']), None)
        if free is not None:
            bmdb.bm_interface_set_vif_uuid(admin_ctx, free['id'], vif_uuid)
            LOG.debug("pif:%(id)s is plugged (vif_uuid=%(vif_uuid)s)",
                      {'id': free['id'], 'vif_uuid': vif_uuid})
            self._after_plug(instance, vif, free)
            return
        raise exception.NovaException(_(
            "Baremetal node: %(id)s has no available physical interface"
            " for virtual interface %(vif_uuid)s")
            % {'id': node['id'], 'vif_uuid': vif_uuid})

    def unplug(self, instance, vif):
        """Release the physical interface bound to this VIF, if any."""
        LOG.debug("unplug: instance_uuid=%(uuid)s vif=%(vif)s",
                  {'uuid': instance['uuid'], 'vif': vif})
        vif_uuid = vif['id']
        admin_ctx = context.get_admin_context()
        try:
            bound = bmdb.bm_interface_get_by_vif_uuid(admin_ctx, vif_uuid)
            bmdb.bm_interface_set_vif_uuid(admin_ctx, bound['id'], None)
            LOG.debug("pif:%(id)s is unplugged (vif_uuid=%(vif_uuid)s)",
                      {'id': bound['id'], 'vif_uuid': vif_uuid})
            self._after_unplug(instance, vif, bound)
        except exception.NovaException:
            LOG.warn(_("no pif for vif_uuid=%s") % vif_uuid)
| true | true |
f71cd40457b398174b6dfb10bb74cacb3e7ed2d5 | 736 | py | Python | Tutorials/Intro_To_NN/NNDL-solutions/code/chap6p2/exec_shifted_2sig.py | lev1khachatryan/ASDS_CV | c9f0c0412002e929bcb7cc2fc6e5392977a9fa76 | [
"MIT"
] | 5 | 2019-12-13T16:26:10.000Z | 2020-01-10T07:44:05.000Z | Tutorials/Intro_To_NN/NNDL-solutions/code/chap6p2/exec_shifted_2sig.py | lev1khachatryan/ASDS_CV | c9f0c0412002e929bcb7cc2fc6e5392977a9fa76 | [
"MIT"
] | 1 | 2020-01-07T16:48:21.000Z | 2020-03-18T18:43:37.000Z | Tutorials/Intro_To_NN/NNDL-solutions/code/chap6p2/exec_shifted_2sig.py | lev1khachatryan/ASDS_CV | c9f0c0412002e929bcb7cc2fc6e5392977a9fa76 | [
"MIT"
] | null | null | null | import network3
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
training_data, validation_data, test_data = network3.load_data_shared()
mini_batch_size = 10
net = Network([
ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
filter_shape=(20, 1, 5, 5),
poolsize=(2, 2),
activation_fn=network3.shifted_2sig),
FullyConnectedLayer(n_in=20*12*12,
n_out=100,
activation_fn=network3.shifted_2sig),
SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
net.SGD(training_data, 60, mini_batch_size, 0.1,
validation_data, test_data) | 46 | 71 | 0.633152 | import network3
# Same training script with comments stripped (dataset "content_no_comment"
# column); `network3` is imported on the preceding fused row boundary.
from network3 import Network
from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
training_data, validation_data, test_data = network3.load_data_shared()
# Mini-batch size shared by the layer shapes below and by SGD.
mini_batch_size = 10
net = Network([
    ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                  filter_shape=(20, 1, 5, 5),
                  poolsize=(2, 2),
                  activation_fn=network3.shifted_2sig),
    FullyConnectedLayer(n_in=20*12*12,
                        n_out=100,
                        activation_fn=network3.shifted_2sig),
    SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
# 60 epochs at learning rate 0.1 (the SGD call continues on the next line).
net.SGD(training_data, 60, mini_batch_size, 0.1,
validation_data, test_data) | true | true |
f71cd4211d3578ba41ce735db4be9c8ac9129dc8 | 83 | py | Python | app/gis/__init__.py | athkishore/vgr | 48cc04eb9136e7f1d0753173188c9ddbc730f1cb | [
"MIT"
] | null | null | null | app/gis/__init__.py | athkishore/vgr | 48cc04eb9136e7f1d0753173188c9ddbc730f1cb | [
"MIT"
] | 3 | 2016-02-15T02:36:05.000Z | 2016-02-18T06:07:15.000Z | app/gis/__init__.py | athkishore/vgr | 48cc04eb9136e7f1d0753173188c9ddbc730f1cb | [
"MIT"
] | null | null | null | from flask import Blueprint
# Blueprint grouping the GIS feature's views; presumably registered on the
# Flask app elsewhere (e.g. an application factory) -- confirm at call site.
gis = Blueprint('gis', __name__)
# Imported after `gis` exists so the views module can attach routes to it
# without a circular import at load time.
from . import views
| 13.833333 | 32 | 0.746988 | from flask import Blueprint
gis = Blueprint('gis', __name__)
from . import views
| true | true |
f71cd48b6dfbc3793e7303d63dfae53f7d22f018 | 19,977 | py | Python | pymongo/encryption.py | anryko/mongo-python-driver | eda4fbb1591bd88d58d5bd3452f82ed656e95b1c | [
"Apache-2.0"
] | 4 | 2020-04-25T16:53:58.000Z | 2020-04-30T20:43:06.000Z | pymongo/encryption.py | anryko/mongo-python-driver | eda4fbb1591bd88d58d5bd3452f82ed656e95b1c | [
"Apache-2.0"
] | 30 | 2020-04-15T19:37:40.000Z | 2020-04-22T21:19:35.000Z | pymongo/encryption.py | anryko/mongo-python-driver | eda4fbb1591bd88d58d5bd3452f82ed656e95b1c | [
"Apache-2.0"
] | 2 | 2020-03-12T23:20:22.000Z | 2021-02-15T21:54:02.000Z | # Copyright 2019-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for explicit client-side field level encryption."""
import contextlib
import os
import subprocess
import uuid
import weakref
try:
from pymongocrypt.auto_encrypter import AutoEncrypter
from pymongocrypt.errors import MongoCryptError
from pymongocrypt.explicit_encrypter import ExplicitEncrypter
from pymongocrypt.mongocrypt import MongoCryptOptions
from pymongocrypt.state_machine import MongoCryptCallback
_HAVE_PYMONGOCRYPT = True
except ImportError:
_HAVE_PYMONGOCRYPT = False
MongoCryptCallback = object
from bson import _dict_to_bson, decode, encode
from bson.codec_options import CodecOptions
from bson.binary import (Binary,
STANDARD,
UUID_SUBTYPE)
from bson.errors import BSONError
from bson.raw_bson import (DEFAULT_RAW_BSON_OPTIONS,
RawBSONDocument,
_inflate_bson)
from bson.son import SON
from pymongo.errors import (ConfigurationError,
EncryptionError,
InvalidOperation,
ServerSelectionTimeoutError)
from pymongo.mongo_client import MongoClient
from pymongo.pool import _configured_socket, PoolOptions
from pymongo.read_concern import ReadConcern
from pymongo.ssl_support import get_ssl_context
from pymongo.uri_parser import parse_host
from pymongo.write_concern import WriteConcern
from pymongo.daemon import _spawn_daemon
# Default TLS port for KMS endpoints when none is given in the endpoint.
_HTTPS_PORT = 443

# Connect/read timeout (seconds, via PoolOptions) for KMS connections.
_KMS_CONNECT_TIMEOUT = 10  # TODO: CDRIVER-3262 will define this value.
# serverSelectionTimeoutMS for the local mongocryptd process.
_MONGOCRYPTD_TIMEOUT_MS = 1000

_DATA_KEY_OPTS = CodecOptions(document_class=SON, uuid_representation=STANDARD)
# Use RawBSONDocument codec options to avoid needlessly decoding
# documents from the key vault.
_KEY_VAULT_OPTS = CodecOptions(document_class=RawBSONDocument,
                               uuid_representation=STANDARD)
@contextlib.contextmanager
def _wrap_encryption_errors():
    """Context manager to wrap encryption related errors."""
    try:
        yield
    except BSONError:
        # BSON encoding/decoding errors are unrelated to encryption so
        # we should propagate them unchanged.
        raise
    except Exception as exc:
        # Everything else is surfaced to the caller as EncryptionError.
        raise EncryptionError(exc)
class _EncryptionIO(MongoCryptCallback):
    """I/O callbacks invoked by libmongocrypt (via pymongocrypt).

    Implements KMS requests, listCollections lookups, mongocryptd
    spawning/marking, and key vault reads/writes.
    """

    def __init__(self, client, key_vault_coll, mongocryptd_client, opts):
        """Internal class to perform I/O on behalf of pymongocrypt."""
        # Use a weak ref to break reference cycle.
        if client is not None:
            self.client_ref = weakref.ref(client)
        else:
            self.client_ref = None
        # Key vault reads/writes use majority read/write concern so key
        # material is durably committed and visible before use.
        self.key_vault_coll = key_vault_coll.with_options(
            codec_options=_KEY_VAULT_OPTS,
            read_concern=ReadConcern(level='majority'),
            write_concern=WriteConcern(w='majority'))
        self.mongocryptd_client = mongocryptd_client
        self.opts = opts
        self._spawned = False

    def kms_request(self, kms_context):
        """Complete a KMS request.

        :Parameters:
          - `kms_context`: A :class:`MongoCryptKmsContext`.

        :Returns:
          None
        """
        endpoint = kms_context.endpoint
        message = kms_context.message
        host, port = parse_host(endpoint, _HTTPS_PORT)
        # NOTE(review): positional args to get_ssl_context -- the final True
        # presumably enables TLS certificate verification; confirm against
        # pymongo.ssl_support.
        ctx = get_ssl_context(None, None, None, None, None, None, True)
        opts = PoolOptions(connect_timeout=_KMS_CONNECT_TIMEOUT,
                           socket_timeout=_KMS_CONNECT_TIMEOUT,
                           ssl_context=ctx)
        conn = _configured_socket((host, port), opts)
        try:
            conn.sendall(message)
            # Stream the KMS reply back into libmongocrypt until it has
            # every byte it asked for.
            while kms_context.bytes_needed > 0:
                data = conn.recv(kms_context.bytes_needed)
                kms_context.feed(data)
        finally:
            conn.close()

    def collection_info(self, database, filter):
        """Get the collection info for a namespace.

        The returned collection info is passed to libmongocrypt which reads
        the JSON schema.

        :Parameters:
          - `database`: The database on which to run listCollections.
          - `filter`: The filter to pass to listCollections.

        :Returns:
          The first document from the listCollections command response as BSON.
        """
        with self.client_ref()[database].list_collections(
                filter=RawBSONDocument(filter)) as cursor:
            for doc in cursor:
                return _dict_to_bson(doc, False, _DATA_KEY_OPTS)

    def spawn(self):
        """Spawn mongocryptd.

        Note this method is thread safe; at most one mongocryptd will start
        successfully.
        """
        self._spawned = True
        args = [self.opts._mongocryptd_spawn_path or 'mongocryptd']
        args.extend(self.opts._mongocryptd_spawn_args)
        _spawn_daemon(args)

    def mark_command(self, database, cmd):
        """Mark a command for encryption.

        :Parameters:
          - `database`: The database on which to run this command.
          - `cmd`: The BSON command to run.

        :Returns:
          The marked command response from mongocryptd.
        """
        if not self._spawned and not self.opts._mongocryptd_bypass_spawn:
            self.spawn()
        # Database.command only supports mutable mappings so we need to decode
        # the raw BSON command first.
        inflated_cmd = _inflate_bson(cmd, DEFAULT_RAW_BSON_OPTIONS)
        try:
            res = self.mongocryptd_client[database].command(
                inflated_cmd,
                codec_options=DEFAULT_RAW_BSON_OPTIONS)
        except ServerSelectionTimeoutError:
            if self.opts._mongocryptd_bypass_spawn:
                raise
            # mongocryptd may have exited; spawn it and retry once.
            self.spawn()
            res = self.mongocryptd_client[database].command(
                inflated_cmd,
                codec_options=DEFAULT_RAW_BSON_OPTIONS)
        return res.raw

    def fetch_keys(self, filter):
        """Yields one or more keys from the key vault.

        :Parameters:
          - `filter`: The filter to pass to find.

        :Returns:
          A generator which yields the requested keys from the key vault.
        """
        with self.key_vault_coll.find(RawBSONDocument(filter)) as cursor:
            for key in cursor:
                yield key.raw

    def insert_data_key(self, data_key):
        """Insert a data key into the key vault.

        :Parameters:
          - `data_key`: The data key document to insert.

        :Returns:
          The _id of the inserted data key document.
        """
        raw_doc = RawBSONDocument(data_key)
        data_key_id = raw_doc.get('_id')
        if not isinstance(data_key_id, uuid.UUID):
            raise TypeError('data_key _id must be a UUID')

        self.key_vault_coll.insert_one(raw_doc)
        return Binary(data_key_id.bytes, subtype=UUID_SUBTYPE)

    def bson_encode(self, doc):
        """Encode a document to BSON.

        A document can be any mapping type (like :class:`dict`).

        :Parameters:
          - `doc`: mapping type representing a document

        :Returns:
          The encoded BSON bytes.
        """
        return encode(doc)

    def close(self):
        """Release resources.

        Note it is not safe to call this method from __del__ or any GC hooks.
        """
        self.client_ref = None
        self.key_vault_coll = None
        if self.mongocryptd_client:
            self.mongocryptd_client.close()
            self.mongocryptd_client = None
class _Encrypter(object):
    """Encrypts and decrypts MongoDB commands for automatic encryption."""

    def __init__(self, io_callbacks, opts):
        """Encrypts and decrypts MongoDB commands.

        This class is used to support automatic encryption and decryption of
        MongoDB commands.

        :Parameters:
          - `io_callbacks`: A :class:`MongoCryptCallback`.
          - `opts`: The encrypted client's :class:`AutoEncryptionOpts`.
        """
        if opts._schema_map is None:
            schema_map = None
        else:
            schema_map = _dict_to_bson(opts._schema_map, False, _DATA_KEY_OPTS)
        self._auto_encrypter = AutoEncrypter(io_callbacks, MongoCryptOptions(
            opts._kms_providers, schema_map))
        self._bypass_auto_encryption = opts._bypass_auto_encryption
        self._closed = False

    def encrypt(self, database, cmd, check_keys, codec_options):
        """Encrypt a MongoDB command.

        :Parameters:
          - `database`: The database for this command.
          - `cmd`: A command document.
          - `check_keys`: If True, check `cmd` for invalid keys.
          - `codec_options`: The CodecOptions to use while encoding `cmd`.

        :Returns:
          The encrypted command to execute.
        """
        self._check_closed()
        # Workaround for $clusterTime which is incompatible with
        # check_keys.
        cluster_time = check_keys and cmd.pop('$clusterTime', None)
        encoded_cmd = _dict_to_bson(cmd, check_keys, codec_options)
        with _wrap_encryption_errors():
            encrypted_cmd = self._auto_encrypter.encrypt(database, encoded_cmd)
        # TODO: PYTHON-1922 avoid decoding the encrypted_cmd.
        encrypt_cmd = _inflate_bson(
            encrypted_cmd, DEFAULT_RAW_BSON_OPTIONS)
        if cluster_time:
            encrypt_cmd['$clusterTime'] = cluster_time
        return encrypt_cmd

    def decrypt(self, response):
        """Decrypt a MongoDB command response.

        :Parameters:
          - `response`: A MongoDB command response as BSON.

        :Returns:
          The decrypted command response.
        """
        self._check_closed()
        with _wrap_encryption_errors():
            return self._auto_encrypter.decrypt(response)

    def _check_closed(self):
        # After close() any further use is a programming error.
        if self._closed:
            raise InvalidOperation("Cannot use MongoClient after close")

    def close(self):
        """Cleanup resources."""
        self._closed = True
        self._auto_encrypter.close()

    @staticmethod
    def create(client, opts):
        """Create a _CommandEncyptor for a client.

        :Parameters:
          - `client`: The encrypted MongoClient.
          - `opts`: The encrypted client's :class:`AutoEncryptionOpts`.

        :Returns:
          A :class:`_CommandEncrypter` for this client.
        """
        key_vault_client = opts._key_vault_client or client
        db, coll = opts._key_vault_namespace.split('.', 1)
        key_vault_coll = key_vault_client[db][coll]

        mongocryptd_client = MongoClient(
            opts._mongocryptd_uri, connect=False,
            serverSelectionTimeoutMS=_MONGOCRYPTD_TIMEOUT_MS)

        io_callbacks = _EncryptionIO(
            client, key_vault_coll, mongocryptd_client, opts)
        return _Encrypter(io_callbacks, opts)
class Algorithm(object):
    """An enum that defines the supported encryption algorithms."""

    # Deterministic: the same plaintext and key always produce the same
    # ciphertext, which makes equality queries on the field possible.
    AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = (
        "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
    # Random: the same plaintext encrypts differently every time.
    AEAD_AES_256_CBC_HMAC_SHA_512_Random = (
        "AEAD_AES_256_CBC_HMAC_SHA_512-Random")
class ClientEncryption(object):
    """Explicit client-side field level encryption."""

    def __init__(self, kms_providers, key_vault_namespace, key_vault_client,
                 codec_options):
        """Explicit client-side field level encryption.

        The ClientEncryption class encapsulates explicit operations on a key
        vault collection that cannot be done directly on a MongoClient. Similar
        to configuring auto encryption on a MongoClient, it is constructed with
        a MongoClient (to a MongoDB cluster containing the key vault
        collection), KMS provider configuration, and keyVaultNamespace. It
        provides an API for explicitly encrypting and decrypting values, and
        creating data keys. It does not provide an API to query keys from the
        key vault collection, as this can be done directly on the MongoClient.

        See :ref:`explicit-client-side-encryption` for an example.

        :Parameters:
          - `kms_providers`: Map of KMS provider options. Two KMS providers
            are supported: "aws" and "local". The kmsProviders map values
            differ by provider:

              - `aws`: Map with "accessKeyId" and "secretAccessKey" as strings.
                These are the AWS access key ID and AWS secret access key used
                to generate KMS messages.
              - `local`: Map with "key" as a 96-byte array or string. "key"
                is the master key used to encrypt/decrypt data keys. This key
                should be generated and stored as securely as possible.

          - `key_vault_namespace`: The namespace for the key vault collection.
            The key vault collection contains all data keys used for encryption
            and decryption. Data keys are stored as documents in this MongoDB
            collection. Data keys are protected with encryption by a KMS
            provider.
          - `key_vault_client`: A MongoClient connected to a MongoDB cluster
            containing the `key_vault_namespace` collection.
          - `codec_options`: An instance of
            :class:`~bson.codec_options.CodecOptions` to use when encoding a
            value for encryption and decoding the decrypted BSON value. This
            should be the same CodecOptions instance configured on the
            MongoClient, Database, or Collection used to access application
            data.

        .. versionadded:: 3.9
        """
        if not _HAVE_PYMONGOCRYPT:
            raise ConfigurationError(
                "client-side field level encryption requires the pymongocrypt "
                "library: install a compatible version with: "
                "python -m pip install 'pymongo[encryption]'")

        if not isinstance(codec_options, CodecOptions):
            raise TypeError("codec_options must be an instance of "
                            "bson.codec_options.CodecOptions")

        self._kms_providers = kms_providers
        self._key_vault_namespace = key_vault_namespace
        self._key_vault_client = key_vault_client
        self._codec_options = codec_options
        db, coll = key_vault_namespace.split('.', 1)
        key_vault_coll = key_vault_client[db][coll]
        # No encrypted client and no mongocryptd here: explicit encryption
        # only needs the key vault collection callbacks.
        self._io_callbacks = _EncryptionIO(None, key_vault_coll, None, None)
        self._encryption = ExplicitEncrypter(
            self._io_callbacks, MongoCryptOptions(kms_providers, None))

    def create_data_key(self, kms_provider, master_key=None,
                        key_alt_names=None):
        """Create and insert a new data key into the key vault collection.

        :Parameters:
          - `kms_provider`: The KMS provider to use. Supported values are
            "aws" and "local".
          - `master_key`: Identifies a KMS-specific key used to encrypt the
            new data key. If the kmsProvider is "local" the `master_key` is
            not applicable and may be omitted. If the `kms_provider` is "aws"
            it is required and has the following fields::

                - `region` (string): Required. The AWS region, e.g. "us-east-1".
                - `key` (string): Required. The Amazon Resource Name (ARN) to
                   the AWS customer.
                - `endpoint` (string): Optional. An alternate host to send KMS
                  requests to. May include port number, e.g.
                  "kms.us-east-1.amazonaws.com:443".

          - `key_alt_names` (optional): An optional list of string alternate
            names used to reference a key. If a key is created with alternate
            names, then encryption may refer to the key by the unique alternate
            name instead of by ``key_id``. The following example shows creating
            and referring to a data key by alternate name::

              client_encryption.create_data_key("local", keyAltNames=["name1"])
              # reference the key with the alternate name
              client_encryption.encrypt("457-55-5462", keyAltName="name1",
                                        algorithm=Algorithm.Random)

        :Returns:
          The ``_id`` of the created data key document as a
          :class:`~bson.binary.Binary` with subtype
          :data:`~bson.binary.UUID_SUBTYPE`.
        """
        self._check_closed()
        with _wrap_encryption_errors():
            return self._encryption.create_data_key(
                kms_provider, master_key=master_key,
                key_alt_names=key_alt_names)

    def encrypt(self, value, algorithm, key_id=None, key_alt_name=None):
        """Encrypt a BSON value with a given key and algorithm.

        Note that exactly one of ``key_id`` or ``key_alt_name`` must be
        provided.

        :Parameters:
          - `value`: The BSON value to encrypt.
          - `algorithm` (string): The encryption algorithm to use. See
            :class:`Algorithm` for some valid options.
          - `key_id`: Identifies a data key by ``_id`` which must be a
            :class:`~bson.binary.Binary` with subtype 4 (
            :attr:`~bson.binary.UUID_SUBTYPE`).
          - `key_alt_name`: Identifies a key vault document by 'keyAltName'.

        :Returns:
          The encrypted value, a :class:`~bson.binary.Binary` with subtype 6.
        """
        self._check_closed()
        if (key_id is not None and not (
                isinstance(key_id, Binary) and
                key_id.subtype == UUID_SUBTYPE)):
            raise TypeError(
                'key_id must be a bson.binary.Binary with subtype 4')

        # Wrap the value in a single-field document so libmongocrypt can
        # encrypt it, then unwrap the result.
        doc = encode({'v': value}, codec_options=self._codec_options)
        with _wrap_encryption_errors():
            encrypted_doc = self._encryption.encrypt(
                doc, algorithm, key_id=key_id, key_alt_name=key_alt_name)
        return decode(encrypted_doc)['v']

    def decrypt(self, value):
        """Decrypt an encrypted value.

        :Parameters:
          - `value` (Binary): The encrypted value, a
            :class:`~bson.binary.Binary` with subtype 6.

        :Returns:
          The decrypted BSON value.
        """
        self._check_closed()
        if not (isinstance(value, Binary) and value.subtype == 6):
            raise TypeError(
                'value to decrypt must be a bson.binary.Binary with subtype 6')

        with _wrap_encryption_errors():
            doc = encode({'v': value})
            decrypted_doc = self._encryption.decrypt(doc)
            return decode(decrypted_doc,
                          codec_options=self._codec_options)['v']

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def _check_closed(self):
        if self._encryption is None:
            raise InvalidOperation("Cannot use closed ClientEncryption")

    def close(self):
        """Release resources.

        Note that using this class in a with-statement will automatically call
        :meth:`close`::

            with ClientEncryption(...) as client_encryption:
                encrypted = client_encryption.encrypt(value, ...)
                decrypted = client_encryption.decrypt(encrypted)

        """
        if self._io_callbacks:
            self._io_callbacks.close()
            self._encryption.close()
            self._io_callbacks = None
            self._encryption = None
| 38.270115 | 79 | 0.638634 |
import contextlib
import os
import subprocess
import uuid
import weakref
try:
from pymongocrypt.auto_encrypter import AutoEncrypter
from pymongocrypt.errors import MongoCryptError
from pymongocrypt.explicit_encrypter import ExplicitEncrypter
from pymongocrypt.mongocrypt import MongoCryptOptions
from pymongocrypt.state_machine import MongoCryptCallback
_HAVE_PYMONGOCRYPT = True
except ImportError:
_HAVE_PYMONGOCRYPT = False
MongoCryptCallback = object
from bson import _dict_to_bson, decode, encode
from bson.codec_options import CodecOptions
from bson.binary import (Binary,
STANDARD,
UUID_SUBTYPE)
from bson.errors import BSONError
from bson.raw_bson import (DEFAULT_RAW_BSON_OPTIONS,
RawBSONDocument,
_inflate_bson)
from bson.son import SON
from pymongo.errors import (ConfigurationError,
EncryptionError,
InvalidOperation,
ServerSelectionTimeoutError)
from pymongo.mongo_client import MongoClient
from pymongo.pool import _configured_socket, PoolOptions
from pymongo.read_concern import ReadConcern
from pymongo.ssl_support import get_ssl_context
from pymongo.uri_parser import parse_host
from pymongo.write_concern import WriteConcern
from pymongo.daemon import _spawn_daemon
# Default TLS port for KMS endpoints when none is given in the endpoint.
_HTTPS_PORT = 443
# Connect/read timeout (seconds, via PoolOptions) for KMS connections.
_KMS_CONNECT_TIMEOUT = 10
# serverSelectionTimeoutMS for the local mongocryptd process.
_MONGOCRYPTD_TIMEOUT_MS = 1000
_DATA_KEY_OPTS = CodecOptions(document_class=SON, uuid_representation=STANDARD)
# RawBSONDocument avoids needlessly decoding key vault documents.
_KEY_VAULT_OPTS = CodecOptions(document_class=RawBSONDocument,
                               uuid_representation=STANDARD)
@contextlib.contextmanager
def _wrap_encryption_errors():
    """Wrap unexpected errors in EncryptionError; let BSONError through."""
    try:
        yield
    except BSONError:
        # BSON encode/decode problems are not encryption failures.
        raise
    except Exception as exc:
        raise EncryptionError(exc)
class _EncryptionIO(MongoCryptCallback):
    """I/O callbacks invoked by pymongocrypt/libmongocrypt."""

    def __init__(self, client, key_vault_coll, mongocryptd_client, opts):
        # Weak ref to the encrypted client breaks a reference cycle.
        if client is not None:
            self.client_ref = weakref.ref(client)
        else:
            self.client_ref = None
        # Majority read/write concern gives durable, visible key material.
        self.key_vault_coll = key_vault_coll.with_options(
            codec_options=_KEY_VAULT_OPTS,
            read_concern=ReadConcern(level='majority'),
            write_concern=WriteConcern(w='majority'))
        self.mongocryptd_client = mongocryptd_client
        self.opts = opts
        self._spawned = False

    def kms_request(self, kms_context):
        """Send one KMS message over TLS and feed the reply back."""
        endpoint = kms_context.endpoint
        message = kms_context.message
        host, port = parse_host(endpoint, _HTTPS_PORT)
        ctx = get_ssl_context(None, None, None, None, None, None, True)
        opts = PoolOptions(connect_timeout=_KMS_CONNECT_TIMEOUT,
                           socket_timeout=_KMS_CONNECT_TIMEOUT,
                           ssl_context=ctx)
        conn = _configured_socket((host, port), opts)
        try:
            conn.sendall(message)
            while kms_context.bytes_needed > 0:
                data = conn.recv(kms_context.bytes_needed)
                kms_context.feed(data)
        finally:
            conn.close()

    def collection_info(self, database, filter):
        """Return the first listCollections result as raw BSON, or None."""
        with self.client_ref()[database].list_collections(
                filter=RawBSONDocument(filter)) as cursor:
            for doc in cursor:
                return _dict_to_bson(doc, False, _DATA_KEY_OPTS)

    def spawn(self):
        """Spawn the mongocryptd daemon."""
        self._spawned = True
        args = [self.opts._mongocryptd_spawn_path or 'mongocryptd']
        args.extend(self.opts._mongocryptd_spawn_args)
        _spawn_daemon(args)

    def mark_command(self, database, cmd):
        """Ask mongocryptd to mark fields in `cmd` for encryption."""
        if not self._spawned and not self.opts._mongocryptd_bypass_spawn:
            self.spawn()
        # Database.command needs a mutable mapping; inflate the raw BSON.
        inflated_cmd = _inflate_bson(cmd, DEFAULT_RAW_BSON_OPTIONS)
        try:
            res = self.mongocryptd_client[database].command(
                inflated_cmd,
                codec_options=DEFAULT_RAW_BSON_OPTIONS)
        except ServerSelectionTimeoutError:
            if self.opts._mongocryptd_bypass_spawn:
                raise
            # mongocryptd may have exited; respawn and retry once.
            self.spawn()
            res = self.mongocryptd_client[database].command(
                inflated_cmd,
                codec_options=DEFAULT_RAW_BSON_OPTIONS)
        return res.raw

    def fetch_keys(self, filter):
        """Yield raw key documents from the key vault matching `filter`."""
        with self.key_vault_coll.find(RawBSONDocument(filter)) as cursor:
            for key in cursor:
                yield key.raw

    def insert_data_key(self, data_key):
        """Insert `data_key` (raw BSON) and return its _id as Binary."""
        raw_doc = RawBSONDocument(data_key)
        data_key_id = raw_doc.get('_id')
        if not isinstance(data_key_id, uuid.UUID):
            raise TypeError('data_key _id must be a UUID')

        self.key_vault_coll.insert_one(raw_doc)
        return Binary(data_key_id.bytes, subtype=UUID_SUBTYPE)

    def bson_encode(self, doc):
        """Encode a mapping to BSON bytes."""
        return encode(doc)

    def close(self):
        """Drop references; not safe to call from __del__ or GC hooks."""
        self.client_ref = None
        self.key_vault_coll = None
        if self.mongocryptd_client:
            self.mongocryptd_client.close()
            self.mongocryptd_client = None
class _Encrypter(object):
    """Applies automatic encryption/decryption to MongoDB commands."""

    def __init__(self, io_callbacks, opts):
        # `opts` is the encrypted client's AutoEncryptionOpts.
        if opts._schema_map is None:
            schema_map = None
        else:
            schema_map = _dict_to_bson(opts._schema_map, False, _DATA_KEY_OPTS)
        self._auto_encrypter = AutoEncrypter(io_callbacks, MongoCryptOptions(
            opts._kms_providers, schema_map))
        self._bypass_auto_encryption = opts._bypass_auto_encryption
        self._closed = False

    def encrypt(self, database, cmd, check_keys, codec_options):
        """Return `cmd` with sensitive fields encrypted."""
        self._check_closed()
        # $clusterTime is popped before encoding because it is incompatible
        # with check_keys, then restored on the encrypted command.
        cluster_time = check_keys and cmd.pop('$clusterTime', None)
        encoded_cmd = _dict_to_bson(cmd, check_keys, codec_options)
        with _wrap_encryption_errors():
            encrypted_cmd = self._auto_encrypter.encrypt(database, encoded_cmd)
        encrypt_cmd = _inflate_bson(
            encrypted_cmd, DEFAULT_RAW_BSON_OPTIONS)
        if cluster_time:
            encrypt_cmd['$clusterTime'] = cluster_time
        return encrypt_cmd

    def decrypt(self, response):
        """Decrypt a MongoDB command response (raw BSON bytes)."""
        self._check_closed()
        with _wrap_encryption_errors():
            return self._auto_encrypter.decrypt(response)

    def _check_closed(self):
        if self._closed:
            raise InvalidOperation("Cannot use MongoClient after close")

    def close(self):
        """Release libmongocrypt resources."""
        self._closed = True
        self._auto_encrypter.close()

    @staticmethod
    def create(client, opts):
        """Build an _Encrypter (and its I/O callbacks) for `client`."""
        key_vault_client = opts._key_vault_client or client
        db, coll = opts._key_vault_namespace.split('.', 1)
        key_vault_coll = key_vault_client[db][coll]

        mongocryptd_client = MongoClient(
            opts._mongocryptd_uri, connect=False,
            serverSelectionTimeoutMS=_MONGOCRYPTD_TIMEOUT_MS)

        io_callbacks = _EncryptionIO(
            client, key_vault_coll, mongocryptd_client, opts)
        return _Encrypter(io_callbacks, opts)
class Algorithm(object):
AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = (
"AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
AEAD_AES_256_CBC_HMAC_SHA_512_Random = (
"AEAD_AES_256_CBC_HMAC_SHA_512-Random")
class ClientEncryption(object):
def __init__(self, kms_providers, key_vault_namespace, key_vault_client,
codec_options):
if not _HAVE_PYMONGOCRYPT:
raise ConfigurationError(
"client-side field level encryption requires the pymongocrypt "
"library: install a compatible version with: "
"python -m pip install 'pymongo[encryption]'")
if not isinstance(codec_options, CodecOptions):
raise TypeError("codec_options must be an instance of "
"bson.codec_options.CodecOptions")
self._kms_providers = kms_providers
self._key_vault_namespace = key_vault_namespace
self._key_vault_client = key_vault_client
self._codec_options = codec_options
db, coll = key_vault_namespace.split('.', 1)
key_vault_coll = key_vault_client[db][coll]
self._io_callbacks = _EncryptionIO(None, key_vault_coll, None, None)
self._encryption = ExplicitEncrypter(
self._io_callbacks, MongoCryptOptions(kms_providers, None))
def create_data_key(self, kms_provider, master_key=None,
key_alt_names=None):
self._check_closed()
with _wrap_encryption_errors():
return self._encryption.create_data_key(
kms_provider, master_key=master_key,
key_alt_names=key_alt_names)
def encrypt(self, value, algorithm, key_id=None, key_alt_name=None):
self._check_closed()
if (key_id is not None and not (
isinstance(key_id, Binary) and
key_id.subtype == UUID_SUBTYPE)):
raise TypeError(
'key_id must be a bson.binary.Binary with subtype 4')
doc = encode({'v': value}, codec_options=self._codec_options)
with _wrap_encryption_errors():
encrypted_doc = self._encryption.encrypt(
doc, algorithm, key_id=key_id, key_alt_name=key_alt_name)
return decode(encrypted_doc)['v']
def decrypt(self, value):
self._check_closed()
if not (isinstance(value, Binary) and value.subtype == 6):
raise TypeError(
'value to decrypt must be a bson.binary.Binary with subtype 6')
with _wrap_encryption_errors():
doc = encode({'v': value})
decrypted_doc = self._encryption.decrypt(doc)
return decode(decrypted_doc,
codec_options=self._codec_options)['v']
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def _check_closed(self):
if self._encryption is None:
raise InvalidOperation("Cannot use closed ClientEncryption")
def close(self):
if self._io_callbacks:
self._io_callbacks.close()
self._encryption.close()
self._io_callbacks = None
self._encryption = None
| true | true |
f71cd544582f43f1165b14c78a32c371d80fe797 | 14,036 | py | Python | stellargraph/mapper/mini_batch_node_generators.py | zblumen/stellargraph | 10e62006907dd5968286f33648d1054e9c961c1b | [
"Apache-2.0"
] | null | null | null | stellargraph/mapper/mini_batch_node_generators.py | zblumen/stellargraph | 10e62006907dd5968286f33648d1054e9c961c1b | [
"Apache-2.0"
] | null | null | null | stellargraph/mapper/mini_batch_node_generators.py | zblumen/stellargraph | 10e62006907dd5968286f33648d1054e9c961c1b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright 2018-2020 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Mappers to provide input data for the graph models in layers.
"""
__all__ = ["ClusterNodeGenerator", "ClusterNodeSequence"]
import random
import copy
import numpy as np
import networkx as nx
from tensorflow.keras.utils import Sequence
from scipy import sparse
from ..core.graph import StellarGraph
from ..core.utils import is_real_iterable
class ClusterNodeGenerator:
"""
A data generator for use with ClusterGCN models on homogeneous graphs, [1].
The supplied graph G should be a StellarGraph object that is ready for
machine learning. Currently the model requires node features to be available for all
nodes in the graph.
Use the :meth:`flow` method supplying the nodes and (optionally) targets
to get an object that can be used as a Keras data generator.
This generator will supply the features array and the adjacency matrix to a
mini-batch Keras graph ML model.
[1] `W. Chiang, X. Liu, S. Si, Y. Li, S. Bengio, C. Hsieh, 2019 <https://arxiv.org/abs/1905.07953>`_.
For more information, please see the ClusterGCN demo:
`<https://github.com/stellargraph/stellargraph/blob/master/demos/>`_
Args:
G (StellarGraph): a machine-learning StellarGraph-type graph
clusters (int or list): If int then it indicates the number of clusters (default is 1 that is the given graph).
If clusters is greater than 1, then nodes are uniformly at random assigned to a cluster. If list,
then it should be a list of lists of node IDs such that each list corresponds to a cluster of nodes
in G. The clusters should be non-overlapping.
q (float): The number of clusters to combine for each mini-batch. The default is 1.
lam (float): The mixture coefficient for adjacency matrix normalisation.
name (str): an optional name of the generator
"""
def __init__(self, G, clusters=1, q=1, lam=0.1, name=None):
if not isinstance(G, StellarGraph):
raise TypeError("Graph must be a StellarGraph or StellarDiGraph object.")
self.graph = G
self.name = name
self.q = q # The number of clusters to sample per mini-batch
self.lam = lam
self.clusters = clusters
if isinstance(clusters, list):
self.k = len(clusters)
elif isinstance(clusters, int):
if clusters <= 0:
raise ValueError(
"{}: clusters must be greater than 0.".format(type(self).__name__)
)
self.k = clusters
else:
raise TypeError(
"{}: clusters must be either int or list type.".format(
type(self).__name__
)
)
# Some error checking on the given parameter values
if not isinstance(lam, float):
raise TypeError("{}: lam must be a float type.".format(type(self).__name__))
if lam < 0 or lam > 1:
raise ValueError(
"{}: lam must be in the range [0, 1].".format(type(self).__name__)
)
if not isinstance(q, int):
raise TypeError("{}: q must be integer type.".format(type(self).__name__))
if q <= 0:
raise ValueError(
"{}: q must be greater than 0.".format(type(self).__name__)
)
if self.k % q != 0:
raise ValueError(
"{}: the number of clusters must be exactly divisible by q.".format(
type(self).__name__
)
)
# Check if the graph has features
G.check_graph_for_ml()
self.node_list = list(G.nodes())
# Check that there is only a single node type
if len(G.node_types) > 1:
raise ValueError(
"{}: node generator requires graph with single node type; "
"a graph with multiple node types is passed. Stopping.".format(
type(self).__name__
)
)
if isinstance(clusters, int):
# We are not given graph clusters.
# We are going to split the graph into self.k random clusters
all_nodes = list(G.nodes())
random.shuffle(all_nodes)
cluster_size = len(all_nodes) // self.k
self.clusters = [
all_nodes[i : i + cluster_size]
for i in range(0, len(all_nodes), cluster_size)
]
if len(self.clusters) > self.k:
# for the case that the number of nodes is not exactly divisible by k, we combine
# the last cluster with the second last one
self.clusters[-2].extend(self.clusters[-1])
del self.clusters[-1]
print(f"Number of clusters {self.k}")
for i, c in enumerate(self.clusters):
print(f"{i} cluster has size {len(c)}")
# Get the features for the nodes
self.features = G.node_features(self.node_list)
def flow(self, node_ids, targets=None, name=None):
"""
Creates a generator/sequence object for training, evaluation, or prediction
with the supplied node ids and numeric targets.
Args:
node_ids (iterable): an iterable of node ids for the nodes of interest
(e.g., training, validation, or test set nodes)
targets (2d array, optional): a 2D array of numeric node targets with shape `(len(node_ids),
target_size)`
name (str, optional): An optional name for the returned generator object.
Returns:
A ClusterNodeSequence object to use with ClusterGCN in Keras
methods :meth:`fit_generator`, :meth:`evaluate_generator`, and :meth:`predict_generator`
"""
if targets is not None:
# Check targets is an iterable
if not is_real_iterable(targets):
raise TypeError(
"{}: Targets must be an iterable or None".format(
type(self).__name__
)
)
# Check targets correct shape
if len(targets) != len(node_ids):
raise ValueError(
"{}: Targets must be the same length as node_ids".format(
type(self).__name__
)
)
return ClusterNodeSequence(
self.graph,
self.clusters,
targets=targets,
node_ids=node_ids,
q=self.q,
lam=self.lam,
name=name,
)
class ClusterNodeSequence(Sequence):
"""
A Keras-compatible data generator for node inference using ClusterGCN model.
Use this class with the Keras methods :meth:`keras.Model.fit_generator`,
:meth:`keras.Model.evaluate_generator`, and
:meth:`keras.Model.predict_generator`,
This class should be created using the `.flow(...)` method of
:class:`ClusterNodeGenerator`.
Args:
graph (StellarGraph): The graph
clusters (list): A list of lists such that each sub-list indicates the nodes in a cluster.
The length of this list, len(clusters) indicates the number of batches in one epoch.
targets (np.ndarray, optional): An optional array of node targets of size (N x C),
where C is the target size (e.g., number of classes for one-hot class targets)
node_ids (iterable, optional): The node IDs for the target nodes. Required if targets is not None.
normalize_adj (bool, optional): Specifies whether the adjacency matrix for each mini-batch should
be normalized or not. The default is True.
q (int, optional): The number of subgraphs to combine for each batch. The default value is
1 such that the generator treats each subgraph as a batch.
lam (float, optional): The mixture coefficient for adjacency matrix normalisation (the
'diagonal enhancement' method). Valid values are in the interval [0, 1] and the default value is 0.1.
name (str, optional): An optional name for this generator object.
"""
def __init__(
self,
graph,
clusters,
targets=None,
node_ids=None,
normalize_adj=True,
q=1,
lam=0.1,
name=None,
):
self.name = name
self.clusters = list()
self.clusters_original = copy.deepcopy(clusters)
self.graph = graph
self.node_list = list(graph.nodes())
self.normalize_adj = normalize_adj
self.q = q
self.lam = lam
self.node_order = list()
self._node_order_in_progress = list()
self.__node_buffer = dict()
self.target_ids = list()
if len(clusters) % self.q != 0:
raise ValueError(
"The number of clusters should be exactly divisible by q. However, {} number of clusters is not exactly divisible by {}.".format(
len(clusters), q
)
)
if node_ids is not None:
self.target_ids = list(node_ids)
if targets is not None:
if node_ids is None:
raise ValueError(
"Since targets is not None, node_ids must be given and cannot be None."
)
if len(node_ids) != len(targets):
raise ValueError(
"When passed together targets and indices should be the same length."
)
self.targets = np.asanyarray(targets)
self.target_node_lookup = dict(
zip(self.target_ids, range(len(self.target_ids)))
)
else:
self.targets = None
self.on_epoch_end()
def __len__(self):
num_batches = len(self.clusters_original) // self.q
return num_batches
def __getitem__(self, index):
# The next batch should be the adjacency matrix for the cluster and the corresponding feature vectors
# and targets if available.
cluster = self.clusters[index]
adj_cluster = self.graph.to_adjacency_matrix(cluster)
# The operations to normalize the adjacency matrix are too slow.
# Either optimize this or implement as a layer(?)
if self.normalize_adj:
# add self loops
adj_cluster.setdiag(1) # add self loops
degree_matrix_diag = 1.0 / (adj_cluster.sum(axis=1) + 1)
degree_matrix_diag = np.squeeze(np.asarray(degree_matrix_diag))
degree_matrix = sparse.lil_matrix(adj_cluster.shape)
degree_matrix.setdiag(degree_matrix_diag)
adj_cluster = degree_matrix.tocsr() @ adj_cluster
adj_cluster.setdiag((1.0 + self.lam) * adj_cluster.diagonal())
adj_cluster = adj_cluster.toarray()
g_node_list = list(cluster)
# Determine the target nodes that exist in this cluster
target_nodes_in_cluster = np.asanyarray(
list(set(g_node_list).intersection(self.target_ids))
)
self.__node_buffer[index] = target_nodes_in_cluster
# Dictionary to store node indices for quicker node index lookups
node_lookup = dict(zip(g_node_list, range(len(g_node_list))))
# The list of indices of the target nodes in self.node_list
target_node_indices = np.array(
[node_lookup[n] for n in target_nodes_in_cluster]
)
if index == (len(self.clusters_original) // self.q) - 1:
# last batch
self.__node_buffer_dict_to_list()
cluster_targets = None
#
if self.targets is not None:
# Dictionary to store node indices for quicker node index lookups
# The list of indices of the target nodes in self.node_list
cluster_target_indices = np.array(
[self.target_node_lookup[n] for n in target_nodes_in_cluster]
)
cluster_targets = self.targets[cluster_target_indices]
cluster_targets = cluster_targets.reshape((1,) + cluster_targets.shape)
features = self.graph.node_features(g_node_list)
features = np.reshape(features, (1,) + features.shape)
adj_cluster = adj_cluster.reshape((1,) + adj_cluster.shape)
target_node_indices = target_node_indices[np.newaxis, np.newaxis, :]
return [features, target_node_indices, adj_cluster], cluster_targets
def __node_buffer_dict_to_list(self):
self.node_order = []
for k, v in self.__node_buffer.items():
self.node_order.extend(v)
def on_epoch_end(self):
"""
Shuffle all nodes at the end of each epoch
"""
if self.q > 1:
# combine clusters
cluster_indices = list(range(len(self.clusters_original)))
random.shuffle(cluster_indices)
self.clusters = []
for i in range(0, len(cluster_indices) - 1, self.q):
cc = cluster_indices[i : i + self.q]
tmp = []
for l in cc:
tmp.extend(list(self.clusters_original[l]))
self.clusters.append(tmp)
else:
self.clusters = copy.deepcopy(self.clusters_original)
self.__node_buffer = dict()
random.shuffle(self.clusters)
| 38.245232 | 145 | 0.604731 |
__all__ = ["ClusterNodeGenerator", "ClusterNodeSequence"]
import random
import copy
import numpy as np
import networkx as nx
from tensorflow.keras.utils import Sequence
from scipy import sparse
from ..core.graph import StellarGraph
from ..core.utils import is_real_iterable
class ClusterNodeGenerator:
def __init__(self, G, clusters=1, q=1, lam=0.1, name=None):
if not isinstance(G, StellarGraph):
raise TypeError("Graph must be a StellarGraph or StellarDiGraph object.")
self.graph = G
self.name = name
self.q = q
self.lam = lam
self.clusters = clusters
if isinstance(clusters, list):
self.k = len(clusters)
elif isinstance(clusters, int):
if clusters <= 0:
raise ValueError(
"{}: clusters must be greater than 0.".format(type(self).__name__)
)
self.k = clusters
else:
raise TypeError(
"{}: clusters must be either int or list type.".format(
type(self).__name__
)
)
if not isinstance(lam, float):
raise TypeError("{}: lam must be a float type.".format(type(self).__name__))
if lam < 0 or lam > 1:
raise ValueError(
"{}: lam must be in the range [0, 1].".format(type(self).__name__)
)
if not isinstance(q, int):
raise TypeError("{}: q must be integer type.".format(type(self).__name__))
if q <= 0:
raise ValueError(
"{}: q must be greater than 0.".format(type(self).__name__)
)
if self.k % q != 0:
raise ValueError(
"{}: the number of clusters must be exactly divisible by q.".format(
type(self).__name__
)
)
G.check_graph_for_ml()
self.node_list = list(G.nodes())
if len(G.node_types) > 1:
raise ValueError(
"{}: node generator requires graph with single node type; "
"a graph with multiple node types is passed. Stopping.".format(
type(self).__name__
)
)
if isinstance(clusters, int):
all_nodes = list(G.nodes())
random.shuffle(all_nodes)
cluster_size = len(all_nodes) // self.k
self.clusters = [
all_nodes[i : i + cluster_size]
for i in range(0, len(all_nodes), cluster_size)
]
if len(self.clusters) > self.k:
self.clusters[-2].extend(self.clusters[-1])
del self.clusters[-1]
print(f"Number of clusters {self.k}")
for i, c in enumerate(self.clusters):
print(f"{i} cluster has size {len(c)}")
self.features = G.node_features(self.node_list)
def flow(self, node_ids, targets=None, name=None):
if targets is not None:
if not is_real_iterable(targets):
raise TypeError(
"{}: Targets must be an iterable or None".format(
type(self).__name__
)
)
if len(targets) != len(node_ids):
raise ValueError(
"{}: Targets must be the same length as node_ids".format(
type(self).__name__
)
)
return ClusterNodeSequence(
self.graph,
self.clusters,
targets=targets,
node_ids=node_ids,
q=self.q,
lam=self.lam,
name=name,
)
class ClusterNodeSequence(Sequence):
def __init__(
self,
graph,
clusters,
targets=None,
node_ids=None,
normalize_adj=True,
q=1,
lam=0.1,
name=None,
):
self.name = name
self.clusters = list()
self.clusters_original = copy.deepcopy(clusters)
self.graph = graph
self.node_list = list(graph.nodes())
self.normalize_adj = normalize_adj
self.q = q
self.lam = lam
self.node_order = list()
self._node_order_in_progress = list()
self.__node_buffer = dict()
self.target_ids = list()
if len(clusters) % self.q != 0:
raise ValueError(
"The number of clusters should be exactly divisible by q. However, {} number of clusters is not exactly divisible by {}.".format(
len(clusters), q
)
)
if node_ids is not None:
self.target_ids = list(node_ids)
if targets is not None:
if node_ids is None:
raise ValueError(
"Since targets is not None, node_ids must be given and cannot be None."
)
if len(node_ids) != len(targets):
raise ValueError(
"When passed together targets and indices should be the same length."
)
self.targets = np.asanyarray(targets)
self.target_node_lookup = dict(
zip(self.target_ids, range(len(self.target_ids)))
)
else:
self.targets = None
self.on_epoch_end()
def __len__(self):
num_batches = len(self.clusters_original) // self.q
return num_batches
def __getitem__(self, index):
cluster = self.clusters[index]
adj_cluster = self.graph.to_adjacency_matrix(cluster)
if self.normalize_adj:
adj_cluster.setdiag(1)
degree_matrix_diag = 1.0 / (adj_cluster.sum(axis=1) + 1)
degree_matrix_diag = np.squeeze(np.asarray(degree_matrix_diag))
degree_matrix = sparse.lil_matrix(adj_cluster.shape)
degree_matrix.setdiag(degree_matrix_diag)
adj_cluster = degree_matrix.tocsr() @ adj_cluster
adj_cluster.setdiag((1.0 + self.lam) * adj_cluster.diagonal())
adj_cluster = adj_cluster.toarray()
g_node_list = list(cluster)
target_nodes_in_cluster = np.asanyarray(
list(set(g_node_list).intersection(self.target_ids))
)
self.__node_buffer[index] = target_nodes_in_cluster
node_lookup = dict(zip(g_node_list, range(len(g_node_list))))
target_node_indices = np.array(
[node_lookup[n] for n in target_nodes_in_cluster]
)
if index == (len(self.clusters_original) // self.q) - 1:
self.__node_buffer_dict_to_list()
cluster_targets = None
if self.targets is not None:
cluster_target_indices = np.array(
[self.target_node_lookup[n] for n in target_nodes_in_cluster]
)
cluster_targets = self.targets[cluster_target_indices]
cluster_targets = cluster_targets.reshape((1,) + cluster_targets.shape)
features = self.graph.node_features(g_node_list)
features = np.reshape(features, (1,) + features.shape)
adj_cluster = adj_cluster.reshape((1,) + adj_cluster.shape)
target_node_indices = target_node_indices[np.newaxis, np.newaxis, :]
return [features, target_node_indices, adj_cluster], cluster_targets
def __node_buffer_dict_to_list(self):
self.node_order = []
for k, v in self.__node_buffer.items():
self.node_order.extend(v)
def on_epoch_end(self):
if self.q > 1:
cluster_indices = list(range(len(self.clusters_original)))
random.shuffle(cluster_indices)
self.clusters = []
for i in range(0, len(cluster_indices) - 1, self.q):
cc = cluster_indices[i : i + self.q]
tmp = []
for l in cc:
tmp.extend(list(self.clusters_original[l]))
self.clusters.append(tmp)
else:
self.clusters = copy.deepcopy(self.clusters_original)
self.__node_buffer = dict()
random.shuffle(self.clusters)
| true | true |
f71cd723c890ed8cd37ed7a27760fb739e4bfcaf | 2,832 | py | Python | app/scraping.py | Jvism/web-scraper | 3d3625f1831ca51d8c77e47e799c822e3e19d97d | [
"MIT"
] | null | null | null | app/scraping.py | Jvism/web-scraper | 3d3625f1831ca51d8c77e47e799c822e3e19d97d | [
"MIT"
] | null | null | null | app/scraping.py | Jvism/web-scraper | 3d3625f1831ca51d8c77e47e799c822e3e19d97d | [
"MIT"
] | null | null | null | import requests as req
from bs4 import BeautifulSoup as bs
import os
def clearConsole():
command = 'clear'
if os.name in ('nt', 'dos'): # If Machine is running on Windows, use cls
command = 'cls'
os.system(command)
def soup_recover(url):
request = req.get(url)
return bs(request.text,features="html.parser")
def urls_extract(soup):
urls = []
for article in soup.find_all('article'):
url = article.find_all('a',limit=1)[0]
urls.append('https://books.toscrape.com/catalogue/' + url['href'])
return urls
def extract_data(soup):
data_book = []
for page in soup.select('.page'):
book_information = page.find_all('td')
title = page.find_all('h1')[0].text
price = page.find_all('p')[0].text
stock = book_information[5].text
category = page.find_all('li')[2].text.split('\n')[1]
cover = 'https://books.toscrape.com/' + page.find_all('img')[0]['src'].split('../')[2]
upc = book_information[0].text
product_type = book_information[1].text
price_excl_tax = book_information[2].text
price_incl_tax = book_information[3].text
tax = book_information[4].text
number_reviews = book_information[6].text
data_book.extend([title,price,stock,category,cover,upc,product_type,price_excl_tax,price_incl_tax,tax,stock,number_reviews])
return data_book
def export_csv(data):
contador = 0
file = open('books_data.csv','w',encoding="utf-8")
file.write('title,price,stock,category,cover,upc,product type,price (excl. tax),price (incl. tac),tax,availability,number of reviews\n')
for book_data in data:
info = ''
for index,information in enumerate(book_data):
if index == 0:
words = information.split(',')
title = ''
for word in words:
title += word
info += title + ','
elif index == len(book_data)-1:
info += information
else:
info += information + ','
file.write(info + '\n')
clearConsole()
print(str(round(contador*0.1,1)) + '%')
contador += 1
file.close()
def launch_app():
url_web = 'https://books.toscrape.com/'
urls_books = []
number_pages = 50
for page in range(number_pages):
urls_books.extend(urls_extract(soup_recover(url_web + 'catalogue/page-' + str(page+1) + '.html')))
clearConsole()
print(str(round((page)*2,1)) + '%')
data_books = []
for index,url in enumerate(urls_books):
data_books.append(extract_data(soup_recover(url)))
clearConsole()
print(str(round(index*0.1,1)) + '%')
export_csv(data_books)
return
launch_app()
| 27.495146 | 140 | 0.592514 | import requests as req
from bs4 import BeautifulSoup as bs
import os
def clearConsole():
command = 'clear'
if os.name in ('nt', 'dos'):
command = 'cls'
os.system(command)
def soup_recover(url):
request = req.get(url)
return bs(request.text,features="html.parser")
def urls_extract(soup):
urls = []
for article in soup.find_all('article'):
url = article.find_all('a',limit=1)[0]
urls.append('https://books.toscrape.com/catalogue/' + url['href'])
return urls
def extract_data(soup):
data_book = []
for page in soup.select('.page'):
book_information = page.find_all('td')
title = page.find_all('h1')[0].text
price = page.find_all('p')[0].text
stock = book_information[5].text
category = page.find_all('li')[2].text.split('\n')[1]
cover = 'https://books.toscrape.com/' + page.find_all('img')[0]['src'].split('../')[2]
upc = book_information[0].text
product_type = book_information[1].text
price_excl_tax = book_information[2].text
price_incl_tax = book_information[3].text
tax = book_information[4].text
number_reviews = book_information[6].text
data_book.extend([title,price,stock,category,cover,upc,product_type,price_excl_tax,price_incl_tax,tax,stock,number_reviews])
return data_book
def export_csv(data):
contador = 0
file = open('books_data.csv','w',encoding="utf-8")
file.write('title,price,stock,category,cover,upc,product type,price (excl. tax),price (incl. tac),tax,availability,number of reviews\n')
for book_data in data:
info = ''
for index,information in enumerate(book_data):
if index == 0:
words = information.split(',')
title = ''
for word in words:
title += word
info += title + ','
elif index == len(book_data)-1:
info += information
else:
info += information + ','
file.write(info + '\n')
clearConsole()
print(str(round(contador*0.1,1)) + '%')
contador += 1
file.close()
def launch_app():
url_web = 'https://books.toscrape.com/'
urls_books = []
number_pages = 50
for page in range(number_pages):
urls_books.extend(urls_extract(soup_recover(url_web + 'catalogue/page-' + str(page+1) + '.html')))
clearConsole()
print(str(round((page)*2,1)) + '%')
data_books = []
for index,url in enumerate(urls_books):
data_books.append(extract_data(soup_recover(url)))
clearConsole()
print(str(round(index*0.1,1)) + '%')
export_csv(data_books)
return
launch_app()
| true | true |
f71cd7eb1d7001125af24fce5bc28d33488eb10f | 1,313 | py | Python | runtime/opt/taupage/init.d/06-update-sysctl.py | pc-alves/taupage | 07025d45772d47b43e0a20d7ee21f10a6ff5162d | [
"Apache-2.0"
] | 49 | 2015-04-14T13:55:10.000Z | 2020-02-14T22:55:43.000Z | runtime/opt/taupage/init.d/06-update-sysctl.py | pc-alves/taupage | 07025d45772d47b43e0a20d7ee21f10a6ff5162d | [
"Apache-2.0"
] | 538 | 2015-04-01T10:53:09.000Z | 2020-04-17T08:43:36.000Z | runtime/opt/taupage/init.d/06-update-sysctl.py | pc-alves/taupage | 07025d45772d47b43e0a20d7ee21f10a6ff5162d | [
"Apache-2.0"
] | 67 | 2015-05-05T19:48:30.000Z | 2020-11-04T04:59:00.000Z | #!/usr/bin/env python3
import logging
import sys
import subprocess
from taupage import configure_logging, get_config
def main():
"""Configure custom sysctl parameters
If a sysctl section is present, add the valid parameters to sysctl and reloads.
"""
CUSTOM_SYSCTL_CONF = '/etc/sysctl.d/99-custom.conf'
configure_logging()
config = get_config()
sysctl = config.get('sysctl')
if sysctl is None:
sys.exit(0)
try:
sysctl_entries = ['{} = {}'.format(key, value) for key, value in sysctl.items()]
with open(CUSTOM_SYSCTL_CONF, 'w') as file:
file.write('\n'.join(sysctl_entries)+'\n')
logging.info('Successfully written sysctl parameters')
except Exception as e:
logging.error('Failed to write sysctl parameters')
logging.exception(e)
sys.exit(1)
try:
exitcode = subprocess.call(['/sbin/sysctl', '-p', CUSTOM_SYSCTL_CONF])
if exitcode != 0:
logging.error('Reloading sysctl failed with exitcode {}'.format(exitcode))
sys.exit(1)
logging.info('Successfully reloaded sysctl parameters')
except Exception as e:
logging.error('Failed to reload sysctl')
logging.exception(e)
sys.exit(1)
if __name__ == '__main__':
main()
| 26.795918 | 88 | 0.638995 |
import logging
import sys
import subprocess
from taupage import configure_logging, get_config
def main():
CUSTOM_SYSCTL_CONF = '/etc/sysctl.d/99-custom.conf'
configure_logging()
config = get_config()
sysctl = config.get('sysctl')
if sysctl is None:
sys.exit(0)
try:
sysctl_entries = ['{} = {}'.format(key, value) for key, value in sysctl.items()]
with open(CUSTOM_SYSCTL_CONF, 'w') as file:
file.write('\n'.join(sysctl_entries)+'\n')
logging.info('Successfully written sysctl parameters')
except Exception as e:
logging.error('Failed to write sysctl parameters')
logging.exception(e)
sys.exit(1)
try:
exitcode = subprocess.call(['/sbin/sysctl', '-p', CUSTOM_SYSCTL_CONF])
if exitcode != 0:
logging.error('Reloading sysctl failed with exitcode {}'.format(exitcode))
sys.exit(1)
logging.info('Successfully reloaded sysctl parameters')
except Exception as e:
logging.error('Failed to reload sysctl')
logging.exception(e)
sys.exit(1)
if __name__ == '__main__':
main()
| true | true |
f71cd8d3024c1d478964feaae2c4352cb8ad6a81 | 852 | py | Python | lib/utils.py | Titorat/SSrehab | 6691ee1ed442073bfa00a51f0d9ab74b9252d302 | [
"MIT"
] | null | null | null | lib/utils.py | Titorat/SSrehab | 6691ee1ed442073bfa00a51f0d9ab74b9252d302 | [
"MIT"
] | null | null | null | lib/utils.py | Titorat/SSrehab | 6691ee1ed442073bfa00a51f0d9ab74b9252d302 | [
"MIT"
] | null | null | null | # standard library
from subprocess import run, PIPE
from typing import List
RUN_CMD_ONFAIL_EXITCODE = 22
def run_cmd(cmd: List[str]):
"""A wrapper around subprocess.run that nicely fails on a non-zero exit code"""
if len(cmd) == 0:
raise ValueError('cmd has to be a non-empty list')
res = run(cmd, stdout=PIPE, stderr=PIPE)
if res.returncode != 0:
print(f"command \"{cmd[0]}\" finished with exit code: {res.returncode}")
stderr = res.stderr.decode('utf-8')
if stderr:
print("and produced the following error message:")
print(stderr)
exit(RUN_CMD_ONFAIL_EXITCODE)
return res.stdout.decode('utf-8').rstrip()
def run_bash(bash_code: str):
"""Safely runs a bash command, and nicely fails on a non-zero exit code"""
return run_cmd(['bash', '-c', bash_code])
| 30.428571 | 83 | 0.651408 |
from subprocess import run, PIPE
from typing import List
RUN_CMD_ONFAIL_EXITCODE = 22
def run_cmd(cmd: List[str]):
if len(cmd) == 0:
raise ValueError('cmd has to be a non-empty list')
res = run(cmd, stdout=PIPE, stderr=PIPE)
if res.returncode != 0:
print(f"command \"{cmd[0]}\" finished with exit code: {res.returncode}")
stderr = res.stderr.decode('utf-8')
if stderr:
print("and produced the following error message:")
print(stderr)
exit(RUN_CMD_ONFAIL_EXITCODE)
return res.stdout.decode('utf-8').rstrip()
def run_bash(bash_code: str):
return run_cmd(['bash', '-c', bash_code])
| true | true |
f71cd9187dca0fd26b3cdbda0a4c8921d179e358 | 14,773 | py | Python | python/helpers/pydev/pydevd_plugins/jinja2_debug.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | null | null | null | python/helpers/pydev/pydevd_plugins/jinja2_debug.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | 11 | 2017-02-27T22:35:32.000Z | 2021-12-24T08:07:40.000Z | python/helpers/pydev/pydevd_plugins/jinja2_debug.py | teddywest32/intellij-community | e0268d7a1da1d318b441001448cdd3e8929b2f29 | [
"Apache-2.0"
] | null | null | null | import traceback
from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint, get_exception_name
from _pydevd_bundle.pydevd_constants import get_thread_id, STATE_SUSPEND, dict_contains, dict_iter_items, dict_keys, JINJA2_SUSPEND
from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK
from _pydevd_bundle import pydevd_vars
from pydevd_file_utils import get_abs_path_real_path_and_base_from_file
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode
class Jinja2LineBreakpoint(LineBreakpoint):
def __init__(self, file, line, condition, func_name, expression):
self.file = file
LineBreakpoint.__init__(self, line, condition, func_name, expression)
def is_triggered(self, template_frame_file, template_frame_line):
return self.file == template_frame_file and self.line == template_frame_line
def __str__(self):
return "Jinja2LineBreakpoint: %s-%d" %(self.file, self.line)
def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name):
result = None
if type == 'jinja2-line':
breakpoint = Jinja2LineBreakpoint(file, line, condition, func_name, expression)
if not hasattr(pydb, 'jinja2_breakpoints'):
_init_plugin_breaks(pydb)
result = breakpoint, pydb.jinja2_breakpoints
return result
return result
def add_exception_breakpoint(plugin, pydb, type, exception):
if type == 'jinja2':
if not hasattr(pydb, 'jinja2_exception_break'):
_init_plugin_breaks(pydb)
pydb.jinja2_exception_break[exception] = True
pydb.set_tracing_for_untraced_contexts()
return True
return False
def _init_plugin_breaks(pydb):
pydb.jinja2_exception_break = {}
pydb.jinja2_breakpoints = {}
def remove_exception_breakpoint(plugin, pydb, type, exception):
if type == 'jinja2':
try:
del pydb.jinja2_exception_break[exception]
return True
except:
pass
return False
def get_breakpoints(plugin, pydb, type):
if type == 'jinja2-line':
return pydb.jinja2_breakpoints
return None
def _is_jinja2_render_call(frame):
try:
name = frame.f_code.co_name
if dict_contains(frame.f_globals, "__jinja_template__") and name in ("root", "loop", "macro") or name.startswith("block_"):
return True
return False
except:
traceback.print_exc()
return False
def _suspend_jinja2(pydb, thread, frame, cmd=CMD_SET_BREAK, message=None):
frame = Jinja2TemplateFrame(frame)
if frame.f_lineno is None:
return None
pydevd_vars.add_additional_frame_by_id(get_thread_id(thread), {id(frame): frame})
pydb.set_suspend(thread, cmd)
thread.additional_info.suspend_type = JINJA2_SUSPEND
if cmd == CMD_ADD_EXCEPTION_BREAK:
# send exception name as message
if message:
message = str(message)
thread.additional_info.pydev_message = message
return frame
def _is_jinja2_suspended(thread):
return thread.additional_info.suspend_type == JINJA2_SUSPEND
def _is_jinja2_context_call(frame):
return dict_contains(frame.f_locals, "_Context__obj")
def _is_jinja2_internal_function(frame):
return dict_contains(frame.f_locals, 'self') and frame.f_locals['self'].__class__.__name__ in \
('LoopContext', 'TemplateReference', 'Macro', 'BlockReference')
def _find_jinja2_render_frame(frame):
while frame is not None and not _is_jinja2_render_call(frame):
frame = frame.f_back
return frame
#=======================================================================================================================
# Jinja2 Frame
#=======================================================================================================================
class Jinja2TemplateFrame:
def __init__(self, frame):
file_name = _get_jinja2_template_filename(frame)
self.back_context = None
if 'context' in frame.f_locals:
#sometimes we don't have 'context', e.g. in macros
self.back_context = frame.f_locals['context']
self.f_code = FCode('template', file_name)
self.f_lineno = _get_jinja2_template_line(frame)
self.f_back = frame
self.f_globals = {}
self.f_locals = self.collect_context(frame)
self.f_trace = None
def collect_context(self, frame):
res = {}
for k, v in frame.f_locals.items():
if not k.startswith('l_'):
res[k] = v
elif v and not _is_missing(v):
res[k[2:]] = v
if self.back_context is not None:
for k, v in self.back_context.items():
res[k] = v
return res
def _change_variable(self, frame, name, value):
in_vars_or_parents = False
if 'context' in frame.f_locals:
if name in frame.f_locals['context'].parent:
self.back_context.parent[name] = value
in_vars_or_parents = True
if name in frame.f_locals['context'].vars:
self.back_context.vars[name] = value
in_vars_or_parents = True
l_name = 'l_' + name
if l_name in frame.f_locals:
if in_vars_or_parents:
frame.f_locals[l_name] = self.back_context.resolve(name)
else:
frame.f_locals[l_name] = value
def change_variable(plugin, frame, attr, expression):
if isinstance(frame, Jinja2TemplateFrame):
result = eval(expression, frame.f_globals, frame.f_locals)
frame._change_variable(frame.f_back, attr, result)
return result
return False
def _is_missing(item):
if item.__class__.__name__ == 'MissingType':
return True
return False
def _find_render_function_frame(frame):
#in order to hide internal rendering functions
old_frame = frame
try:
while not (dict_contains(frame.f_locals, 'self') and frame.f_locals['self'].__class__.__name__ == 'Template' and \
frame.f_code.co_name == 'render'):
frame = frame.f_back
if frame is None:
return old_frame
return frame
except:
return old_frame
def _get_jinja2_template_line(frame):
debug_info = None
if dict_contains(frame.f_globals,'__jinja_template__'):
_debug_info = frame.f_globals['__jinja_template__']._debug_info
if _debug_info != '':
#sometimes template contains only plain text
debug_info = frame.f_globals['__jinja_template__'].debug_info
if debug_info is None:
return None
lineno = frame.f_lineno
for pair in debug_info:
if pair[1] == lineno:
return pair[0]
return None
def _get_jinja2_template_filename(frame):
if dict_contains(frame.f_globals, '__jinja_template__'):
fname = frame.f_globals['__jinja_template__'].filename
abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_file(fname)
return abs_path_real_path_and_base[1]
return None
#=======================================================================================================================
# Jinja2 Step Commands
#=======================================================================================================================
def has_exception_breaks(plugin):
if len(plugin.main_debugger.jinja2_exception_break) > 0:
return True
return False
def has_line_breaks(plugin):
for file, breakpoints in dict_iter_items(plugin.main_debugger.jinja2_breakpoints):
if len(breakpoints) > 0:
return True
return False
def can_not_skip(plugin, pydb, pydb_frame, frame):
if pydb.jinja2_breakpoints and _is_jinja2_render_call(frame):
filename = _get_jinja2_template_filename(frame)
jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename)
if jinja2_breakpoints_for_file:
return True
return False
def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop):
pydb, filename, info, thread = args
plugin_stop = False
stop_info['jinja2_stop'] = False
if _is_jinja2_suspended(thread):
stop_info['jinja2_stop'] = event in ('call', 'line') and _is_jinja2_render_call(frame)
plugin_stop = stop_info['jinja2_stop']
stop = False
if info.pydev_call_from_jinja2 is not None:
if _is_jinja2_internal_function(frame):
#if internal Jinja2 function was called, we sould continue debugging inside template
info.pydev_call_from_jinja2 = None
else:
#we go into python code from Jinja2 rendering frame
stop = True
if event == 'call' and _is_jinja2_context_call(frame.f_back):
#we called function from context, the next step will be in function
info.pydev_call_from_jinja2 = 1
if event == 'return' and _is_jinja2_context_call(frame.f_back):
#we return from python code to Jinja2 rendering frame
info.pydev_step_stop = info.pydev_call_from_jinja2
info.pydev_call_from_jinja2 = None
thread.additional_info.suspend_type = JINJA2_SUSPEND
stop = False
#print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop_info", stop_info, \
# "thread.additional_info.suspend_type", thread.additional_info.suspend_type
#print "event", event, "farme.locals", frame.f_locals
return stop, plugin_stop
def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop):
pydb, filename, info, thread = args
plugin_stop = False
stop_info['jinja2_stop'] = False
if _is_jinja2_suspended(thread):
stop = False
if info.pydev_call_inside_jinja2 is None:
if _is_jinja2_render_call(frame):
if event == 'call':
info.pydev_call_inside_jinja2 = frame.f_back
if event in ('line', 'return'):
info.pydev_call_inside_jinja2 = frame
else:
if event == 'line':
if _is_jinja2_render_call(frame) and info.pydev_call_inside_jinja2 is frame:
stop_info['jinja2_stop'] = True
plugin_stop = stop_info['jinja2_stop']
if event == 'return':
if frame is info.pydev_call_inside_jinja2 and not dict_contains(frame.f_back.f_locals,'event'):
info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame.f_back)
return stop, plugin_stop
else:
if event == 'return' and _is_jinja2_context_call(frame.f_back):
#we return from python code to Jinja2 rendering frame
info.pydev_call_from_jinja2 = None
info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame)
thread.additional_info.suspend_type = JINJA2_SUSPEND
stop = False
return stop, plugin_stop
#print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop", stop, "jinja_stop", jinja2_stop, \
# "thread.additional_info.suspend_type", thread.additional_info.suspend_type
#print "event", event, "info.pydev_call_inside_jinja2", info.pydev_call_inside_jinja2
#print "frame", frame, "frame.f_back", frame.f_back, "step_stop", info.pydev_step_stop
#print "is_context_call", _is_jinja2_context_call(frame)
#print "render", _is_jinja2_render_call(frame)
#print "-------------"
return stop, plugin_stop
def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd):
pydb, filename, info, thread = args
if dict_contains(stop_info, 'jinja2_stop') and stop_info['jinja2_stop']:
frame = _suspend_jinja2(pydb, thread, frame, step_cmd)
if frame:
pydb.do_wait_suspend(thread, frame, event, arg)
return True
return False
def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args):
pydb, filename, info, thread = args
new_frame = None
jinja2_breakpoint = None
flag = False
type = 'jinja2'
if event in ('line', 'call') and info.pydev_state != STATE_SUSPEND and \
pydb.jinja2_breakpoints and _is_jinja2_render_call(frame):
filename = _get_jinja2_template_filename(frame)
jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename)
new_frame = Jinja2TemplateFrame(frame)
if jinja2_breakpoints_for_file:
lineno = frame.f_lineno
template_lineno = _get_jinja2_template_line(frame)
if template_lineno is not None and dict_contains(jinja2_breakpoints_for_file, template_lineno):
jinja2_breakpoint = jinja2_breakpoints_for_file[template_lineno]
flag = True
new_frame = Jinja2TemplateFrame(frame)
return flag, jinja2_breakpoint, new_frame, type
def suspend(plugin, pydb, thread, frame, bp_type):
if bp_type == 'jinja2':
return _suspend_jinja2(pydb, thread, frame)
return None
def exception_break(plugin, pydb, pydb_frame, frame, args, arg):
pydb, filename, info, thread = args
exception, value, trace = arg
if pydb.jinja2_exception_break:
exception_type = dict_keys(pydb.jinja2_exception_break)[0]
if get_exception_name(exception) in ('UndefinedError', 'TemplateNotFound', 'TemplatesNotFound'):
#errors in rendering
render_frame = _find_jinja2_render_frame(frame)
if render_frame:
suspend_frame = _suspend_jinja2(pydb, thread, render_frame, CMD_ADD_EXCEPTION_BREAK, message=exception_type)
if suspend_frame:
add_exception_to_frame(suspend_frame, (exception, value, trace))
flag = True
suspend_frame.f_back = frame
frame = suspend_frame
return flag, frame
elif get_exception_name(exception) in ('TemplateSyntaxError', 'TemplateAssertionError'):
#errors in compile time
name = frame.f_code.co_name
if name in ('template', 'top-level template code', '<module>') or name.startswith('block '):
#Jinja2 translates exception info and creates fake frame on his own
pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK)
add_exception_to_frame(frame, (exception, value, trace))
thread.additional_info.suspend_type = JINJA2_SUSPEND
thread.additional_info.pydev_message = str(exception_type)
flag = True
return flag, frame
return None | 39.712366 | 131 | 0.646314 | import traceback
from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint, get_exception_name
from _pydevd_bundle.pydevd_constants import get_thread_id, STATE_SUSPEND, dict_contains, dict_iter_items, dict_keys, JINJA2_SUSPEND
from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK
from _pydevd_bundle import pydevd_vars
from pydevd_file_utils import get_abs_path_real_path_and_base_from_file
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode
class Jinja2LineBreakpoint(LineBreakpoint):
def __init__(self, file, line, condition, func_name, expression):
self.file = file
LineBreakpoint.__init__(self, line, condition, func_name, expression)
def is_triggered(self, template_frame_file, template_frame_line):
return self.file == template_frame_file and self.line == template_frame_line
def __str__(self):
return "Jinja2LineBreakpoint: %s-%d" %(self.file, self.line)
def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name):
result = None
if type == 'jinja2-line':
breakpoint = Jinja2LineBreakpoint(file, line, condition, func_name, expression)
if not hasattr(pydb, 'jinja2_breakpoints'):
_init_plugin_breaks(pydb)
result = breakpoint, pydb.jinja2_breakpoints
return result
return result
def add_exception_breakpoint(plugin, pydb, type, exception):
if type == 'jinja2':
if not hasattr(pydb, 'jinja2_exception_break'):
_init_plugin_breaks(pydb)
pydb.jinja2_exception_break[exception] = True
pydb.set_tracing_for_untraced_contexts()
return True
return False
def _init_plugin_breaks(pydb):
pydb.jinja2_exception_break = {}
pydb.jinja2_breakpoints = {}
def remove_exception_breakpoint(plugin, pydb, type, exception):
if type == 'jinja2':
try:
del pydb.jinja2_exception_break[exception]
return True
except:
pass
return False
def get_breakpoints(plugin, pydb, type):
if type == 'jinja2-line':
return pydb.jinja2_breakpoints
return None
def _is_jinja2_render_call(frame):
try:
name = frame.f_code.co_name
if dict_contains(frame.f_globals, "__jinja_template__") and name in ("root", "loop", "macro") or name.startswith("block_"):
return True
return False
except:
traceback.print_exc()
return False
def _suspend_jinja2(pydb, thread, frame, cmd=CMD_SET_BREAK, message=None):
frame = Jinja2TemplateFrame(frame)
if frame.f_lineno is None:
return None
pydevd_vars.add_additional_frame_by_id(get_thread_id(thread), {id(frame): frame})
pydb.set_suspend(thread, cmd)
thread.additional_info.suspend_type = JINJA2_SUSPEND
if cmd == CMD_ADD_EXCEPTION_BREAK:
if message:
message = str(message)
thread.additional_info.pydev_message = message
return frame
def _is_jinja2_suspended(thread):
return thread.additional_info.suspend_type == JINJA2_SUSPEND
def _is_jinja2_context_call(frame):
return dict_contains(frame.f_locals, "_Context__obj")
def _is_jinja2_internal_function(frame):
return dict_contains(frame.f_locals, 'self') and frame.f_locals['self'].__class__.__name__ in \
('LoopContext', 'TemplateReference', 'Macro', 'BlockReference')
def _find_jinja2_render_frame(frame):
while frame is not None and not _is_jinja2_render_call(frame):
frame = frame.f_back
return frame
class Jinja2TemplateFrame:
def __init__(self, frame):
file_name = _get_jinja2_template_filename(frame)
self.back_context = None
if 'context' in frame.f_locals:
self.back_context = frame.f_locals['context']
self.f_code = FCode('template', file_name)
self.f_lineno = _get_jinja2_template_line(frame)
self.f_back = frame
self.f_globals = {}
self.f_locals = self.collect_context(frame)
self.f_trace = None
def collect_context(self, frame):
res = {}
for k, v in frame.f_locals.items():
if not k.startswith('l_'):
res[k] = v
elif v and not _is_missing(v):
res[k[2:]] = v
if self.back_context is not None:
for k, v in self.back_context.items():
res[k] = v
return res
def _change_variable(self, frame, name, value):
in_vars_or_parents = False
if 'context' in frame.f_locals:
if name in frame.f_locals['context'].parent:
self.back_context.parent[name] = value
in_vars_or_parents = True
if name in frame.f_locals['context'].vars:
self.back_context.vars[name] = value
in_vars_or_parents = True
l_name = 'l_' + name
if l_name in frame.f_locals:
if in_vars_or_parents:
frame.f_locals[l_name] = self.back_context.resolve(name)
else:
frame.f_locals[l_name] = value
def change_variable(plugin, frame, attr, expression):
if isinstance(frame, Jinja2TemplateFrame):
result = eval(expression, frame.f_globals, frame.f_locals)
frame._change_variable(frame.f_back, attr, result)
return result
return False
def _is_missing(item):
if item.__class__.__name__ == 'MissingType':
return True
return False
def _find_render_function_frame(frame):
#in order to hide internal rendering functions
old_frame = frame
try:
while not (dict_contains(frame.f_locals, 'self') and frame.f_locals['self'].__class__.__name__ == 'Template' and \
frame.f_code.co_name == 'render'):
frame = frame.f_back
if frame is None:
return old_frame
return frame
except:
return old_frame
def _get_jinja2_template_line(frame):
debug_info = None
if dict_contains(frame.f_globals,'__jinja_template__'):
_debug_info = frame.f_globals['__jinja_template__']._debug_info
if _debug_info != '':
#sometimes template contains only plain text
debug_info = frame.f_globals['__jinja_template__'].debug_info
if debug_info is None:
return None
lineno = frame.f_lineno
for pair in debug_info:
if pair[1] == lineno:
return pair[0]
return None
def _get_jinja2_template_filename(frame):
if dict_contains(frame.f_globals, '__jinja_template__'):
fname = frame.f_globals['__jinja_template__'].filename
abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_file(fname)
return abs_path_real_path_and_base[1]
return None
#=======================================================================================================================
# Jinja2 Step Commands
#=======================================================================================================================
def has_exception_breaks(plugin):
if len(plugin.main_debugger.jinja2_exception_break) > 0:
return True
return False
def has_line_breaks(plugin):
for file, breakpoints in dict_iter_items(plugin.main_debugger.jinja2_breakpoints):
if len(breakpoints) > 0:
return True
return False
def can_not_skip(plugin, pydb, pydb_frame, frame):
if pydb.jinja2_breakpoints and _is_jinja2_render_call(frame):
filename = _get_jinja2_template_filename(frame)
jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename)
if jinja2_breakpoints_for_file:
return True
return False
def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop):
pydb, filename, info, thread = args
plugin_stop = False
stop_info['jinja2_stop'] = False
if _is_jinja2_suspended(thread):
stop_info['jinja2_stop'] = event in ('call', 'line') and _is_jinja2_render_call(frame)
plugin_stop = stop_info['jinja2_stop']
stop = False
if info.pydev_call_from_jinja2 is not None:
if _is_jinja2_internal_function(frame):
#if internal Jinja2 function was called, we sould continue debugging inside template
info.pydev_call_from_jinja2 = None
else:
#we go into python code from Jinja2 rendering frame
stop = True
if event == 'call' and _is_jinja2_context_call(frame.f_back):
#we called function from context, the next step will be in function
info.pydev_call_from_jinja2 = 1
if event == 'return' and _is_jinja2_context_call(frame.f_back):
#we return from python code to Jinja2 rendering frame
info.pydev_step_stop = info.pydev_call_from_jinja2
info.pydev_call_from_jinja2 = None
thread.additional_info.suspend_type = JINJA2_SUSPEND
stop = False
#print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop_info", stop_info, \
# "thread.additional_info.suspend_type", thread.additional_info.suspend_type
#print "event", event, "farme.locals", frame.f_locals
return stop, plugin_stop
def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop):
pydb, filename, info, thread = args
plugin_stop = False
stop_info['jinja2_stop'] = False
if _is_jinja2_suspended(thread):
stop = False
if info.pydev_call_inside_jinja2 is None:
if _is_jinja2_render_call(frame):
if event == 'call':
info.pydev_call_inside_jinja2 = frame.f_back
if event in ('line', 'return'):
info.pydev_call_inside_jinja2 = frame
else:
if event == 'line':
if _is_jinja2_render_call(frame) and info.pydev_call_inside_jinja2 is frame:
stop_info['jinja2_stop'] = True
plugin_stop = stop_info['jinja2_stop']
if event == 'return':
if frame is info.pydev_call_inside_jinja2 and not dict_contains(frame.f_back.f_locals,'event'):
info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame.f_back)
return stop, plugin_stop
else:
if event == 'return' and _is_jinja2_context_call(frame.f_back):
#we return from python code to Jinja2 rendering frame
info.pydev_call_from_jinja2 = None
info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame)
thread.additional_info.suspend_type = JINJA2_SUSPEND
stop = False
return stop, plugin_stop
#print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop", stop, "jinja_stop", jinja2_stop, \
# "thread.additional_info.suspend_type", thread.additional_info.suspend_type
#print "event", event, "info.pydev_call_inside_jinja2", info.pydev_call_inside_jinja2
#print "frame", frame, "frame.f_back", frame.f_back, "step_stop", info.pydev_step_stop
#print "is_context_call", _is_jinja2_context_call(frame)
#print "render", _is_jinja2_render_call(frame)
#print "-------------"
return stop, plugin_stop
def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd):
pydb, filename, info, thread = args
if dict_contains(stop_info, 'jinja2_stop') and stop_info['jinja2_stop']:
frame = _suspend_jinja2(pydb, thread, frame, step_cmd)
if frame:
pydb.do_wait_suspend(thread, frame, event, arg)
return True
return False
def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args):
pydb, filename, info, thread = args
new_frame = None
jinja2_breakpoint = None
flag = False
type = 'jinja2'
if event in ('line', 'call') and info.pydev_state != STATE_SUSPEND and \
pydb.jinja2_breakpoints and _is_jinja2_render_call(frame):
filename = _get_jinja2_template_filename(frame)
jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename)
new_frame = Jinja2TemplateFrame(frame)
if jinja2_breakpoints_for_file:
lineno = frame.f_lineno
template_lineno = _get_jinja2_template_line(frame)
if template_lineno is not None and dict_contains(jinja2_breakpoints_for_file, template_lineno):
jinja2_breakpoint = jinja2_breakpoints_for_file[template_lineno]
flag = True
new_frame = Jinja2TemplateFrame(frame)
return flag, jinja2_breakpoint, new_frame, type
def suspend(plugin, pydb, thread, frame, bp_type):
if bp_type == 'jinja2':
return _suspend_jinja2(pydb, thread, frame)
return None
def exception_break(plugin, pydb, pydb_frame, frame, args, arg):
pydb, filename, info, thread = args
exception, value, trace = arg
if pydb.jinja2_exception_break:
exception_type = dict_keys(pydb.jinja2_exception_break)[0]
if get_exception_name(exception) in ('UndefinedError', 'TemplateNotFound', 'TemplatesNotFound'):
#errors in rendering
render_frame = _find_jinja2_render_frame(frame)
if render_frame:
suspend_frame = _suspend_jinja2(pydb, thread, render_frame, CMD_ADD_EXCEPTION_BREAK, message=exception_type)
if suspend_frame:
add_exception_to_frame(suspend_frame, (exception, value, trace))
flag = True
suspend_frame.f_back = frame
frame = suspend_frame
return flag, frame
elif get_exception_name(exception) in ('TemplateSyntaxError', 'TemplateAssertionError'):
#errors in compile time
name = frame.f_code.co_name
if name in ('template', 'top-level template code', '<module>') or name.startswith('block '):
#Jinja2 translates exception info and creates fake frame on his own
pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK)
add_exception_to_frame(frame, (exception, value, trace))
thread.additional_info.suspend_type = JINJA2_SUSPEND
thread.additional_info.pydev_message = str(exception_type)
flag = True
return flag, frame
return None | true | true |
f71cdcfa194966b2387e234194561a46582002fa | 13,923 | py | Python | log_casp_inh/model_141.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | log_casp_inh/model_141.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | log_casp_inh/model_141.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | [
"MIT"
] | null | null | null | # exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('C6A', ['C8pro'])
Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'SmacM'])
Monomer('Ligand', ['Receptor'])
Monomer('C6pro', ['C3A'])
Monomer('ParpU', ['C3A'])
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('BidM', ['BaxM'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('Xiap', ['SmacC', 'C3A'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C3ub')
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('C3pro', ['C8A'])
Monomer('SmacM', ['BaxA'])
Monomer('SmacC', ['Xiap'])
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('ParpC')
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('C6A_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('Ligand_0', 1000.0)
Parameter('C6pro_0', 100.0)
Parameter('ParpU_0', 1000000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('BaxM_0', 40000.0)
Parameter('C8A_0', 0.0)
Parameter('Xiap_0', 35250.0)
Parameter('Receptor_0', 100.0)
Parameter('C3ub_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('C3pro_0', 21000.0)
Parameter('SmacM_0', 100000.0)
Parameter('SmacC_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('ParpC_0', 0.0)
Observable('C6A_obs', C6A())
Observable('BaxA_obs', BaxA())
Observable('Ligand_obs', Ligand())
Observable('C6pro_obs', C6pro())
Observable('ParpU_obs', ParpU())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('BidM_obs', BidM())
Observable('BaxM_obs', BaxM())
Observable('C8A_obs', C8A())
Observable('Xiap_obs', Xiap())
Observable('Receptor_obs', Receptor())
Observable('C3ub_obs', C3ub())
Observable('Fadd_obs', Fadd())
Observable('C3pro_obs', C3pro())
Observable('SmacM_obs', SmacM())
Observable('SmacC_obs', SmacC())
Observable('C8pro_obs', C8pro())
Observable('ParpC_obs', ParpC())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5) % SmacM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
Initial(C6A(C8pro=None), C6A_0)
Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), BaxA_0)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(C6pro(C3A=None), C6pro_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(BidM(BaxM=None), BidM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(Xiap(SmacC=None, C3A=None), Xiap_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C3ub(), C3ub_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(C3pro(C8A=None), C3pro_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(ParpC(), ParpC_0)
| 85.417178 | 598 | 0.808518 |
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('C6A', ['C8pro'])
Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'SmacM'])
Monomer('Ligand', ['Receptor'])
Monomer('C6pro', ['C3A'])
Monomer('ParpU', ['C3A'])
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('BidM', ['BaxM'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('Xiap', ['SmacC', 'C3A'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C3ub')
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('C3pro', ['C8A'])
Monomer('SmacM', ['BaxA'])
Monomer('SmacC', ['Xiap'])
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('ParpC')
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('C6A_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('Ligand_0', 1000.0)
Parameter('C6pro_0', 100.0)
Parameter('ParpU_0', 1000000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('BaxM_0', 40000.0)
Parameter('C8A_0', 0.0)
Parameter('Xiap_0', 35250.0)
Parameter('Receptor_0', 100.0)
Parameter('C3ub_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('C3pro_0', 21000.0)
Parameter('SmacM_0', 100000.0)
Parameter('SmacC_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('ParpC_0', 0.0)
Observable('C6A_obs', C6A())
Observable('BaxA_obs', BaxA())
Observable('Ligand_obs', Ligand())
Observable('C6pro_obs', C6pro())
Observable('ParpU_obs', ParpU())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('BidM_obs', BidM())
Observable('BaxM_obs', BaxM())
Observable('C8A_obs', C8A())
Observable('Xiap_obs', Xiap())
Observable('Receptor_obs', Receptor())
Observable('C3ub_obs', C3ub())
Observable('Fadd_obs', Fadd())
Observable('C3pro_obs', C3pro())
Observable('SmacM_obs', SmacM())
Observable('SmacC_obs', SmacC())
Observable('C8pro_obs', C8pro())
Observable('ParpC_obs', ParpC())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5) % SmacM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
Initial(C6A(C8pro=None), C6A_0)
Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), BaxA_0)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(C6pro(C3A=None), C6pro_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(BidM(BaxM=None), BidM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(Xiap(SmacC=None, C3A=None), Xiap_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C3ub(), C3ub_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(C3pro(C8A=None), C3pro_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(ParpC(), ParpC_0)
| true | true |
f71cdd5bf8f43adb0496971aa022174636ea52d4 | 6,042 | py | Python | sickbeard/cherrypy/_cpnative_server.py | Branlala/docker-sickbeardfr | 3ac85092dc4cc8a4171fb3c83e9682162245e13e | [
"MIT"
] | null | null | null | sickbeard/cherrypy/_cpnative_server.py | Branlala/docker-sickbeardfr | 3ac85092dc4cc8a4171fb3c83e9682162245e13e | [
"MIT"
] | null | null | null | sickbeard/cherrypy/_cpnative_server.py | Branlala/docker-sickbeardfr | 3ac85092dc4cc8a4171fb3c83e9682162245e13e | [
"MIT"
] | null | null | null | """Native adapter for serving CherryPy via its builtin server."""
import logging
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import cherrypy
from cherrypy._cperror import format_exc, bare_error
from cherrypy.lib import httputil
from cherrypy import wsgiserver
class NativeGateway(wsgiserver.Gateway):
recursive = False
def respond(self):
req = self.req
try:
# Obtain a Request object from CherryPy
local = req.server.bind_addr
local = httputil.Host(local[0], local[1], "")
remote = req.conn.remote_addr, req.conn.remote_port
remote = httputil.Host(remote[0], remote[1], "")
scheme = req.scheme
sn = cherrypy.tree.script_name(req.uri or "/")
if sn is None:
self.send_response('404 Not Found', [], [''])
else:
app = cherrypy.tree.apps[sn]
method = req.method
path = req.path
qs = req.qs or ""
headers = req.inheaders.items()
rfile = req.rfile
prev = None
try:
redirections = []
while True:
request, response = app.get_serving(
local, remote, scheme, "HTTP/1.1")
request.multithread = True
request.multiprocess = False
request.app = app
request.prev = prev
# Run the CherryPy Request object and obtain the response
try:
request.run(method, path, qs, req.request_protocol, headers, rfile)
break
except cherrypy.InternalRedirect, ir:
app.release_serving()
prev = request
if not self.recursive:
if ir.path in redirections:
raise RuntimeError("InternalRedirector visited the "
"same URL twice: %r" % ir.path)
else:
# Add the *previous* path_info + qs to redirections.
if qs:
qs = "?" + qs
redirections.append(sn + path + qs)
# Munge environment and try again.
method = "GET"
path = ir.path
qs = ir.query_string
rfile = StringIO()
self.send_response(
response.output_status, response.header_list,
response.body)
finally:
app.release_serving()
except:
tb = format_exc()
#print tb
cherrypy.log(tb, 'NATIVE_ADAPTER', severity=logging.ERROR)
s, h, b = bare_error()
self.send_response(s, h, b)
def send_response(self, status, headers, body):
req = self.req
# Set response status
req.status = str(status or "500 Server Error")
# Set response headers
for header, value in headers:
req.outheaders.append((header, value))
if (req.ready and not req.sent_headers):
req.sent_headers = True
req.send_headers()
# Set response body
for seg in body:
req.write(seg)
class CPHTTPServer(wsgiserver.HTTPServer):
"""Wrapper for wsgiserver.HTTPServer.
wsgiserver has been designed to not reference CherryPy in any way,
so that it can be used in other frameworks and applications.
Therefore, we wrap it here, so we can apply some attributes
from config -> cherrypy.server -> HTTPServer.
"""
def __init__(self, server_adapter=cherrypy.server):
self.server_adapter = server_adapter
server_name = (self.server_adapter.socket_host or
self.server_adapter.socket_file or
None)
wsgiserver.HTTPServer.__init__(
self, server_adapter.bind_addr, NativeGateway,
minthreads=server_adapter.thread_pool,
maxthreads=server_adapter.thread_pool_max,
server_name=server_name)
self.max_request_header_size = self.server_adapter.max_request_header_size or 0
self.max_request_body_size = self.server_adapter.max_request_body_size or 0
self.request_queue_size = self.server_adapter.socket_queue_size
self.timeout = self.server_adapter.socket_timeout
self.shutdown_timeout = self.server_adapter.shutdown_timeout
self.protocol = self.server_adapter.protocol_version
self.nodelay = self.server_adapter.nodelay
ssl_module = self.server_adapter.ssl_module or 'pyopenssl'
if self.server_adapter.ssl_context:
adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
self.ssl_adapter = adapter_class(
self.server_adapter.ssl_certificate,
self.server_adapter.ssl_private_key,
self.server_adapter.ssl_certificate_chain)
self.ssl_adapter.context = self.server_adapter.ssl_context
elif self.server_adapter.ssl_certificate:
adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
self.ssl_adapter = adapter_class(
self.server_adapter.ssl_certificate,
self.server_adapter.ssl_private_key,
self.server_adapter.ssl_certificate_chain)
| 40.013245 | 95 | 0.53095 | """Native adapter for serving CherryPy via its builtin server."""
import logging
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import cherrypy
from cherrypy._cperror import format_exc, bare_error
from cherrypy.lib import httputil
from cherrypy import wsgiserver
class NativeGateway(wsgiserver.Gateway):
recursive = False
def respond(self):
req = self.req
try:
local = req.server.bind_addr
local = httputil.Host(local[0], local[1], "")
remote = req.conn.remote_addr, req.conn.remote_port
remote = httputil.Host(remote[0], remote[1], "")
scheme = req.scheme
sn = cherrypy.tree.script_name(req.uri or "/")
if sn is None:
self.send_response('404 Not Found', [], [''])
else:
app = cherrypy.tree.apps[sn]
method = req.method
path = req.path
qs = req.qs or ""
headers = req.inheaders.items()
rfile = req.rfile
prev = None
try:
redirections = []
while True:
request, response = app.get_serving(
local, remote, scheme, "HTTP/1.1")
request.multithread = True
request.multiprocess = False
request.app = app
request.prev = prev
try:
request.run(method, path, qs, req.request_protocol, headers, rfile)
break
except cherrypy.InternalRedirect, ir:
app.release_serving()
prev = request
if not self.recursive:
if ir.path in redirections:
raise RuntimeError("InternalRedirector visited the "
"same URL twice: %r" % ir.path)
else:
if qs:
qs = "?" + qs
redirections.append(sn + path + qs)
method = "GET"
path = ir.path
qs = ir.query_string
rfile = StringIO()
self.send_response(
response.output_status, response.header_list,
response.body)
finally:
app.release_serving()
except:
tb = format_exc()
cherrypy.log(tb, 'NATIVE_ADAPTER', severity=logging.ERROR)
s, h, b = bare_error()
self.send_response(s, h, b)
def send_response(self, status, headers, body):
req = self.req
req.status = str(status or "500 Server Error")
for header, value in headers:
req.outheaders.append((header, value))
if (req.ready and not req.sent_headers):
req.sent_headers = True
req.send_headers()
for seg in body:
req.write(seg)
class CPHTTPServer(wsgiserver.HTTPServer):
"""Wrapper for wsgiserver.HTTPServer.
wsgiserver has been designed to not reference CherryPy in any way,
so that it can be used in other frameworks and applications.
Therefore, we wrap it here, so we can apply some attributes
from config -> cherrypy.server -> HTTPServer.
"""
def __init__(self, server_adapter=cherrypy.server):
self.server_adapter = server_adapter
server_name = (self.server_adapter.socket_host or
self.server_adapter.socket_file or
None)
wsgiserver.HTTPServer.__init__(
self, server_adapter.bind_addr, NativeGateway,
minthreads=server_adapter.thread_pool,
maxthreads=server_adapter.thread_pool_max,
server_name=server_name)
self.max_request_header_size = self.server_adapter.max_request_header_size or 0
self.max_request_body_size = self.server_adapter.max_request_body_size or 0
self.request_queue_size = self.server_adapter.socket_queue_size
self.timeout = self.server_adapter.socket_timeout
self.shutdown_timeout = self.server_adapter.shutdown_timeout
self.protocol = self.server_adapter.protocol_version
self.nodelay = self.server_adapter.nodelay
ssl_module = self.server_adapter.ssl_module or 'pyopenssl'
if self.server_adapter.ssl_context:
adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
self.ssl_adapter = adapter_class(
self.server_adapter.ssl_certificate,
self.server_adapter.ssl_private_key,
self.server_adapter.ssl_certificate_chain)
self.ssl_adapter.context = self.server_adapter.ssl_context
elif self.server_adapter.ssl_certificate:
adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
self.ssl_adapter = adapter_class(
self.server_adapter.ssl_certificate,
self.server_adapter.ssl_private_key,
self.server_adapter.ssl_certificate_chain)
| false | true |
f71cdebb1cdf3286b45c66f1071264ecfee8c65a | 873 | py | Python | state/go_home.py | LHGames-2018/espace | ce9ed47fee285d60bf7183132d5686ad8198b70f | [
"MIT"
] | null | null | null | state/go_home.py | LHGames-2018/espace | ce9ed47fee285d60bf7183132d5686ad8198b70f | [
"MIT"
] | null | null | null | state/go_home.py | LHGames-2018/espace | ce9ed47fee285d60bf7183132d5686ad8198b70f | [
"MIT"
] | null | null | null | from state.machine import BaseState
from helper import *
import state
class GoHomeState(BaseState):
def action(self, game_state):
my_pos = game_state_helper.get_my_position(game_state)
poids, next_move = game_state_helper.get_home(game_state)
if not next_move:
vector = game_state['PlayerInfo'].HouseLocation - my_pos
if abs(vector.x) > abs(vector.y):
next_move = Point(-1 if vector.x < 0 else 1, 0)
else:
next_move = Point(0, -1 if vector.y < 0 else 1)
if poids == 0:
return state.GatherResourcesState(), None
print(my_pos, next_move, file=__import__('sys').stderr)
tile_content = game_state['parsedGameMap'][(my_pos + next_move).to_tuple()]
action = create_move_action(tile_content, next_move)
return None, action
| 30.103448 | 83 | 0.635739 | from state.machine import BaseState
from helper import *
import state
class GoHomeState(BaseState):
def action(self, game_state):
my_pos = game_state_helper.get_my_position(game_state)
poids, next_move = game_state_helper.get_home(game_state)
if not next_move:
vector = game_state['PlayerInfo'].HouseLocation - my_pos
if abs(vector.x) > abs(vector.y):
next_move = Point(-1 if vector.x < 0 else 1, 0)
else:
next_move = Point(0, -1 if vector.y < 0 else 1)
if poids == 0:
return state.GatherResourcesState(), None
print(my_pos, next_move, file=__import__('sys').stderr)
tile_content = game_state['parsedGameMap'][(my_pos + next_move).to_tuple()]
action = create_move_action(tile_content, next_move)
return None, action
| true | true |
f71cdecd4a4c849ec91e2ee1edc93e4c72d44a98 | 1,158 | py | Python | google/pubsub_v1/services/subscriber/transports/__init__.py | acocuzzo/python-pubsub | fcb67dd0d8fff5a583ebe0a3a08d0219601df8e9 | [
"Apache-2.0"
] | null | null | null | google/pubsub_v1/services/subscriber/transports/__init__.py | acocuzzo/python-pubsub | fcb67dd0d8fff5a583ebe0a3a08d0219601df8e9 | [
"Apache-2.0"
] | null | null | null | google/pubsub_v1/services/subscriber/transports/__init__.py | acocuzzo/python-pubsub | fcb67dd0d8fff5a583ebe0a3a08d0219601df8e9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import SubscriberTransport
from .grpc import SubscriberGrpcTransport
from .grpc_asyncio import SubscriberGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]]
_transport_registry["grpc"] = SubscriberGrpcTransport
_transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport
__all__ = (
"SubscriberTransport",
"SubscriberGrpcTransport",
"SubscriberGrpcAsyncIOTransport",
)
| 34.058824 | 81 | 0.780656 |
from collections import OrderedDict
from typing import Dict, Type
from .base import SubscriberTransport
from .grpc import SubscriberGrpcTransport
from .grpc_asyncio import SubscriberGrpcAsyncIOTransport
_transport_registry = OrderedDict()
_transport_registry["grpc"] = SubscriberGrpcTransport
_transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport
__all__ = (
"SubscriberTransport",
"SubscriberGrpcTransport",
"SubscriberGrpcAsyncIOTransport",
)
| true | true |
f71cdf394d949f1c6fa70b3924bafe111b4036a5 | 692 | py | Python | cumulusci/cli/logger.py | jayhatha/CumulusCI | b3864621d3aca72dec669339af08657526582344 | [
"BSD-3-Clause"
] | null | null | null | cumulusci/cli/logger.py | jayhatha/CumulusCI | b3864621d3aca72dec669339af08657526582344 | [
"BSD-3-Clause"
] | null | null | null | cumulusci/cli/logger.py | jayhatha/CumulusCI | b3864621d3aca72dec669339af08657526582344 | [
"BSD-3-Clause"
] | null | null | null | """ CLI logger """
from __future__ import unicode_literals
import logging
import coloredlogs
import requests
def init_logger(log_requests=False):
""" Initialize the logger """
logger = logging.getLogger(__name__.split(".")[0])
for handler in logger.handlers: # pragma: nocover
logger.removeHandler(handler)
formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
if log_requests:
requests.packages.urllib3.add_stderr_logger()
| 25.62963 | 76 | 0.722543 | from __future__ import unicode_literals
import logging
import coloredlogs
import requests
def init_logger(log_requests=False):
logger = logging.getLogger(__name__.split(".")[0])
for handler in logger.handlers:
logger.removeHandler(handler)
formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
if log_requests:
requests.packages.urllib3.add_stderr_logger()
| true | true |
f71ce1aa0ef3d6fbbb1c1d05ae5adc8f5fcb088b | 25,861 | py | Python | pint/testsuite/test_issues.py | clarkgwillison/pint | 03be0544b749ee55fbd1253d18e2a84151dce716 | [
"BSD-3-Clause"
] | 2 | 2020-07-08T20:09:35.000Z | 2021-03-05T12:51:30.000Z | pint/testsuite/test_issues.py | clarkgwillison/pint | 03be0544b749ee55fbd1253d18e2a84151dce716 | [
"BSD-3-Clause"
] | null | null | null | pint/testsuite/test_issues.py | clarkgwillison/pint | 03be0544b749ee55fbd1253d18e2a84151dce716 | [
"BSD-3-Clause"
] | null | null | null | import copy
import math
import pprint
import unittest
import pytest
from pint import Context, DimensionalityError, UnitRegistry
from pint.compat import np
from pint.testsuite import QuantityTestCase, helpers
from pint.unit import UnitsContainer
from pint.util import ParserHelper
ureg = UnitRegistry()
class TestIssues(QuantityTestCase):
FORCE_NDARRAY = False
def setup(self):
self.ureg.autoconvert_offset_to_baseunit = False
@unittest.expectedFailure
def test_issue25(self):
x = ParserHelper.from_string("10 %")
self.assertEqual(x, ParserHelper(10, {"%": 1}))
x = ParserHelper.from_string("10 ‰")
self.assertEqual(x, ParserHelper(10, {"‰": 1}))
ureg.define("percent = [fraction]; offset: 0 = %")
ureg.define("permille = percent / 10 = ‰")
x = ureg.parse_expression("10 %")
self.assertEqual(x, ureg.Quantity(10, {"%": 1}))
y = ureg.parse_expression("10 ‰")
self.assertEqual(y, ureg.Quantity(10, {"‰": 1}))
self.assertEqual(x.to("‰"), ureg.Quantity(1, {"‰": 1}))
def test_issue29(self):
t = 4 * ureg("mW")
self.assertEqual(t.magnitude, 4)
self.assertEqual(t._units, UnitsContainer(milliwatt=1))
self.assertEqual(t.to("joule / second"), 4e-3 * ureg("W"))
@unittest.expectedFailure
@helpers.requires_numpy()
def test_issue37(self):
x = np.ma.masked_array([1, 2, 3], mask=[True, True, False])
q = ureg.meter * x
self.assertIsInstance(q, ureg.Quantity)
np.testing.assert_array_equal(q.magnitude, x)
self.assertEqual(q.units, ureg.meter.units)
q = x * ureg.meter
self.assertIsInstance(q, ureg.Quantity)
np.testing.assert_array_equal(q.magnitude, x)
self.assertEqual(q.units, ureg.meter.units)
m = np.ma.masked_array(2 * np.ones(3, 3))
qq = q * m
self.assertIsInstance(qq, ureg.Quantity)
np.testing.assert_array_equal(qq.magnitude, x * m)
self.assertEqual(qq.units, ureg.meter.units)
qq = m * q
self.assertIsInstance(qq, ureg.Quantity)
np.testing.assert_array_equal(qq.magnitude, x * m)
self.assertEqual(qq.units, ureg.meter.units)
@unittest.expectedFailure
@helpers.requires_numpy()
def test_issue39(self):
x = np.matrix([[1, 2, 3], [1, 2, 3], [1, 2, 3]])
q = ureg.meter * x
self.assertIsInstance(q, ureg.Quantity)
np.testing.assert_array_equal(q.magnitude, x)
self.assertEqual(q.units, ureg.meter.units)
q = x * ureg.meter
self.assertIsInstance(q, ureg.Quantity)
np.testing.assert_array_equal(q.magnitude, x)
self.assertEqual(q.units, ureg.meter.units)
m = np.matrix(2 * np.ones(3, 3))
qq = q * m
self.assertIsInstance(qq, ureg.Quantity)
np.testing.assert_array_equal(qq.magnitude, x * m)
self.assertEqual(qq.units, ureg.meter.units)
qq = m * q
self.assertIsInstance(qq, ureg.Quantity)
np.testing.assert_array_equal(qq.magnitude, x * m)
self.assertEqual(qq.units, ureg.meter.units)
@helpers.requires_numpy()
def test_issue44(self):
x = 4.0 * ureg.dimensionless
np.sqrt(x)
self.assertQuantityAlmostEqual(
np.sqrt([4.0] * ureg.dimensionless), [2.0] * ureg.dimensionless
)
self.assertQuantityAlmostEqual(
np.sqrt(4.0 * ureg.dimensionless), 2.0 * ureg.dimensionless
)
def test_issue45(self):
import math
self.assertAlmostEqual(math.sqrt(4 * ureg.m / ureg.cm), math.sqrt(4 * 100))
self.assertAlmostEqual(float(ureg.V / ureg.mV), 1000.0)
@helpers.requires_numpy()
def test_issue45b(self):
self.assertAlmostEqual(
np.sin([np.pi / 2] * ureg.m / ureg.m),
np.sin([np.pi / 2] * ureg.dimensionless),
)
self.assertAlmostEqual(
np.sin([np.pi / 2] * ureg.cm / ureg.m),
np.sin([np.pi / 2] * ureg.dimensionless * 0.01),
)
def test_issue50(self):
Q_ = ureg.Quantity
self.assertEqual(Q_(100), 100 * ureg.dimensionless)
self.assertEqual(Q_("100"), 100 * ureg.dimensionless)
def test_issue52(self):
u1 = UnitRegistry()
u2 = UnitRegistry()
q1 = 1 * u1.meter
q2 = 1 * u2.meter
import operator as op
for fun in (
op.add,
op.iadd,
op.sub,
op.isub,
op.mul,
op.imul,
op.floordiv,
op.ifloordiv,
op.truediv,
op.itruediv,
):
self.assertRaises(ValueError, fun, q1, q2)
def test_issue54(self):
self.assertEqual((1 * ureg.km / ureg.m + 1).magnitude, 1001)
def test_issue54_related(self):
self.assertEqual(ureg.km / ureg.m, 1000)
self.assertEqual(1000, ureg.km / ureg.m)
self.assertLess(900, ureg.km / ureg.m)
self.assertGreater(1100, ureg.km / ureg.m)
def test_issue61(self):
Q_ = ureg.Quantity
for value in ({}, {"a": 3}, None):
self.assertRaises(TypeError, Q_, value)
self.assertRaises(TypeError, Q_, value, "meter")
self.assertRaises(ValueError, Q_, "", "meter")
self.assertRaises(ValueError, Q_, "")
@helpers.requires_not_numpy()
def test_issue61_notNP(self):
Q_ = ureg.Quantity
for value in ([1, 2, 3], (1, 2, 3)):
self.assertRaises(TypeError, Q_, value)
self.assertRaises(TypeError, Q_, value, "meter")
def test_issue62(self):
m = ureg("m**0.5")
self.assertEqual(str(m.units), "meter ** 0.5")
def test_issue66(self):
self.assertEqual(
ureg.get_dimensionality(UnitsContainer({"[temperature]": 1})),
UnitsContainer({"[temperature]": 1}),
)
self.assertEqual(
ureg.get_dimensionality(ureg.kelvin), UnitsContainer({"[temperature]": 1})
)
self.assertEqual(
ureg.get_dimensionality(ureg.degC), UnitsContainer({"[temperature]": 1})
)
def test_issue66b(self):
self.assertEqual(
ureg.get_base_units(ureg.kelvin),
(1.0, ureg.Unit(UnitsContainer({"kelvin": 1}))),
)
self.assertEqual(
ureg.get_base_units(ureg.degC),
(1.0, ureg.Unit(UnitsContainer({"kelvin": 1}))),
)
def test_issue69(self):
q = ureg("m").to(ureg("in"))
self.assertEqual(q, ureg("m").to("in"))
@helpers.requires_numpy()
def test_issue74(self):
v1 = np.asarray([1.0, 2.0, 3.0])
v2 = np.asarray([3.0, 2.0, 1.0])
q1 = v1 * ureg.ms
q2 = v2 * ureg.ms
np.testing.assert_array_equal(q1 < q2, v1 < v2)
np.testing.assert_array_equal(q1 > q2, v1 > v2)
np.testing.assert_array_equal(q1 <= q2, v1 <= v2)
np.testing.assert_array_equal(q1 >= q2, v1 >= v2)
q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s
v2s = q2s.to("ms").magnitude
np.testing.assert_array_equal(q1 < q2s, v1 < v2s)
np.testing.assert_array_equal(q1 > q2s, v1 > v2s)
np.testing.assert_array_equal(q1 <= q2s, v1 <= v2s)
np.testing.assert_array_equal(q1 >= q2s, v1 >= v2s)
@helpers.requires_numpy()
def test_issue75(self):
v1 = np.asarray([1.0, 2.0, 3.0])
v2 = np.asarray([3.0, 2.0, 1.0])
q1 = v1 * ureg.ms
q2 = v2 * ureg.ms
np.testing.assert_array_equal(q1 == q2, v1 == v2)
np.testing.assert_array_equal(q1 != q2, v1 != v2)
q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s
v2s = q2s.to("ms").magnitude
np.testing.assert_array_equal(q1 == q2s, v1 == v2s)
np.testing.assert_array_equal(q1 != q2s, v1 != v2s)
@helpers.requires_uncertainties()
def test_issue77(self):
acc = (5.0 * ureg("m/s/s")).plus_minus(0.25)
tim = (37.0 * ureg("s")).plus_minus(0.16)
dis = acc * tim ** 2 / 2
self.assertEqual(dis.value, acc.value * tim.value ** 2 / 2)
def test_issue85(self):
T = 4.0 * ureg.kelvin
m = 1.0 * ureg.amu
va = 2.0 * ureg.k * T / m
va.to_base_units()
boltmk = 1.380649e-23 * ureg.J / ureg.K
vb = 2.0 * boltmk * T / m
self.assertQuantityAlmostEqual(va.to_base_units(), vb.to_base_units())
def test_issue86(self):
ureg = self.ureg
ureg.autoconvert_offset_to_baseunit = True
def parts(q):
return q.magnitude, q.units
q1 = 10.0 * ureg.degC
q2 = 10.0 * ureg.kelvin
k1 = q1.to_base_units()
q3 = 3.0 * ureg.meter
q1m, q1u = parts(q1)
q2m, q2u = parts(q2)
q3m, q3u = parts(q3)
k1m, k1u = parts(k1)
self.assertEqual(parts(q2 * q3), (q2m * q3m, q2u * q3u))
self.assertEqual(parts(q2 / q3), (q2m / q3m, q2u / q3u))
self.assertEqual(parts(q3 * q2), (q3m * q2m, q3u * q2u))
self.assertEqual(parts(q3 / q2), (q3m / q2m, q3u / q2u))
self.assertEqual(parts(q2 ** 1), (q2m ** 1, q2u ** 1))
self.assertEqual(parts(q2 ** -1), (q2m ** -1, q2u ** -1))
self.assertEqual(parts(q2 ** 2), (q2m ** 2, q2u ** 2))
self.assertEqual(parts(q2 ** -2), (q2m ** -2, q2u ** -2))
self.assertEqual(parts(q1 * q3), (k1m * q3m, k1u * q3u))
self.assertEqual(parts(q1 / q3), (k1m / q3m, k1u / q3u))
self.assertEqual(parts(q3 * q1), (q3m * k1m, q3u * k1u))
self.assertEqual(parts(q3 / q1), (q3m / k1m, q3u / k1u))
self.assertEqual(parts(q1 ** -1), (k1m ** -1, k1u ** -1))
self.assertEqual(parts(q1 ** 2), (k1m ** 2, k1u ** 2))
self.assertEqual(parts(q1 ** -2), (k1m ** -2, k1u ** -2))
def test_issues86b(self):
ureg = self.ureg
T1 = 200.0 * ureg.degC
T2 = T1.to(ureg.kelvin)
m = 132.9054519 * ureg.amu
v1 = 2 * ureg.k * T1 / m
v2 = 2 * ureg.k * T2 / m
self.assertQuantityAlmostEqual(v1, v2)
self.assertQuantityAlmostEqual(v1, v2.to_base_units())
self.assertQuantityAlmostEqual(v1.to_base_units(), v2)
self.assertQuantityAlmostEqual(v1.to_base_units(), v2.to_base_units())
@unittest.expectedFailure
def test_issue86c(self):
ureg = self.ureg
ureg.autoconvert_offset_to_baseunit = True
T = ureg.degC
T = 100.0 * T
self.assertQuantityAlmostEqual(ureg.k * 2 * T, ureg.k * (2 * T))
def test_issue93(self):
x = 5 * ureg.meter
self.assertIsInstance(x.magnitude, int)
y = 0.1 * ureg.meter
self.assertIsInstance(y.magnitude, float)
z = 5 * ureg.meter
self.assertIsInstance(z.magnitude, int)
z += y
self.assertIsInstance(z.magnitude, float)
self.assertQuantityAlmostEqual(x + y, 5.1 * ureg.meter)
self.assertQuantityAlmostEqual(z, 5.1 * ureg.meter)
def test_issue104(self):
x = [ureg("1 meter"), ureg("1 meter"), ureg("1 meter")]
y = [ureg("1 meter")] * 3
def summer(values):
if not values:
return 0
total = values[0]
for v in values[1:]:
total += v
return total
self.assertQuantityAlmostEqual(summer(x), ureg.Quantity(3, "meter"))
self.assertQuantityAlmostEqual(x[0], ureg.Quantity(1, "meter"))
self.assertQuantityAlmostEqual(summer(y), ureg.Quantity(3, "meter"))
self.assertQuantityAlmostEqual(y[0], ureg.Quantity(1, "meter"))
def test_issue105(self):
func = ureg.parse_unit_name
val = list(func("meter"))
self.assertEqual(list(func("METER")), [])
self.assertEqual(val, list(func("METER", False)))
for func in (ureg.get_name, ureg.parse_expression):
val = func("meter")
with self.assertRaises(AttributeError):
func("METER")
self.assertEqual(val, func("METER", False))
@helpers.requires_numpy()
def test_issue127(self):
q = [1.0, 2.0, 3.0, 4.0] * self.ureg.meter
q[0] = np.nan
self.assertNotEqual(q[0], 1.0)
self.assertTrue(math.isnan(q[0].magnitude))
q[1] = float("NaN")
self.assertNotEqual(q[1], 2.0)
self.assertTrue(math.isnan(q[1].magnitude))
def test_issue170(self):
Q_ = UnitRegistry().Quantity
q = Q_("1 kHz") / Q_("100 Hz")
iq = int(q)
self.assertEqual(iq, 10)
self.assertIsInstance(iq, int)
def test_angstrom_creation(self):
ureg.Quantity(2, "Å")
def test_alternative_angstrom_definition(self):
ureg.Quantity(2, "\u212B")
def test_micro_creation(self):
ureg.Quantity(2, "µm")
@helpers.requires_numpy()
def test_issue171_real_imag(self):
qr = [1.0, 2.0, 3.0, 4.0] * self.ureg.meter
qi = [4.0, 3.0, 2.0, 1.0] * self.ureg.meter
q = qr + 1j * qi
self.assertQuantityEqual(q.real, qr)
self.assertQuantityEqual(q.imag, qi)
@helpers.requires_numpy()
def test_issue171_T(self):
a = np.asarray([[1.0, 2.0, 3.0, 4.0], [4.0, 3.0, 2.0, 1.0]])
q1 = a * self.ureg.meter
q2 = a.T * self.ureg.meter
self.assertQuantityEqual(q1.T, q2)
@helpers.requires_numpy()
def test_issue250(self):
a = self.ureg.V
b = self.ureg.mV
self.assertEqual(np.float16(a / b), 1000.0)
self.assertEqual(np.float32(a / b), 1000.0)
self.assertEqual(np.float64(a / b), 1000.0)
if "float128" in dir(np):
self.assertEqual(np.float128(a / b), 1000.0)
def test_issue252(self):
ur = UnitRegistry()
q = ur("3 F")
t = copy.deepcopy(q)
u = t.to(ur.mF)
self.assertQuantityEqual(q.to(ur.mF), u)
def test_issue323(self):
from fractions import Fraction as F
self.assertEqual((self.Q_(F(2, 3), "s")).to("ms"), self.Q_(F(2000, 3), "ms"))
self.assertEqual((self.Q_(F(2, 3), "m")).to("km"), self.Q_(F(1, 1500), "km"))
def test_issue339(self):
q1 = self.ureg("")
self.assertEqual(q1.magnitude, 1)
self.assertEqual(q1.units, self.ureg.dimensionless)
q2 = self.ureg("1 dimensionless")
self.assertEqual(q1, q2)
def test_issue354_356_370(self):
self.assertEqual(
"{:~}".format(1 * self.ureg.second / self.ureg.millisecond), "1.0 s / ms"
)
self.assertEqual("{:~}".format(1 * self.ureg.count), "1 count")
self.assertEqual("{:~}".format(1 * self.ureg("MiB")), "1 MiB")
def test_issue468(self):
@ureg.wraps(("kg"), "meter")
def f(x):
return x
x = ureg.Quantity(1.0, "meter")
y = f(x)
z = x * y
self.assertEqual(z, ureg.Quantity(1.0, "meter * kilogram"))
@helpers.requires_numpy()
def test_issue482(self):
q = self.ureg.Quantity(1, self.ureg.dimensionless)
qe = np.exp(q)
self.assertIsInstance(qe, self.ureg.Quantity)
@helpers.requires_numpy()
def test_issue483(self):
ureg = self.ureg
a = np.asarray([1, 2, 3])
q = [1, 2, 3] * ureg.dimensionless
p = (q ** q).m
np.testing.assert_array_equal(p, a ** a)
def test_issue507(self):
# leading underscore in unit works with numbers
ureg.define("_100km = 100 * kilometer")
battery_ec = 16 * ureg.kWh / ureg._100km # noqa: F841
# ... but not with text
ureg.define("_home = 4700 * kWh / year")
with self.assertRaises(AttributeError):
home_elec_power = 1 * ureg._home # noqa: F841
# ... or with *only* underscores
ureg.define("_ = 45 * km")
with self.assertRaises(AttributeError):
one_blank = 1 * ureg._ # noqa: F841
def test_issue523(self):
src, dst = UnitsContainer({"meter": 1}), UnitsContainer({"degF": 1})
value = 10.0
convert = self.ureg.convert
self.assertRaises(DimensionalityError, convert, value, src, dst)
self.assertRaises(DimensionalityError, convert, value, dst, src)
def test_issue532(self):
ureg = self.ureg
@ureg.check(ureg(""))
def f(x):
return 2 * x
self.assertEqual(f(ureg.Quantity(1, "")), 2)
self.assertRaises(DimensionalityError, f, ureg.Quantity(1, "m"))
def test_issue625a(self):
Q_ = ureg.Quantity
from math import sqrt
@ureg.wraps(ureg.second, (ureg.meters, ureg.meters / ureg.second ** 2))
def calculate_time_to_fall(height, gravity=Q_(9.8, "m/s^2")):
"""Calculate time to fall from a height h with a default gravity.
By default, the gravity is assumed to be earth gravity,
but it can be modified.
d = .5 * g * t**2
t = sqrt(2 * d / g)
Parameters
----------
height :
gravity :
(Default value = Q_(9.8)
"m/s^2") :
Returns
-------
"""
return sqrt(2 * height / gravity)
lunar_module_height = Q_(10, "m")
t1 = calculate_time_to_fall(lunar_module_height)
print(t1)
self.assertAlmostEqual(t1, Q_(1.4285714285714286, "s"))
moon_gravity = Q_(1.625, "m/s^2")
t2 = calculate_time_to_fall(lunar_module_height, moon_gravity)
self.assertAlmostEqual(t2, Q_(3.508232077228117, "s"))
def test_issue625b(self):
Q_ = ureg.Quantity
@ureg.wraps("=A*B", ("=A", "=B"))
def get_displacement(time, rate=Q_(1, "m/s")):
"""Calculates displacement from a duration and default rate.
Parameters
----------
time :
rate :
(Default value = Q_(1)
"m/s") :
Returns
-------
"""
return time * rate
d1 = get_displacement(Q_(2, "s"))
self.assertAlmostEqual(d1, Q_(2, "m"))
d2 = get_displacement(Q_(2, "s"), Q_(1, "deg/s"))
self.assertAlmostEqual(d2, Q_(2, " deg"))
def test_issue625c(self):
u = UnitRegistry()
@u.wraps("=A*B*C", ("=A", "=B", "=C"))
def get_product(a=2 * u.m, b=3 * u.m, c=5 * u.m):
return a * b * c
self.assertEqual(get_product(a=3 * u.m), 45 * u.m ** 3)
self.assertEqual(get_product(b=2 * u.m), 20 * u.m ** 3)
self.assertEqual(get_product(c=1 * u.dimensionless), 6 * u.m ** 2)
def test_issue655a(self):
distance = 1 * ureg.m
time = 1 * ureg.s
velocity = distance / time
self.assertEqual(distance.check("[length]"), True)
self.assertEqual(distance.check("[time]"), False)
self.assertEqual(velocity.check("[length] / [time]"), True)
self.assertEqual(velocity.check("1 / [time] * [length]"), True)
def test_issue655b(self):
Q_ = ureg.Quantity
@ureg.check("[length]", "[length]/[time]^2")
def pendulum_period(length, G=Q_(1, "standard_gravity")):
print(length)
return (2 * math.pi * (length / G) ** 0.5).to("s")
length = Q_(1, ureg.m)
# Assume earth gravity
t = pendulum_period(length)
self.assertAlmostEqual(t, Q_("2.0064092925890407 second"))
# Use moon gravity
moon_gravity = Q_(1.625, "m/s^2")
t = pendulum_period(length, moon_gravity)
self.assertAlmostEqual(t, Q_("4.928936075204336 second"))
def test_issue783(self):
assert not ureg("g") == []
def test_issue856(self):
ph1 = ParserHelper(scale=123)
ph2 = copy.deepcopy(ph1)
assert ph2.scale == ph1.scale
ureg1 = UnitRegistry()
ureg2 = copy.deepcopy(ureg1)
# Very basic functionality test
assert ureg2("1 t").to("kg").magnitude == 1000
def test_issue856b(self):
# Test that, after a deepcopy(), the two UnitRegistries are
# independent from each other
ureg1 = UnitRegistry()
ureg2 = copy.deepcopy(ureg1)
ureg1.define("test123 = 123 kg")
ureg2.define("test123 = 456 kg")
assert ureg1("1 test123").to("kg").magnitude == 123
assert ureg2("1 test123").to("kg").magnitude == 456
def test_issue876(self):
# Same hash must not imply equality.
# As an implementation detail of CPython, hash(-1) == hash(-2).
# This test is useless in potential alternative Python implementations where
# hash(-1) != hash(-2); one would need to find hash collisions specific for each
# implementation
a = UnitsContainer({"[mass]": -1})
b = UnitsContainer({"[mass]": -2})
c = UnitsContainer({"[mass]": -3})
# Guarantee working on alternative Python implementations
assert (hash(-1) == hash(-2)) == (hash(a) == hash(b))
assert (hash(-1) == hash(-3)) == (hash(a) == hash(c))
assert a != b
assert a != c
def test_issue902(self):
ureg = UnitRegistry(auto_reduce_dimensions=True)
velocity = 1 * ureg.m / ureg.s
cross_section = 1 * ureg.um ** 2
result = cross_section / velocity
assert result == 1e-12 * ureg.m * ureg.s
def test_issue912(self):
"""pprint.pformat() invokes sorted() on large sets and frozensets and graciously
handles TypeError, but not generic Exceptions. This test will fail if
pint.DimensionalityError stops being a subclass of TypeError.
Parameters
----------
Returns
-------
"""
meter_units = ureg.get_compatible_units(ureg.meter)
hertz_units = ureg.get_compatible_units(ureg.hertz)
pprint.pformat(meter_units | hertz_units)
def test_issue932(self):
q = ureg.Quantity("1 kg")
with self.assertRaises(DimensionalityError):
q.to("joule")
ureg.enable_contexts("energy", *(Context() for _ in range(20)))
q.to("joule")
ureg.disable_contexts()
with self.assertRaises(DimensionalityError):
q.to("joule")
def test_issue960(self):
q = (1 * ureg.nanometer).to_compact("micrometer")
assert q.units == ureg.nanometer
assert q.magnitude == 1
def test_issue1032(self):
class MultiplicativeDictionary(dict):
def __rmul__(self, other):
return self.__class__(
{key: value * other for key, value in self.items()}
)
q = 3 * ureg.s
d = MultiplicativeDictionary({4: 5, 6: 7})
assert q * d == MultiplicativeDictionary({4: 15 * ureg.s, 6: 21 * ureg.s})
with self.assertRaises(TypeError):
d * q
@helpers.requires_numpy()
def test_issue973(self):
"""Verify that an empty array Quantity can be created through multiplication."""
q0 = np.array([]) * ureg.m # by Unit
q1 = np.array([]) * ureg("m") # by Quantity
assert isinstance(q0, ureg.Quantity)
assert isinstance(q1, ureg.Quantity)
assert len(q0) == len(q1) == 0
def test_issue1062_issue1097(self):
# Must not be used by any other tests
assert "nanometer" not in ureg._units
for i in range(5):
ctx = Context.from_lines(["@context _", "cal = 4 J"])
with ureg.context("sp", ctx):
q = ureg.Quantity(1, "nm")
q.to("J")
def test_issue1086(self):
# units with prefixes should correctly test as 'in' the registry
assert "bits" in ureg
assert "gigabits" in ureg
assert "meters" in ureg
assert "kilometers" in ureg
# unknown or incorrect units should test as 'not in' the registry
assert "magicbits" not in ureg
assert "unknownmeters" not in ureg
assert "gigatrees" not in ureg
def test_issue1112(self):
ureg = UnitRegistry(
"""
m = [length]
g = [mass]
s = [time]
ft = 0.305 m
lb = 454 g
@context c1
[time]->[length] : value * 10 m/s
@end
@context c2
ft = 0.3 m
@end
@context c3
lb = 500 g
@end
""".splitlines()
)
ureg.enable_contexts("c1")
ureg.enable_contexts("c2")
ureg.enable_contexts("c3")
if np is not None:
@pytest.mark.parametrize(
"callable",
[
lambda x: np.sin(x / x.units), # Issue 399
lambda x: np.cos(x / x.units), # Issue 399
np.isfinite, # Issue 481
np.shape, # Issue 509
np.size, # Issue 509
np.sqrt, # Issue 622
lambda x: x.mean(), # Issue 678
lambda x: x.copy(), # Issue 678
np.array,
lambda x: x.conjugate,
],
)
@pytest.mark.parametrize(
"q",
[
pytest.param(ureg.Quantity(1, "m"), id="python scalar int"),
pytest.param(ureg.Quantity([1, 2, 3, 4], "m"), id="array int"),
pytest.param(ureg.Quantity([1], "m")[0], id="numpy scalar int"),
pytest.param(ureg.Quantity(1.0, "m"), id="python scalar float"),
pytest.param(ureg.Quantity([1.0, 2.0, 3.0, 4.0], "m"), id="array float"),
pytest.param(ureg.Quantity([1.0], "m")[0], id="numpy scalar float"),
],
)
def test_issue925(callable, q):
# Test for immutability of type
type_before = type(q._magnitude)
callable(q)
assert isinstance(q._magnitude, type_before)
| 33.369032 | 88 | 0.560806 | import copy
import math
import pprint
import unittest
import pytest
from pint import Context, DimensionalityError, UnitRegistry
from pint.compat import np
from pint.testsuite import QuantityTestCase, helpers
from pint.unit import UnitsContainer
from pint.util import ParserHelper
ureg = UnitRegistry()
class TestIssues(QuantityTestCase):
FORCE_NDARRAY = False
def setup(self):
self.ureg.autoconvert_offset_to_baseunit = False
@unittest.expectedFailure
def test_issue25(self):
x = ParserHelper.from_string("10 %")
self.assertEqual(x, ParserHelper(10, {"%": 1}))
x = ParserHelper.from_string("10 ‰")
self.assertEqual(x, ParserHelper(10, {"‰": 1}))
ureg.define("percent = [fraction]; offset: 0 = %")
ureg.define("permille = percent / 10 = ‰")
x = ureg.parse_expression("10 %")
self.assertEqual(x, ureg.Quantity(10, {"%": 1}))
y = ureg.parse_expression("10 ‰")
self.assertEqual(y, ureg.Quantity(10, {"‰": 1}))
self.assertEqual(x.to("‰"), ureg.Quantity(1, {"‰": 1}))
def test_issue29(self):
t = 4 * ureg("mW")
self.assertEqual(t.magnitude, 4)
self.assertEqual(t._units, UnitsContainer(milliwatt=1))
self.assertEqual(t.to("joule / second"), 4e-3 * ureg("W"))
@unittest.expectedFailure
@helpers.requires_numpy()
def test_issue37(self):
x = np.ma.masked_array([1, 2, 3], mask=[True, True, False])
q = ureg.meter * x
self.assertIsInstance(q, ureg.Quantity)
np.testing.assert_array_equal(q.magnitude, x)
self.assertEqual(q.units, ureg.meter.units)
q = x * ureg.meter
self.assertIsInstance(q, ureg.Quantity)
np.testing.assert_array_equal(q.magnitude, x)
self.assertEqual(q.units, ureg.meter.units)
m = np.ma.masked_array(2 * np.ones(3, 3))
qq = q * m
self.assertIsInstance(qq, ureg.Quantity)
np.testing.assert_array_equal(qq.magnitude, x * m)
self.assertEqual(qq.units, ureg.meter.units)
qq = m * q
self.assertIsInstance(qq, ureg.Quantity)
np.testing.assert_array_equal(qq.magnitude, x * m)
self.assertEqual(qq.units, ureg.meter.units)
@unittest.expectedFailure
@helpers.requires_numpy()
def test_issue39(self):
x = np.matrix([[1, 2, 3], [1, 2, 3], [1, 2, 3]])
q = ureg.meter * x
self.assertIsInstance(q, ureg.Quantity)
np.testing.assert_array_equal(q.magnitude, x)
self.assertEqual(q.units, ureg.meter.units)
q = x * ureg.meter
self.assertIsInstance(q, ureg.Quantity)
np.testing.assert_array_equal(q.magnitude, x)
self.assertEqual(q.units, ureg.meter.units)
m = np.matrix(2 * np.ones(3, 3))
qq = q * m
self.assertIsInstance(qq, ureg.Quantity)
np.testing.assert_array_equal(qq.magnitude, x * m)
self.assertEqual(qq.units, ureg.meter.units)
qq = m * q
self.assertIsInstance(qq, ureg.Quantity)
np.testing.assert_array_equal(qq.magnitude, x * m)
self.assertEqual(qq.units, ureg.meter.units)
@helpers.requires_numpy()
def test_issue44(self):
x = 4.0 * ureg.dimensionless
np.sqrt(x)
self.assertQuantityAlmostEqual(
np.sqrt([4.0] * ureg.dimensionless), [2.0] * ureg.dimensionless
)
self.assertQuantityAlmostEqual(
np.sqrt(4.0 * ureg.dimensionless), 2.0 * ureg.dimensionless
)
def test_issue45(self):
import math
self.assertAlmostEqual(math.sqrt(4 * ureg.m / ureg.cm), math.sqrt(4 * 100))
self.assertAlmostEqual(float(ureg.V / ureg.mV), 1000.0)
@helpers.requires_numpy()
def test_issue45b(self):
self.assertAlmostEqual(
np.sin([np.pi / 2] * ureg.m / ureg.m),
np.sin([np.pi / 2] * ureg.dimensionless),
)
self.assertAlmostEqual(
np.sin([np.pi / 2] * ureg.cm / ureg.m),
np.sin([np.pi / 2] * ureg.dimensionless * 0.01),
)
def test_issue50(self):
Q_ = ureg.Quantity
self.assertEqual(Q_(100), 100 * ureg.dimensionless)
self.assertEqual(Q_("100"), 100 * ureg.dimensionless)
def test_issue52(self):
u1 = UnitRegistry()
u2 = UnitRegistry()
q1 = 1 * u1.meter
q2 = 1 * u2.meter
import operator as op
for fun in (
op.add,
op.iadd,
op.sub,
op.isub,
op.mul,
op.imul,
op.floordiv,
op.ifloordiv,
op.truediv,
op.itruediv,
):
self.assertRaises(ValueError, fun, q1, q2)
def test_issue54(self):
self.assertEqual((1 * ureg.km / ureg.m + 1).magnitude, 1001)
def test_issue54_related(self):
self.assertEqual(ureg.km / ureg.m, 1000)
self.assertEqual(1000, ureg.km / ureg.m)
self.assertLess(900, ureg.km / ureg.m)
self.assertGreater(1100, ureg.km / ureg.m)
def test_issue61(self):
Q_ = ureg.Quantity
for value in ({}, {"a": 3}, None):
self.assertRaises(TypeError, Q_, value)
self.assertRaises(TypeError, Q_, value, "meter")
self.assertRaises(ValueError, Q_, "", "meter")
self.assertRaises(ValueError, Q_, "")
@helpers.requires_not_numpy()
def test_issue61_notNP(self):
Q_ = ureg.Quantity
for value in ([1, 2, 3], (1, 2, 3)):
self.assertRaises(TypeError, Q_, value)
self.assertRaises(TypeError, Q_, value, "meter")
def test_issue62(self):
m = ureg("m**0.5")
self.assertEqual(str(m.units), "meter ** 0.5")
def test_issue66(self):
self.assertEqual(
ureg.get_dimensionality(UnitsContainer({"[temperature]": 1})),
UnitsContainer({"[temperature]": 1}),
)
self.assertEqual(
ureg.get_dimensionality(ureg.kelvin), UnitsContainer({"[temperature]": 1})
)
self.assertEqual(
ureg.get_dimensionality(ureg.degC), UnitsContainer({"[temperature]": 1})
)
def test_issue66b(self):
self.assertEqual(
ureg.get_base_units(ureg.kelvin),
(1.0, ureg.Unit(UnitsContainer({"kelvin": 1}))),
)
self.assertEqual(
ureg.get_base_units(ureg.degC),
(1.0, ureg.Unit(UnitsContainer({"kelvin": 1}))),
)
def test_issue69(self):
q = ureg("m").to(ureg("in"))
self.assertEqual(q, ureg("m").to("in"))
@helpers.requires_numpy()
def test_issue74(self):
v1 = np.asarray([1.0, 2.0, 3.0])
v2 = np.asarray([3.0, 2.0, 1.0])
q1 = v1 * ureg.ms
q2 = v2 * ureg.ms
np.testing.assert_array_equal(q1 < q2, v1 < v2)
np.testing.assert_array_equal(q1 > q2, v1 > v2)
np.testing.assert_array_equal(q1 <= q2, v1 <= v2)
np.testing.assert_array_equal(q1 >= q2, v1 >= v2)
q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s
v2s = q2s.to("ms").magnitude
np.testing.assert_array_equal(q1 < q2s, v1 < v2s)
np.testing.assert_array_equal(q1 > q2s, v1 > v2s)
np.testing.assert_array_equal(q1 <= q2s, v1 <= v2s)
np.testing.assert_array_equal(q1 >= q2s, v1 >= v2s)
@helpers.requires_numpy()
def test_issue75(self):
v1 = np.asarray([1.0, 2.0, 3.0])
v2 = np.asarray([3.0, 2.0, 1.0])
q1 = v1 * ureg.ms
q2 = v2 * ureg.ms
np.testing.assert_array_equal(q1 == q2, v1 == v2)
np.testing.assert_array_equal(q1 != q2, v1 != v2)
q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s
v2s = q2s.to("ms").magnitude
np.testing.assert_array_equal(q1 == q2s, v1 == v2s)
np.testing.assert_array_equal(q1 != q2s, v1 != v2s)
@helpers.requires_uncertainties()
def test_issue77(self):
acc = (5.0 * ureg("m/s/s")).plus_minus(0.25)
tim = (37.0 * ureg("s")).plus_minus(0.16)
dis = acc * tim ** 2 / 2
self.assertEqual(dis.value, acc.value * tim.value ** 2 / 2)
def test_issue85(self):
T = 4.0 * ureg.kelvin
m = 1.0 * ureg.amu
va = 2.0 * ureg.k * T / m
va.to_base_units()
boltmk = 1.380649e-23 * ureg.J / ureg.K
vb = 2.0 * boltmk * T / m
self.assertQuantityAlmostEqual(va.to_base_units(), vb.to_base_units())
def test_issue86(self):
ureg = self.ureg
ureg.autoconvert_offset_to_baseunit = True
def parts(q):
return q.magnitude, q.units
q1 = 10.0 * ureg.degC
q2 = 10.0 * ureg.kelvin
k1 = q1.to_base_units()
q3 = 3.0 * ureg.meter
q1m, q1u = parts(q1)
q2m, q2u = parts(q2)
q3m, q3u = parts(q3)
k1m, k1u = parts(k1)
self.assertEqual(parts(q2 * q3), (q2m * q3m, q2u * q3u))
self.assertEqual(parts(q2 / q3), (q2m / q3m, q2u / q3u))
self.assertEqual(parts(q3 * q2), (q3m * q2m, q3u * q2u))
self.assertEqual(parts(q3 / q2), (q3m / q2m, q3u / q2u))
self.assertEqual(parts(q2 ** 1), (q2m ** 1, q2u ** 1))
self.assertEqual(parts(q2 ** -1), (q2m ** -1, q2u ** -1))
self.assertEqual(parts(q2 ** 2), (q2m ** 2, q2u ** 2))
self.assertEqual(parts(q2 ** -2), (q2m ** -2, q2u ** -2))
self.assertEqual(parts(q1 * q3), (k1m * q3m, k1u * q3u))
self.assertEqual(parts(q1 / q3), (k1m / q3m, k1u / q3u))
self.assertEqual(parts(q3 * q1), (q3m * k1m, q3u * k1u))
self.assertEqual(parts(q3 / q1), (q3m / k1m, q3u / k1u))
self.assertEqual(parts(q1 ** -1), (k1m ** -1, k1u ** -1))
self.assertEqual(parts(q1 ** 2), (k1m ** 2, k1u ** 2))
self.assertEqual(parts(q1 ** -2), (k1m ** -2, k1u ** -2))
def test_issues86b(self):
ureg = self.ureg
T1 = 200.0 * ureg.degC
T2 = T1.to(ureg.kelvin)
m = 132.9054519 * ureg.amu
v1 = 2 * ureg.k * T1 / m
v2 = 2 * ureg.k * T2 / m
self.assertQuantityAlmostEqual(v1, v2)
self.assertQuantityAlmostEqual(v1, v2.to_base_units())
self.assertQuantityAlmostEqual(v1.to_base_units(), v2)
self.assertQuantityAlmostEqual(v1.to_base_units(), v2.to_base_units())
@unittest.expectedFailure
def test_issue86c(self):
ureg = self.ureg
ureg.autoconvert_offset_to_baseunit = True
T = ureg.degC
T = 100.0 * T
self.assertQuantityAlmostEqual(ureg.k * 2 * T, ureg.k * (2 * T))
def test_issue93(self):
x = 5 * ureg.meter
self.assertIsInstance(x.magnitude, int)
y = 0.1 * ureg.meter
self.assertIsInstance(y.magnitude, float)
z = 5 * ureg.meter
self.assertIsInstance(z.magnitude, int)
z += y
self.assertIsInstance(z.magnitude, float)
self.assertQuantityAlmostEqual(x + y, 5.1 * ureg.meter)
self.assertQuantityAlmostEqual(z, 5.1 * ureg.meter)
def test_issue104(self):
x = [ureg("1 meter"), ureg("1 meter"), ureg("1 meter")]
y = [ureg("1 meter")] * 3
def summer(values):
if not values:
return 0
total = values[0]
for v in values[1:]:
total += v
return total
self.assertQuantityAlmostEqual(summer(x), ureg.Quantity(3, "meter"))
self.assertQuantityAlmostEqual(x[0], ureg.Quantity(1, "meter"))
self.assertQuantityAlmostEqual(summer(y), ureg.Quantity(3, "meter"))
self.assertQuantityAlmostEqual(y[0], ureg.Quantity(1, "meter"))
def test_issue105(self):
func = ureg.parse_unit_name
val = list(func("meter"))
self.assertEqual(list(func("METER")), [])
self.assertEqual(val, list(func("METER", False)))
for func in (ureg.get_name, ureg.parse_expression):
val = func("meter")
with self.assertRaises(AttributeError):
func("METER")
self.assertEqual(val, func("METER", False))
    @helpers.requires_numpy()
    def test_issue127(self):
        """Issue #127 regression: NaN can be assigned into an array quantity
        regardless of how the NaN is spelled."""
        q = [1.0, 2.0, 3.0, 4.0] * self.ureg.meter
        q[0] = np.nan
        self.assertNotEqual(q[0], 1.0)
        self.assertTrue(math.isnan(q[0].magnitude))
        q[1] = float("NaN")
        self.assertNotEqual(q[1], 2.0)
        self.assertTrue(math.isnan(q[1].magnitude))
    def test_issue170(self):
        """Issue #170 regression: ``int()`` of a dimensionless ratio yields a
        plain Python int."""
        Q_ = UnitRegistry().Quantity
        q = Q_("1 kHz") / Q_("100 Hz")
        iq = int(q)
        self.assertEqual(iq, 10)
        self.assertIsInstance(iq, int)
    def test_angstrom_creation(self):
        """The angstrom symbol (U+00C5 form) is accepted as a unit name."""
        ureg.Quantity(2, "Å")
    def test_alternative_angstrom_definition(self):
        """The alternative ANGSTROM SIGN codepoint (U+212B) is also accepted."""
        ureg.Quantity(2, "\u212B")
    def test_micro_creation(self):
        """The micro sign prefix (µ) is accepted in a unit string."""
        ureg.Quantity(2, "µm")
    @helpers.requires_numpy()
    def test_issue171_real_imag(self):
        """Issue #171 regression: ``.real`` and ``.imag`` of a complex array
        quantity keep the units."""
        qr = [1.0, 2.0, 3.0, 4.0] * self.ureg.meter
        qi = [4.0, 3.0, 2.0, 1.0] * self.ureg.meter
        q = qr + 1j * qi
        self.assertQuantityEqual(q.real, qr)
        self.assertQuantityEqual(q.imag, qi)
    @helpers.requires_numpy()
    def test_issue171_T(self):
        """Issue #171 regression: transposing an array quantity matches the
        quantity built from the transposed array."""
        a = np.asarray([[1.0, 2.0, 3.0, 4.0], [4.0, 3.0, 2.0, 1.0]])
        q1 = a * self.ureg.meter
        q2 = a.T * self.ureg.meter
        self.assertQuantityEqual(q1.T, q2)
    @helpers.requires_numpy()
    def test_issue250(self):
        """Issue #250 regression: numpy float conversions of a unit ratio
        (V / mV) give the numeric conversion factor."""
        a = self.ureg.V
        b = self.ureg.mV
        self.assertEqual(np.float16(a / b), 1000.0)
        self.assertEqual(np.float32(a / b), 1000.0)
        self.assertEqual(np.float64(a / b), 1000.0)
        # float128 is platform dependent, so probe for it first.
        if "float128" in dir(np):
            self.assertEqual(np.float128(a / b), 1000.0)
    def test_issue252(self):
        """Issue #252 regression: a deep-copied quantity converts the same as
        the original."""
        ur = UnitRegistry()
        q = ur("3 F")
        t = copy.deepcopy(q)
        u = t.to(ur.mF)
        self.assertQuantityEqual(q.to(ur.mF), u)
    def test_issue323(self):
        """Issue #323 regression: Fraction magnitudes survive unit conversion
        exactly."""
        from fractions import Fraction as F

        self.assertEqual((self.Q_(F(2, 3), "s")).to("ms"), self.Q_(F(2000, 3), "ms"))
        self.assertEqual((self.Q_(F(2, 3), "m")).to("km"), self.Q_(F(1, 1500), "km"))
    def test_issue339(self):
        """Issue #339 regression: parsing an empty string gives the
        dimensionless unit quantity 1."""
        q1 = self.ureg("")
        self.assertEqual(q1.magnitude, 1)
        self.assertEqual(q1.units, self.ureg.dimensionless)
        q2 = self.ureg("1 dimensionless")
        self.assertEqual(q1, q2)
    def test_issue354_356_370(self):
        """Issues #354/#356/#370 regression: short (``~``) formatting of
        compound, count and binary-prefix units."""
        self.assertEqual(
            "{:~}".format(1 * self.ureg.second / self.ureg.millisecond), "1.0 s / ms"
        )
        self.assertEqual("{:~}".format(1 * self.ureg.count), "1 count")
        self.assertEqual("{:~}".format(1 * self.ureg("MiB")), "1 MiB")
    def test_issue468(self):
        """Issue #468 regression: a ``wraps``-decorated function returns a
        quantity in the declared output unit."""
        @ureg.wraps(("kg"), "meter")
        def f(x):
            return x

        x = ureg.Quantity(1.0, "meter")
        y = f(x)
        z = x * y
        self.assertEqual(z, ureg.Quantity(1.0, "meter * kilogram"))
    @helpers.requires_numpy()
    def test_issue482(self):
        """Issue #482 regression: ``np.exp`` of a dimensionless quantity
        returns a Quantity."""
        q = self.ureg.Quantity(1, self.ureg.dimensionless)
        qe = np.exp(q)
        self.assertIsInstance(qe, self.ureg.Quantity)
    @helpers.requires_numpy()
    def test_issue483(self):
        """Issue #483 regression: elementwise ``q ** q`` for dimensionless
        array quantities matches the plain numpy result."""
        ureg = self.ureg
        a = np.asarray([1, 2, 3])
        q = [1, 2, 3] * ureg.dimensionless
        p = (q ** q).m
        np.testing.assert_array_equal(p, a ** a)
    def test_issue507(self):
        """Issue #507 regression: underscore-prefixed definitions are usable
        in expressions but not exposed as registry attributes."""
        # leading underscore in definition of private units
        ureg.define("_100km = 100 * kilometer")
        battery_ec = 16 * ureg.kWh / ureg._100km  # property-style access works
        ureg.define("_home = 4700 * kWh / year")
        with self.assertRaises(AttributeError):
            home_elec_power = 1 * ureg._home  # single name must not resolve
        ureg.define("_ = 45 * km")
        with self.assertRaises(AttributeError):
            one_blank = 1 * ureg._  # bare underscore must not resolve
    def test_issue523(self):
        """Issue #523 regression: converting between incompatible dimensions
        (length <-> offset temperature) raises DimensionalityError both ways."""
        src, dst = UnitsContainer({"meter": 1}), UnitsContainer({"degF": 1})
        value = 10.0
        convert = self.ureg.convert
        self.assertRaises(DimensionalityError, convert, value, src, dst)
        self.assertRaises(DimensionalityError, convert, value, dst, src)
    def test_issue532(self):
        """Issue #532 regression: ``@check`` with a dimensionless spec accepts
        dimensionless input and rejects dimensional input."""
        ureg = self.ureg

        @ureg.check(ureg(""))
        def f(x):
            return 2 * x

        self.assertEqual(f(ureg.Quantity(1, "")), 2)
        self.assertRaises(DimensionalityError, f, ureg.Quantity(1, "m"))
    def test_issue625a(self):
        """Issue #625 regression (a): ``wraps`` handles a Quantity default
        argument value."""
        Q_ = ureg.Quantity
        from math import sqrt

        @ureg.wraps(ureg.second, (ureg.meters, ureg.meters / ureg.second ** 2))
        def calculate_time_to_fall(height, gravity=Q_(9.8, "m/s^2")):
            return sqrt(2 * height / gravity)

        lunar_module_height = Q_(10, "m")
        t1 = calculate_time_to_fall(lunar_module_height)
        print(t1)
        self.assertAlmostEqual(t1, Q_(1.4285714285714286, "s"))
        moon_gravity = Q_(1.625, "m/s^2")
        t2 = calculate_time_to_fall(lunar_module_height, moon_gravity)
        self.assertAlmostEqual(t2, Q_(3.508232077228117, "s"))
    def test_issue625b(self):
        """Issue #625 regression (b): ``wraps`` with symbolic ("=A", "=B")
        unit specs and a Quantity default."""
        Q_ = ureg.Quantity

        @ureg.wraps("=A*B", ("=A", "=B"))
        def get_displacement(time, rate=Q_(1, "m/s")):
            return time * rate

        d1 = get_displacement(Q_(2, "s"))
        self.assertAlmostEqual(d1, Q_(2, "m"))
        d2 = get_displacement(Q_(2, "s"), Q_(1, "deg/s"))
        self.assertAlmostEqual(d2, Q_(2, " deg"))
    def test_issue625c(self):
        """Issue #625 regression (c): symbolic ``wraps`` specs combine with
        keyword overrides and quantity defaults."""
        u = UnitRegistry()

        @u.wraps("=A*B*C", ("=A", "=B", "=C"))
        def get_product(a=2 * u.m, b=3 * u.m, c=5 * u.m):
            return a * b * c

        self.assertEqual(get_product(a=3 * u.m), 45 * u.m ** 3)
        self.assertEqual(get_product(b=2 * u.m), 20 * u.m ** 3)
        self.assertEqual(get_product(c=1 * u.dimensionless), 6 * u.m ** 2)
    def test_issue655a(self):
        """Issue #655 regression (a): ``check`` accepts compound
        dimensionality expressions."""
        distance = 1 * ureg.m
        time = 1 * ureg.s
        velocity = distance / time
        self.assertEqual(distance.check("[length]"), True)
        self.assertEqual(distance.check("[time]"), False)
        self.assertEqual(velocity.check("[length] / [time]"), True)
        self.assertEqual(velocity.check("1 / [time] * [length]"), True)
    def test_issue655b(self):
        """Issue #655 regression (b): ``@check`` validates defaulted Quantity
        arguments against their dimensionality spec."""
        Q_ = ureg.Quantity

        @ureg.check("[length]", "[length]/[time]^2")
        def pendulum_period(length, G=Q_(1, "standard_gravity")):
            print(length)
            return (2 * math.pi * (length / G) ** 0.5).to("s")

        length = Q_(1, ureg.m)
        # Default (standard) gravity.
        t = pendulum_period(length)
        self.assertAlmostEqual(t, Q_("2.0064092925890407 second"))
        # Explicit override with lunar gravity.
        moon_gravity = Q_(1.625, "m/s^2")
        t = pendulum_period(length, moon_gravity)
        self.assertAlmostEqual(t, Q_("4.928936075204336 second"))
    def test_issue783(self):
        """Issue #783 regression: comparing a quantity with a list is False,
        not an error."""
        assert not ureg("g") == []
    def test_issue856(self):
        """Issue #856 regression: ParserHelper and UnitRegistry survive
        ``copy.deepcopy``."""
        ph1 = ParserHelper(scale=123)
        ph2 = copy.deepcopy(ph1)
        assert ph2.scale == ph1.scale
        ureg1 = UnitRegistry()
        ureg2 = copy.deepcopy(ureg1)
        # Conversion works on the copy ("t" is a unit understood by ureg2).
        assert ureg2("1 t").to("kg").magnitude == 1000
    def test_issue856b(self):
        """Issue #856 regression (b): a deep-copied registry is independent —
        defining the same name in each copy keeps separate values."""
        ureg1 = UnitRegistry()
        ureg2 = copy.deepcopy(ureg1)
        ureg1.define("test123 = 123 kg")
        ureg2.define("test123 = 456 kg")
        assert ureg1("1 test123").to("kg").magnitude == 123
        assert ureg2("1 test123").to("kg").magnitude == 456
    def test_issue876(self):
        """Issue #876 regression: UnitsContainer hashing follows the hashes of
        its exponents (equal int hashes imply equal container hashes) while
        equality still distinguishes the containers."""
        a = UnitsContainer({"[mass]": -1})
        b = UnitsContainer({"[mass]": -2})
        c = UnitsContainer({"[mass]": -3})
        assert (hash(-1) == hash(-2)) == (hash(a) == hash(b))
        assert (hash(-1) == hash(-3)) == (hash(a) == hash(c))
        assert a != b
        assert a != c
    def test_issue902(self):
        """Issue #902 regression: ``auto_reduce_dimensions`` collapses the
        prefixed unit into the scale factor."""
        ureg = UnitRegistry(auto_reduce_dimensions=True)
        velocity = 1 * ureg.m / ureg.s
        cross_section = 1 * ureg.um ** 2
        result = cross_section / velocity
        assert result == 1e-12 * ureg.m * ureg.s
    def test_issue912(self):
        """Issue #912 regression: the union of compatible-unit sets can be
        pretty-printed without error."""
        meter_units = ureg.get_compatible_units(ureg.meter)
        hertz_units = ureg.get_compatible_units(ureg.hertz)
        pprint.pformat(meter_units | hertz_units)
    def test_issue932(self):
        """Issue #932 regression: enabling/disabling many contexts toggles the
        availability of the mass<->energy conversion."""
        q = ureg.Quantity("1 kg")
        with self.assertRaises(DimensionalityError):
            q.to("joule")
        ureg.enable_contexts("energy", *(Context() for _ in range(20)))
        q.to("joule")
        ureg.disable_contexts()
        with self.assertRaises(DimensionalityError):
            q.to("joule")
    def test_issue960(self):
        """Issue #960 regression: ``to_compact`` keeps the already-compact
        unit instead of scaling up to the requested one."""
        q = (1 * ureg.nanometer).to_compact("micrometer")
        assert q.units == ureg.nanometer
        assert q.magnitude == 1
    def test_issue1032(self):
        """Issue #1032 regression: Quantity defers left-multiplication to a
        type implementing ``__rmul__`` but rejects the reverse order."""
        class MultiplicativeDictionary(dict):
            def __rmul__(self, other):
                # Scale every value by the left operand.
                return self.__class__(
                    {key: value * other for key, value in self.items()}
                )

        q = 3 * ureg.s
        d = MultiplicativeDictionary({4: 5, 6: 7})
        assert q * d == MultiplicativeDictionary({4: 15 * ureg.s, 6: 21 * ureg.s})
        with self.assertRaises(TypeError):
            d * q
    @helpers.requires_numpy()
    def test_issue973(self):
        """Issue #973 regression: multiplying an empty array by a unit (either
        attribute or parsed form) still yields an empty Quantity."""
        q0 = np.array([]) * ureg.m
        q1 = np.array([]) * ureg("m")
        assert isinstance(q0, ureg.Quantity)
        assert isinstance(q1, ureg.Quantity)
        assert len(q0) == len(q1) == 0
    def test_issue1062_issue1097(self):
        """Issues #1062/#1097 regression: repeatedly entering a context built
        from lines works and does not pollute the registry's unit table."""
        assert "nanometer" not in ureg._units
        for i in range(5):
            ctx = Context.from_lines(["@context _", "cal = 4 J"])
            with ureg.context("sp", ctx):
                q = ureg.Quantity(1, "nm")
                q.to("J")
    def test_issue1086(self):
        """Issue #1086 regression: ``in`` membership on the registry resolves
        plurals and prefixes, but not unknown names."""
        # Units with prefixes and plural forms resolve.
        assert "bits" in ureg
        assert "gigabits" in ureg
        assert "meters" in ureg
        assert "kilometers" in ureg
        # Unknown combinations do not.
        assert "magicbits" not in ureg
        assert "unknownmeters" not in ureg
        assert "gigatrees" not in ureg
    def test_issue1112(self):
        """Issue #1112 regression: contexts defined inline in a registry
        definition string can all be enabled without error."""
        ureg = UnitRegistry(
            """
            m = [length]
            g = [mass]
            s = [time]
            ft = 0.305 m
            lb = 454 g
            @context c1
                [time]->[length] : value * 10 m/s
            @end
            @context c2
                ft = 0.3 m
            @end
            @context c3
                lb = 500 g
            @end
            """.splitlines()
        )
        ureg.enable_contexts("c1")
        ureg.enable_contexts("c2")
        ureg.enable_contexts("c3")
# Issue #925 regression: numpy is optional, so the parametrized test is only
# defined when numpy imported successfully.
if np is not None:

    @pytest.mark.parametrize(
        "callable",
        [
            lambda x: np.sin(x / x.units),
            lambda x: np.cos(x / x.units),
            np.isfinite,
            np.shape,
            np.size,
            np.sqrt,
            lambda x: x.mean(),
            lambda x: x.copy(),
            np.array,
            lambda x: x.conjugate,
        ],
    )
    @pytest.mark.parametrize(
        "q",
        [
            pytest.param(ureg.Quantity(1, "m"), id="python scalar int"),
            pytest.param(ureg.Quantity([1, 2, 3, 4], "m"), id="array int"),
            pytest.param(ureg.Quantity([1], "m")[0], id="numpy scalar int"),
            pytest.param(ureg.Quantity(1.0, "m"), id="python scalar float"),
            pytest.param(ureg.Quantity([1.0, 2.0, 3.0, 4.0], "m"), id="array float"),
            pytest.param(ureg.Quantity([1.0], "m")[0], id="numpy scalar float"),
        ],
    )
    def test_issue925(callable, q):
        """Applying numpy callables must not mutate the magnitude's type."""
        type_before = type(q._magnitude)
        callable(q)
        assert isinstance(q._magnitude, type_before)
| true | true |
f71ce2275013c6a304e74eeda9837d9d22b586d7 | 16,664 | py | Python | MaxiNet/WorkerServer/server.py | richartkeil/MaxiNet | f02524ad131fb0464e35e35a05c5b6d1a457cf8d | [
"MIT"
] | 81 | 2015-04-07T10:17:42.000Z | 2022-03-24T17:24:59.000Z | MaxiNet/WorkerServer/server.py | richartkeil/MaxiNet | f02524ad131fb0464e35e35a05c5b6d1a457cf8d | [
"MIT"
] | 44 | 2015-03-10T13:23:07.000Z | 2022-01-08T12:48:40.000Z | MaxiNet/WorkerServer/server.py | richartkeil/MaxiNet | f02524ad131fb0464e35e35a05c5b6d1a457cf8d | [
"MIT"
] | 46 | 2015-12-06T13:21:23.000Z | 2022-02-19T01:27:05.000Z | #!/usr/bin/python2
import argparse
import atexit
import logging
import os
import signal
import subprocess
import sys
import tempfile
import time
from mininet.node import UserSwitch, OVSSwitch
from mininet.link import Link, TCIntf
import mininet.term
import Pyro4
import threading
import traceback
from MaxiNet.tools import Tools, MaxiNetConfig
from MaxiNet.WorkerServer.ssh_manager import SSH_Manager
try:
from mininet.net import Containernet as Mininet
except ImportError:
from mininet.net import Mininet
class WorkerServer(object):
    """Manages the Worker.

    The WorkerServer connects to the Pyro4 nameserver, registers itself (and
    its MininetManager / SSH_Manager helpers) with the MaxiNetManager
    instance, and is then driven remotely by Cluster instances to start
    mininet instances, manage the ssh daemon and run shell commands.

    NOTE: this module is Python 2 code (print statements, old except syntax).

    Attributes:
        logger: logging instance
        mnManager: MininetManager used to create/destroy mininet instances
        sshManager: SSH_Manager used to manage the worker-local ssh daemon
        ssh_folder: temp folder holding configuration files for the ssh daemon
        ip: ip address of this Worker as known to the frontend
    """

    def __init__(self):
        # Pyro4 plumbing; populated by start().
        self._ns = None
        self._pyrodaemon = None
        self.logger = logging.getLogger(__name__)
        self._manager = None
        self.mnManager = MininetManager()
        self.sshManager = None
        # Temp dir for sshd config; removed again on interpreter exit.
        self.ssh_folder = tempfile.mkdtemp()
        atexit.register(subprocess.call, ["rm", "-rf", self.ssh_folder])
        logging.basicConfig(level=logging.DEBUG)
        self.ip = None
        self._shutdown = False
        #Pyro4.config.COMMTIMEOUT = 2
        #for frontend
        self._ip = None
        self._port = None
        self._password = None
        self._looping_thread = None

    def exit_handler(self, signal, frame):
        # SIGINT handler: flag shutdown so monitorFrontend stops looping.
        # I have absolutely no clue why but without this print atexit sometimes
        # doesn't seem to wait for called functions to finish...
        print "exiting..."
        self._shutdown = True
        sys.exit()

    @Pyro4.expose
    def monitorFrontend(self):
        """ function to monitor if the frontend is still alive.
            if not, try to reconnect.

        Blocks forever (polls every 5 s) until exit_handler sets _shutdown.
        The broad excepts are deliberate: during reconnect every cleanup step
        is best-effort and must not abort the retry loop.
        """
        while(not self._shutdown):
            try:
                self._manager.getStatus()
            except:
                if self._ip != None:
                    #self.ip as an indicator that this worker was connected to the frontend once.
                    print "Trying to reconnect to FrontendServer..."
                    try:
                        # Tear down any stale Pyro registrations/daemon state
                        # before re-running the full start() handshake.
                        try:
                            self._pyrodaemon.unregister(self)
                        except:
                            pass
                        try:
                            self._pyrodaemon.unregister(self.mnManager)
                        except:
                            pass
                        try:
                            self._pyrodaemon.unregister(self.sshManager)
                        except:
                            pass
                        try:
                            self._pyrodaemon.shutdown()
                        except:
                            pass
                        try:
                            self._pyrodaemon.close()
                        except:
                            pass
                        self.start(self._ip, self._port, self._password)
                    except Exception as e:
                        traceback.print_exc(e)
                        pass
                pass
            time.sleep(5)

    @Pyro4.expose
    def start(self, ip, port, password, retry=float("inf")):
        """Start WorkerServer and ssh daemon and connect to nameserver.

        Args:
            ip: frontend nameserver ip
            port: frontend nameserver port
            password: HMAC key shared with the frontend
            retry: maximum number of nameserver-lookup attempts (default:
                retry forever; one attempt every 5 seconds)
        """
        self.logger.info("starting up and connecting to  %s:%d"
                         % (ip, port))

        #store for reconnection attempts
        self._ip = ip
        self._port = port
        self._password = password

        #Pyro4.config.HMAC_KEY = password
        tries=1
        self._ns = None
        while not self._ns:
            try:
                self._ns = Pyro4.locateNS(ip, port, hmac_key=password)
            except Pyro4.errors.NamingError:
                if tries < retry:
                    self.logger.warn("Unable to locate Nameserver. Trying again in 5 seconds...")
                    time.sleep(5)
                    tries += 1
                else:
                    self.logger.error("Unable to locate Nameserver.")
                    sys.exit()
        self.config = Pyro4.Proxy(self._ns.lookup("config"))
        self.config._pyroHmacKey=password
        self.ip = self.config.get_worker_ip(self.get_hostname())
        if(not self.ip):
            # Frontend config doesn't know this host; guess our IP and
            # register it in the shared config so the frontend can reach us.
            self.ip = Tools.guess_ip()
            if not self.config.has_section(self.get_hostname()):
                self.config.add_section(self.get_hostname())
            self.config.set(self.get_hostname(), "ip", self.ip)
            self.logger.warn("""FrontendServer did not know IP of this host (check configuration for hostname).
                             Guessed: %s""" % self.ip)
        self.logger.info("configuring and starting ssh daemon...")
        self.sshManager = SSH_Manager(folder=self.ssh_folder, ip=self.ip, port=self.config.get_sshd_port(), user=self.config.get("all", "sshuser"))
        self.sshManager.start_sshd()
        self._pyrodaemon = Pyro4.Daemon(host=self.ip)
        self._pyrodaemon._pyroHmacKey=password
        # Register self plus both helper managers under predictable names.
        uri = self._pyrodaemon.register(self)
        self._ns.register(self._get_pyroname(), uri)
        uri = self._pyrodaemon.register(self.mnManager)
        self._ns.register(self._get_pyroname()+".mnManager", uri)
        uri = self._pyrodaemon.register(self.sshManager)
        self._ns.register(self._get_pyroname()+".sshManager", uri)
        atexit.register(self._stop)
        self.logger.info("looking for manager application...")
        manager_uri = self._ns.lookup("MaxiNetManager")
        if(manager_uri):
            self._manager = Pyro4.Proxy(manager_uri)
            self._manager._pyroHmacKey=self._password
            self.logger.info("signing in...")
            if(self._manager.worker_signin(self._get_pyroname(), self.get_hostname())):
                self.logger.info("done. Entering requestloop.")
                self._started = True
                # Serve Pyro requests on a daemon thread so start() returns.
                self._looping_thread = threading.Thread(target=self._pyrodaemon.requestLoop)
                self._looping_thread.daemon = True
                self._looping_thread.start()
            else:
                self.logger.error("signin failed.")
        else:
            self.logger.error("no manager found.")

    def _get_pyroname(self):
        # Nameserver key for this worker, derived from the hostname.
        return "MaxiNetWorker_%s" % self.get_hostname()

    @Pyro4.expose
    def get_hostname(self):
        """Return this machine's hostname (stripped `hostname` output)."""
        return subprocess.check_output(["hostname"]).strip()

    def _stop(self):
        # Sign out from the manager and undo all Pyro registrations.
        self.logger.info("signing out...")
        if(self._manager):
            self._manager.worker_signout(self.get_hostname())
        self.logger.info("shutting down...")
        self._ns.remove(self._get_pyroname())
        self._ns.remove(self._get_pyroname()+".mnManager")
        self._pyrodaemon.unregister(self)
        self._pyrodaemon.unregister(self.mnManager)
        self._pyrodaemon.unregister(self.sshManager)
        self._pyrodaemon.shutdown()
        self._pyrodaemon.close()

    @Pyro4.expose
    def remoteShutdown(self):
        """Shut down the Pyro daemon (remote-triggered)."""
        self._pyrodaemon.shutdown()

    @Pyro4.expose
    def stop(self):
        """Stop the worker unless it is still assigned to an experiment.

        Returns:
            True if the worker shut down, False if it is still assigned.
        """
        (signedin, assigned) = self._manager.get_worker_status(self.get_hostname())
        if(assigned):
            self.logger.warn("can't shut down as worker is still assigned to id %d" % assigned)
            return False
        else:
            self._stop()
            return True

    @Pyro4.expose
    def check_output(self, cmd):
        """Run cmd on Worker and return output

        Args:
            cmd: command to call with optional parameters

        Returns:
            Shell output of command (stderr merged into stdout, stripped)
        """
        self.logger.debug("Executing %s" % cmd)
        return subprocess.check_output(cmd, shell=True,
                                       stderr=subprocess.STDOUT).strip()

    @Pyro4.expose
    def script_check_output(self, cmd):
        """Call MaxiNet Script and return output

        Args:
            cmd: name of script to call

        Returns:
            Shell output of script
        """
        # Prefix command by our worker directory
        cmd = Tools.get_script_dir() + cmd
        return self.check_output(cmd)

    @Pyro4.expose
    def run_cmd(self, command):
        """Call command (blocking)

        Args:
            command: command to call with optional parameters
        """
        subprocess.call(command, shell=True)

    @Pyro4.expose
    def daemonize(self, cmd):
        """Call command (non-blocking)

        The spawned process is terminated on interpreter exit.

        Args:
            cmd: command to call with optional parameters
        """
        p = subprocess.Popen(cmd, shell=True)
        atexit.register(p.terminate)

    @Pyro4.expose
    def daemonize_script(self, script, args):
        """Call MaxiNet Script (non-blocking)

        The spawned process is terminated on interpreter exit.

        Args:
            script: name of script to call
            args: argument string appended to the script invocation
        """
        cmd = Tools.get_script_dir()+script+" "+args
        p = subprocess.Popen(cmd, shell=True)
        atexit.register(p.terminate)
class TCLinkParams(Link):
    """Link with symmetric TC interfaces.

    Like the mininet TCLink class but with support of the params1
    and params2 arguments, which are forwarded to the per-endpoint
    TCIntf instances.
    """

    def __init__(self, node1, node2, port1=None, port2=None,
                 intfName1=None, intfName2=None,
                 addr1=None, addr2=None, params1=None,
                 params2=None, **kvargs):
        # Delegate to Link, forcing TCIntf on both ends so traffic-control
        # parameters (bw, delay, ...) in params1/params2 take effect.
        # NOTE: extra **kvargs are accepted but intentionally not forwarded.
        Link.__init__(self, node1, node2, port1=port1, port2=port2,
                      intfName1=intfName1, intfName2=intfName2,
                      cls1=TCIntf,
                      cls2=TCIntf,
                      addr1=addr1, addr2=addr2,
                      params1=params1,
                      params2=params2)
class MininetManager(object):
    """Pyro-exposed wrapper around a single worker-local Mininet instance.

    Holds at most one running Mininet network in ``self.net`` and offers
    remote methods to create/destroy it and to manipulate hosts, switches,
    links and tunnels inside it.
    """

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        # The currently running Mininet instance, or None.
        self.net = None

    @Pyro4.expose
    def create_mininet(self, topo, tunnels=[], switch=UserSwitch,
                       controller=None, STT=False):
        """Create (and start) a Mininet instance for *topo*.

        Args:
            topo: mininet topology object
            tunnels: list of (name, switch, opts) tuples describing
                inter-worker tunnels to attach
            switch: switch class to use
            controller: optional controller class/instance
            STT: if True, start the net *before* adding tunnels and attach
                them as OVS ports (STT tunneling)

        Returns:
            True on success.
        """
        if(not self.net is None):
            self.logger.warn("running mininet instance detected!\
                              Shutting it down...")
            self.destroy_mininet()

        self.logger.info("Creating mininet instance")
        try:
            if controller:
                self.net = Mininet(topo=topo, intf=TCIntf, link=TCLinkParams,
                                   switch=switch, controller=controller)
            else:
                self.net = Mininet(topo=topo, intf=TCIntf, link=TCLinkParams,
                                   switch=switch)
        except Exception, e:
            self.logger.error("Failed to create mininet instance: %s" % traceback.format_exc())
            raise e

        if STT:
            # STT tunnels must be added to a running network.
            self.logger.info("Starting Mininet...")
            self.net.start()

        self.logger.info("Adding tunnels to mininet instance")
        for tunnel in tunnels:
            port = None
            cls = None
            if "node1" not in tunnel[2].keys():
                self.logger.info("Error! node1 is missing in tunnel metadata")
            # Pick the port belonging to whichever endpoint lives on this
            # worker's topology.
            if tunnel[2]["node1"] in topo.nodes():
                port = tunnel[2]["port1"]
            else:
                port = tunnel[2]["port2"]
            if "cls" in tunnel[2].keys():
                cls = tunnel[2]["cls"]
                del tunnel[2]["cls"]
            self.addTunnel(tunnel[0], tunnel[1], port, cls, STT=STT, **tunnel[2])

        if not STT:
            self.logger.info("Starting Mininet...")
            self.net.start()
        self.logger.info("Startup complete.")
        self.x11popens = []
        return True

    @Pyro4.expose
    def destroy_mininet(self):
        """shut down mininet instance and any X11 tunnel processes"""
        if self.net:
            for popen in self.x11popens:
                popen.terminate()
                popen.communicate()
                popen.wait()
            self.net.stop()
            self.logger.info("mininet instance terminated")
            self.net = None

    @Pyro4.expose
    def configLinkStatus(self, src, dst, status):
        """Set the up/down status of the link between src and dst."""
        self.net.configLinkStatus(src, dst, status)

    @Pyro4.expose
    def rpc(self, hostname, cmd, *params1, **params2):
        """Invoke method *cmd* on mininet node *hostname* and return its result."""
        h = self.net.get(hostname)
        return getattr(h, cmd)(*params1, **params2)

    @Pyro4.expose
    def attr(self, hostname, name):
        """Return attribute *name* of mininet node *hostname*."""
        h = self.net.get(hostname)
        return getattr(h, name)

    @Pyro4.expose
    def addHost(self, name, cls=None, **params):
        """Add a host to the running net; returns the host name."""
        self.net.addHost(name, cls, **params)
        return name

    @Pyro4.expose
    def addSwitch(self, name, cls=None, **params):
        """Add (and start) a switch in the running net; returns its name."""
        self.net.addSwitch(name, cls, **params)
        #TODO: This should not be done here
        self.net.get(name).start(self.net.controllers)
        return name

    @Pyro4.expose
    def addController(self, name="c0", controller=None, **params):
        """Add a controller to the running net; returns its name."""
        self.net.addController(name, controller, **params)
        return name

    @Pyro4.expose
    def addTunnel(self, name, switch, port, intf, STT=False, **params):
        """Attach tunnel interface *name* to *switch*.

        For STT the interface is added as an OVS port; otherwise a (TC)
        interface object is created and bound to the switch node.
        """
        switch_i = self.net.get(switch)
        if not intf:
            intf = TCIntf
        if STT:
            subprocess.check_output(["ovs-vsctl","add-port", switch, name])
        else:
            intf(name, node=switch_i, port=port, link=None, **params)

    @Pyro4.expose
    def tunnelX11(self, node, display):
        """Open an X11 tunnel for *node*; the helper process is tracked so
        destroy_mininet can terminate it."""
        node = self.net.get(node)
        (tunnel, popen) = mininet.term.tunnelX11(node, display)
        self.x11popens.append(popen)

    @Pyro4.expose
    def addLink(self, node1, node2, port1=None, port2=None, cls=None,
                **params):
        """Add a link between two nodes.

        Returns:
            ((node1_name, intf1_name), (node2_name, intf2_name))
        """
        node1 = self.net.get(node1)
        node2 = self.net.get(node2)
        l = self.net.addLink(node1, node2, port1, port2, cls, **params)
        return ((node1.name, l.intf1.name), (node2.name, l.intf2.name))

    @Pyro4.expose
    def runCmdOnHost(self, hostname, command, noWait=False):
        '''Run a shell command on a mininet host.

        e.g. runCmdOnHost('h1', 'ifconfig')

        With noWait=True the command is sent without waiting for output.
        '''
        h1 = self.net.get(hostname)
        if noWait:
            return h1.sendCmd(command)
        else:
            return h1.cmd(command)
def getFrontendStatus():
    """Print the frontend's worker-status table (from MaxiNet.cfg settings)."""
    config = MaxiNetConfig(register=False)
    ip = config.get_nameserver_ip()
    port = config.get_nameserver_port()
    pw = config.get_nameserver_password()
    ns = Pyro4.locateNS(ip, port, hmac_key=pw)
    manager_uri = ns.lookup("MaxiNetManager")
    if(manager_uri):
        manager = Pyro4.Proxy(manager_uri)
        manager._pyroHmacKey=pw
        print manager.print_worker_status()
    else:
        print "Could not contact Frontend server at %s:%s" % (ip, port)
def main():
    """CLI entry point: parse args/config, then start and monitor the worker.

    Connection settings are read from a config file (explicit --config, or
    the first of ./MaxiNet.cfg, ~/.MaxiNet.cfg, /etc/MaxiNet.cfg) and may be
    overridden individually by --ip/--port/--password. Requires root.
    """
    parser = argparse.ArgumentParser(description="MaxiNet Worker which hosts a mininet instance")
    parser.add_argument("--ip", action="store", help="Frontend Server IP")
    parser.add_argument("--port", action="store", help="Frontend Server Port", type=int)
    parser.add_argument("--password", action="store", help="Frontend Server Password")
    parser.add_argument("-c", "--config", metavar="FILE", action="store", help="Read configuration from FILE")
    parsed = parser.parse_args()

    ip = False
    port = False
    pw = False
    if (parsed.config or
            os.path.isfile("MaxiNet.cfg") or
            os.path.isfile(os.path.expanduser("~/.MaxiNet.cfg")) or
            os.path.isfile("/etc/MaxiNet.cfg")):
        if parsed.config:
            config = MaxiNetConfig(file=parsed.config,register=False)
        else:
            config = MaxiNetConfig(register=False)
        ip = config.get_nameserver_ip()
        port = config.get_nameserver_port()
        pw = config.get_nameserver_password()

    # Command-line flags override config-file values.
    if parsed.ip:
        ip = parsed.ip
    if parsed.port:
        port = parsed.port
    if parsed.password:
        pw = parsed.password

    if os.getuid() != 0:
        print "MaxiNetWorker must run with root privileges!"
        sys.exit(1)

    if not (ip and port and pw):
        print "Please provide MaxiNet.cfg or specify ip, port and password of \
               the Frontend Server."
    else:
        workerserver = WorkerServer()
        signal.signal(signal.SIGINT, workerserver.exit_handler)
        workerserver.start(ip=ip, port=port, password=pw)
        workerserver.monitorFrontend()
# Script entry point: run the worker when executed directly.
if(__name__ == "__main__"):
    main()
| 34.429752 | 147 | 0.579033 |
import argparse
import atexit
import logging
import os
import signal
import subprocess
import sys
import tempfile
import time
from mininet.node import UserSwitch, OVSSwitch
from mininet.link import Link, TCIntf
import mininet.term
import Pyro4
import threading
import traceback
from MaxiNet.tools import Tools, MaxiNetConfig
from MaxiNet.WorkerServer.ssh_manager import SSH_Manager
try:
from mininet.net import Containernet as Mininet
except ImportError:
from mininet.net import Mininet
class WorkerServer(object):
"""Manages the Worker
The WorkerServer class connects to the nameserver and registers
itself with the MaxiNetManager instance. It is used by the Cluster instances
to start mininet instances, manage the ssh daemon and run commands etc.
Attributes:
logger: logging instance
mnManager: instance of class MininetManager which is used to create mininet
instances
sshManager: instance of class SSH_Manager which is used to manage the ssh
daemon.
ssh_folder: folder which holds configuration files for the ssh daemon.
ip: ip address of Worker
"""
def __init__(self):
self._ns = None
self._pyrodaemon = None
self.logger = logging.getLogger(__name__)
self._manager = None
self.mnManager = MininetManager()
self.sshManager = None
self.ssh_folder = tempfile.mkdtemp()
atexit.register(subprocess.call, ["rm", "-rf", self.ssh_folder])
logging.basicConfig(level=logging.DEBUG)
self.ip = None
self._shutdown = False
self._ip = None
self._port = None
self._password = None
self._looping_thread = None
def exit_handler(self, signal, frame):
print "exiting..."
self._shutdown = True
sys.exit()
@Pyro4.expose
def monitorFrontend(self):
""" function to monitor if the frontend is still alive.
if not, try to reconnect.
"""
while(not self._shutdown):
try:
self._manager.getStatus()
except:
if self._ip != None:
#self.ip as an indicator that this worker was connected to the frontend once.
print "Trying to reconnect to FrontendServer..."
try:
try:
self._pyrodaemon.unregister(self)
except:
pass
try:
self._pyrodaemon.unregister(self.mnManager)
except:
pass
try:
self._pyrodaemon.unregister(self.sshManager)
except:
pass
try:
self._pyrodaemon.shutdown()
except:
pass
try:
self._pyrodaemon.close()
except:
pass
self.start(self._ip, self._port, self._password)
except Exception as e:
traceback.print_exc(e)
pass
pass
time.sleep(5)
@Pyro4.expose
def start(self, ip, port, password, retry=float("inf")):
"""Start WorkerServer and ssh daemon and connect to nameserver."""
self.logger.info("starting up and connecting to %s:%d"
% (ip, port))
#store for reconnection attempts
self._ip = ip
self._port = port
self._password = password
#Pyro4.config.HMAC_KEY = password
tries=1
self._ns = None
while not self._ns:
try:
self._ns = Pyro4.locateNS(ip, port, hmac_key=password)
except Pyro4.errors.NamingError:
if tries < retry:
self.logger.warn("Unable to locate Nameserver. Trying again in 5 seconds...")
time.sleep(5)
tries += 1
else:
self.logger.error("Unable to locate Nameserver.")
sys.exit()
self.config = Pyro4.Proxy(self._ns.lookup("config"))
self.config._pyroHmacKey=password
self.ip = self.config.get_worker_ip(self.get_hostname())
if(not self.ip):
self.ip = Tools.guess_ip()
if not self.config.has_section(self.get_hostname()):
self.config.add_section(self.get_hostname())
self.config.set(self.get_hostname(), "ip", self.ip)
self.logger.warn("""FrontendServer did not know IP of this host (check configuration for hostname).
Guessed: %s""" % self.ip)
self.logger.info("configuring and starting ssh daemon...")
self.sshManager = SSH_Manager(folder=self.ssh_folder, ip=self.ip, port=self.config.get_sshd_port(), user=self.config.get("all", "sshuser"))
self.sshManager.start_sshd()
self._pyrodaemon = Pyro4.Daemon(host=self.ip)
self._pyrodaemon._pyroHmacKey=password
uri = self._pyrodaemon.register(self)
self._ns.register(self._get_pyroname(), uri)
uri = self._pyrodaemon.register(self.mnManager)
self._ns.register(self._get_pyroname()+".mnManager", uri)
uri = self._pyrodaemon.register(self.sshManager)
self._ns.register(self._get_pyroname()+".sshManager", uri)
atexit.register(self._stop)
self.logger.info("looking for manager application...")
manager_uri = self._ns.lookup("MaxiNetManager")
if(manager_uri):
self._manager = Pyro4.Proxy(manager_uri)
self._manager._pyroHmacKey=self._password
self.logger.info("signing in...")
if(self._manager.worker_signin(self._get_pyroname(), self.get_hostname())):
self.logger.info("done. Entering requestloop.")
self._started = True
self._looping_thread = threading.Thread(target=self._pyrodaemon.requestLoop)
self._looping_thread.daemon = True
self._looping_thread.start()
else:
self.logger.error("signin failed.")
else:
self.logger.error("no manager found.")
def _get_pyroname(self):
return "MaxiNetWorker_%s" % self.get_hostname()
@Pyro4.expose
def get_hostname(self):
return subprocess.check_output(["hostname"]).strip()
def _stop(self):
self.logger.info("signing out...")
if(self._manager):
self._manager.worker_signout(self.get_hostname())
self.logger.info("shutting down...")
self._ns.remove(self._get_pyroname())
self._ns.remove(self._get_pyroname()+".mnManager")
self._pyrodaemon.unregister(self)
self._pyrodaemon.unregister(self.mnManager)
self._pyrodaemon.unregister(self.sshManager)
self._pyrodaemon.shutdown()
self._pyrodaemon.close()
@Pyro4.expose
def remoteShutdown(self):
self._pyrodaemon.shutdown()
@Pyro4.expose
def stop(self):
(signedin, assigned) = self._manager.get_worker_status(self.get_hostname())
if(assigned):
self.logger.warn("can't shut down as worker is still assigned to id %d" % assigned)
return False
else:
self._stop()
return True
@Pyro4.expose
def check_output(self, cmd):
"""Run cmd on Worker and return output
Args:
cmd: command to call with optional parameters
Returns:
Shell output of command
"""
self.logger.debug("Executing %s" % cmd)
return subprocess.check_output(cmd, shell=True,
stderr=subprocess.STDOUT).strip()
@Pyro4.expose
def script_check_output(self, cmd):
"""Call MaxiNet Script and return output
Args:
cmd: name of script to call
Returns:
Shell output of script
"""
cmd = Tools.get_script_dir() + cmd
return self.check_output(cmd)
@Pyro4.expose
def run_cmd(self, command):
"""Call command (blocking)
Args:
command: command to call with optional parameters
"""
subprocess.call(command, shell=True)
@Pyro4.expose
def daemonize(self, cmd):
"""Call command (non-blocking)
Args:
command: command to call with optional parameters
"""
p = subprocess.Popen(cmd, shell=True)
atexit.register(p.terminate)
@Pyro4.expose
def daemonize_script(self, script, args):
"""Call MaxiNet Script (non-blocking)
Args:
cmd: name of script to call
"""
cmd = Tools.get_script_dir()+script+" "+args
p = subprocess.Popen(cmd, shell=True)
atexit.register(p.terminate)
class TCLinkParams(Link):
"""Link with symmetric TC interfaces
Like the mininet TCLink class but with support of the params1
and params2 arguments.
"""
def __init__(self, node1, node2, port1=None, port2=None,
intfName1=None, intfName2=None,
addr1=None, addr2=None, params1=None,
params2=None, **kvargs):
Link.__init__(self, node1, node2, port1=port1, port2=port2,
intfName1=intfName1, intfName2=intfName2,
cls1=TCIntf,
cls2=TCIntf,
addr1=addr1, addr2=addr2,
params1=params1,
params2=params2)
class MininetManager(object):
def __init__(self):
self.logger = logging.getLogger(__name__)
self.net = None
@Pyro4.expose
def create_mininet(self, topo, tunnels=[], switch=UserSwitch,
controller=None, STT=False):
if(not self.net is None):
self.logger.warn("running mininet instance detected!\
Shutting it down...")
self.destroy_mininet()
self.logger.info("Creating mininet instance")
try:
if controller:
self.net = Mininet(topo=topo, intf=TCIntf, link=TCLinkParams,
switch=switch, controller=controller)
else:
self.net = Mininet(topo=topo, intf=TCIntf, link=TCLinkParams,
switch=switch)
except Exception, e:
self.logger.error("Failed to create mininet instance: %s" % traceback.format_exc())
raise e
if STT:
self.logger.info("Starting Mininet...")
self.net.start()
self.logger.info("Adding tunnels to mininet instance")
for tunnel in tunnels:
port = None
cls = None
if "node1" not in tunnel[2].keys():
self.logger.info("Error! node1 is missing in tunnel metadata")
if tunnel[2]["node1"] in topo.nodes():
port = tunnel[2]["port1"]
else:
port = tunnel[2]["port2"]
if "cls" in tunnel[2].keys():
cls = tunnel[2]["cls"]
del tunnel[2]["cls"]
self.addTunnel(tunnel[0], tunnel[1], port, cls, STT=STT, **tunnel[2])
if not STT:
self.logger.info("Starting Mininet...")
self.net.start()
self.logger.info("Startup complete.")
self.x11popens = []
return True
@Pyro4.expose
def destroy_mininet(self):
"""shut down mininet instance"""
if self.net:
for popen in self.x11popens:
popen.terminate()
popen.communicate()
popen.wait()
self.net.stop()
self.logger.info("mininet instance terminated")
self.net = None
@Pyro4.expose
def configLinkStatus(self, src, dst, status):
self.net.configLinkStatus(src, dst, status)
@Pyro4.expose
def rpc(self, hostname, cmd, *params1, **params2):
h = self.net.get(hostname)
return getattr(h, cmd)(*params1, **params2)
@Pyro4.expose
def attr(self, hostname, name):
h = self.net.get(hostname)
return getattr(h, name)
@Pyro4.expose
def addHost(self, name, cls=None, **params):
self.net.addHost(name, cls, **params)
return name
@Pyro4.expose
def addSwitch(self, name, cls=None, **params):
self.net.addSwitch(name, cls, **params)
self.net.get(name).start(self.net.controllers)
return name
@Pyro4.expose
def addController(self, name="c0", controller=None, **params):
self.net.addController(name, controller, **params)
return name
@Pyro4.expose
def addTunnel(self, name, switch, port, intf, STT=False, **params):
switch_i = self.net.get(switch)
if not intf:
intf = TCIntf
if STT:
subprocess.check_output(["ovs-vsctl","add-port", switch, name])
else:
intf(name, node=switch_i, port=port, link=None, **params)
@Pyro4.expose
def tunnelX11(self, node, display):
node = self.net.get(node)
(tunnel, popen) = mininet.term.tunnelX11(node, display)
self.x11popens.append(popen)
@Pyro4.expose
def addLink(self, node1, node2, port1=None, port2=None, cls=None,
**params):
node1 = self.net.get(node1)
node2 = self.net.get(node2)
l = self.net.addLink(node1, node2, port1, port2, cls, **params)
return ((node1.name, l.intf1.name), (node2.name, l.intf2.name))
@Pyro4.expose
def runCmdOnHost(self, hostname, command, noWait=False):
'''
e.g. runCmdOnHost('h1', 'ifconfig')
'''
h1 = self.net.get(hostname)
if noWait:
return h1.sendCmd(command)
else:
return h1.cmd(command)
def getFrontendStatus():
    # Look up the MaxiNetManager in the Pyro4 nameserver (using credentials
    # from the local MaxiNet configuration) and print the worker status
    # table of the running frontend server.
    config = MaxiNetConfig(register=False)
    ip = config.get_nameserver_ip()
    port = config.get_nameserver_port()
    pw = config.get_nameserver_password()
    ns = Pyro4.locateNS(ip, port, hmac_key=pw)
    manager_uri = ns.lookup("MaxiNetManager")
    if(manager_uri):
        manager = Pyro4.Proxy(manager_uri)
        # Authenticate the proxy with the shared HMAC key.
        manager._pyroHmacKey=pw
        print manager.print_worker_status()
    else:
        print "Could not contact Frontend server at %s:%s" % (ip, port)
def main():
    # Entry point of the MaxiNet worker: parse CLI flags, read the optional
    # configuration file, then start the WorkerServer and watch the frontend.
    parser = argparse.ArgumentParser(description="MaxiNet Worker which hosts a mininet instance")
    parser.add_argument("--ip", action="store", help="Frontend Server IP")
    parser.add_argument("--port", action="store", help="Frontend Server Port", type=int)
    parser.add_argument("--password", action="store", help="Frontend Server Password")
    parser.add_argument("-c", "--config", metavar="FILE", action="store", help="Read configuration from FILE")
    parsed = parser.parse_args()
    ip = False
    port = False
    pw = False
    # Configuration file lookup order: --config flag, ./MaxiNet.cfg,
    # ~/.MaxiNet.cfg, /etc/MaxiNet.cfg.
    if (parsed.config or
            os.path.isfile("MaxiNet.cfg") or
            os.path.isfile(os.path.expanduser("~/.MaxiNet.cfg")) or
            os.path.isfile("/etc/MaxiNet.cfg")):
        if parsed.config:
            config = MaxiNetConfig(file=parsed.config,register=False)
        else:
            config = MaxiNetConfig(register=False)
        ip = config.get_nameserver_ip()
        port = config.get_nameserver_port()
        pw = config.get_nameserver_password()
    # Explicit command-line flags override values from the config file.
    if parsed.ip:
        ip = parsed.ip
    if parsed.port:
        port = parsed.port
    if parsed.password:
        pw = parsed.password
    # mininet needs root privileges to set up namespaces and interfaces.
    if os.getuid() != 0:
        print "MaxiNetWorker must run with root privileges!"
        sys.exit(1)
    if not (ip and port and pw):
        print "Please provide MaxiNet.cfg or specify ip, port and password of \
the Frontend Server."
    else:
        workerserver = WorkerServer()
        # Ensure a clean shutdown (mininet teardown) on Ctrl-C.
        signal.signal(signal.SIGINT, workerserver.exit_handler)
        workerserver.start(ip=ip, port=port, password=pw)
        workerserver.monitorFrontend()
if __name__ == "__main__":
    main()
| false | true |
f71ce24d91b286d5f0da039e4c82f07b1fcc56b0 | 4,427 | py | Python | libs/trans/handler_0_1.py | StrayCamel247/Daily_utils | 9feeb09ebcf5ad5d3b91ab3c59e7c16855a51944 | [
"Apache-2.0"
] | 1 | 2020-12-11T13:57:46.000Z | 2020-12-11T13:57:46.000Z | libs/trans/handler_0_1.py | StrayCamel247/Daily_utils | 9feeb09ebcf5ad5d3b91ab3c59e7c16855a51944 | [
"Apache-2.0"
] | null | null | null | libs/trans/handler_0_1.py | StrayCamel247/Daily_utils | 9feeb09ebcf5ad5d3b91ab3c59e7c16855a51944 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# __author__ : stray_camel
# __description__ : trans_0_1
# __REFERENCES__ : https://blog.csdn.net/qq_42544196/article/details/106468658;https://docs.python.org/3/library/logging.html
# __date__: 2020/12/11 15
import datetime
import logging
from pathlib import Path
import random
import re
import sys
import threading
import time
from functools import wraps
from hashlib import md5
from typing import Any
import requests
def logger_set():
"""
自定义日志格式,保存至对应文件
官方文档:https://docs.python.org/3/library/logging.html
"""
# 日志文件存储格式
logger = logging.getLogger()
logsf_loder = Path('./logs')
# 如果目录不存在则创建
logsf_loder.mkdir(parents=True, exist_ok=True)
# 储存的文件名,带时间戳后缀
logs_file = logsf_loder / \
"{}.log".format(datetime.datetime.now().strftime('%y-%m-%d'))
# 转为绝对路径
fh = logging.FileHandler(logs_file.resolve(), encoding="utf-8", mode="a")
logger.setLevel(logging.INFO)
fh.setFormatter(logging.Formatter("%(message)s\n"))
logger.addHandler(fh)
# 打印格式
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(message)s')
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
logger_set()
class YouDaoFanYi(object):
    """Minimal client for the Youdao web translation endpoint.

    Based on: https://blog.csdn.net/qq_42544196
    """

    def __init__(self):
        self.url = 'http://fanyi.youdao.com/translate_o?smartresult=dict&smartresult=rule'
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/81.0.4044.138 Safari/537.36',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Referer': 'http://fanyi.youdao.com/',
            'Cookie': 'OUTFOX_SEARCH_USER_ID="-1571440969@10.108.160.19"'
        }

    @staticmethod
    def create_data(e):
        """Build the anti-scraping form fields (ts/bv/salt/sign) for query ``e``."""
        browser = "5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36"
        bv = md5(browser.encode()).hexdigest()
        ts = int(time.time() * 1000)
        salt = int(str(ts) + str(random.randint(0, 10)))
        sign = md5(("fanyideskweb" + e + str(salt) +
                    "Nw(nmmbP%A-r6U3EUn]Aj").encode()).hexdigest()
        return {'ts': ts, 'bv': bv, 'salt': salt, 'sign': sign}

    def fanyi_word(self, word):
        """POST ``word`` to the translation endpoint and return the JSON reply."""
        form = {
            'i': word,
            'from': 'AUTO',
            'to': 'AUTO',
            'smartresult': 'dict',
            'client': 'fanyideskweb',
            'doctype': 'json',
            'version': 2.1,
            'keyfrom': 'fanyi.web',
            'action': 'FY_BY_REALTlME'
        }
        # Merge the signed fields in; no keys collide with the static form.
        form.update(self.create_data(word))
        return requests.post(url=self.url, headers=self.headers,
                             data=form).json()

    def main(self, word):
        self.fanyi_word(word)
def thread_handler(func):
    """Decorator that executes the wrapped method in a background thread.

    The original code was an unfinished TODO stub whose wrapper never
    invoked ``func`` at all. The wrapper now starts a daemon thread
    running ``func(self, *args, **kwargs)`` and returns the started
    ``threading.Thread`` so callers may ``join()`` it if desired.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        worker = threading.Thread(
            target=func, args=(self,) + args, kwargs=kwargs)
        # Daemon: do not keep the interpreter alive for background work.
        worker.daemon = True
        worker.start()
        return worker
    return wrapper
class fanyi(YouDaoFanYi):
    """Customised translator that formats and logs Youdao results."""

    def _fanyi_word(self, word):
        """Translate ``word`` and return a display string.

        Returns ``word`` followed by the cleaned-up dictionary entries,
        one per indented line; '' when the reply carries no usable
        entries; None when the reply shape is unexpected.
        """
        res = self.fanyi_word(word=word)
        try:
            if res.get('translateResult'):
                smart_entries = res.get('smartResult', {}).get('entries', [])
                # Strip '!', '%' and whitespace control characters.
                results = [
                    re.sub("[\!\%\\t\\r\\n]", "", entry)
                    for entry in smart_entries
                    if entry
                ]
                return '\n  '.join([word] + results) if results else ''
        except Exception:
            # Malformed/unexpected reply: treat as "no translation" instead
            # of silently swallowing every error with a bare except.
            return None

    def word_analysis_copy(self, *args: '翻译单个或多个单词', **kwds: Any):
        """Translate each word in ``args`` and log the combined result.

        Underscores in a word are replaced by spaces before lookup.
        """
        words = [' '.join(w.split('_')) if '_' in w else w for w in set(args)]
        # Drop None/empty results: the original joined them directly, which
        # raised TypeError whenever _fanyi_word returned None.
        translated = [t for t in map(self._fanyi_word, words) if t]
        logging.info('\n'.join(translated))

    def __call__(self, *args: Any, **kwds: Any) -> Any:
        self.word_analysis_copy(*args)
"""
---------------------------------------------------------------------------
代码运行
>>> python .\main.py trans
>>> trans
>>> n. (Trans) (丹)唐(人名)
>>> abbr. 交易;交易行为;交流;事务 (transaction);及物的;(关系)可递的;过度的 (transitive);(尤指职业)翻译;翻译程序;电
>>> 视差频转播机 (translator)
>>> adj. 反式的;跨性别的;(有机体)异型结合的
---------------------------------------------------------------------------
"""
# fanyi = fanyi()
# fanyi(*sys.argv[1:])
| 29.125 | 125 | 0.550486 |
import datetime
import logging
from pathlib import Path
import random
import re
import sys
import threading
import time
from functools import wraps
from hashlib import md5
from typing import Any
import requests
def logger_set():
    """Configure the root logger: dated file in ./logs plus console output."""
    logger = logging.getLogger()
    logsf_loder = Path('./logs')
    # Create the log directory if it does not exist yet.
    logsf_loder.mkdir(parents=True, exist_ok=True)
    # One log file per day, e.g. logs/20-12-11.log
    logs_file = logsf_loder / \
        "{}.log".format(datetime.datetime.now().strftime('%y-%m-%d'))
    # File handler: absolute path, UTF-8, append mode.
    fh = logging.FileHandler(logs_file.resolve(), encoding="utf-8", mode="a")
    logger.setLevel(logging.INFO)
    fh.setFormatter(logging.Formatter("%(message)s\n"))
    logger.addHandler(fh)
    # Console handler mirrors the same messages.
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    # NOTE(review): logging.getLogger('') is the same root logger as above.
    logging.getLogger('').addHandler(console)
# Configure logging as an import-time side effect.
logger_set()
class YouDaoFanYi(object):
    """Minimal client for the Youdao web translation endpoint."""
    def __init__(self):
        self.url = 'http://fanyi.youdao.com/translate_o?smartresult=dict&smartresult=rule'
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/81.0.4044.138 Safari/537.36',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Referer': 'http://fanyi.youdao.com/',
            'Cookie': 'OUTFOX_SEARCH_USER_ID="-1571440969@10.108.160.19"'
        }
    @staticmethod
    def create_data(e):
        """Build the anti-scraping form fields (ts/bv/salt/sign) for query ``e``."""
        n = "5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36"
        t = md5(n.encode()).hexdigest()
        r = int(time.time() * 1000)
        i = int(str(r) + str(random.randint(0, 10)))
        sign = md5(("fanyideskweb" + e + str(i) +
                    "Nw(nmmbP%A-r6U3EUn]Aj").encode()).hexdigest()
        return {'ts': r, 'bv': t, 'salt': i, 'sign': sign}
    def fanyi_word(self, word):
        """POST ``word`` to the translation endpoint and return the JSON reply."""
        sys_data = self.create_data(word)
        data = {
            'i': word,
            'from': 'AUTO',
            'to': 'AUTO',
            'smartresult': 'dict',
            'client': 'fanyideskweb',
            'doctype': 'json',
            'version': 2.1,
            'keyfrom': 'fanyi.web',
            'action': 'FY_BY_REALTlME'
        }
        # Merge the signed fields with the static form fields.
        result = requests.post(url=self.url, headers=self.headers, data={
            **data, **sys_data}).json()
        return result
    def main(self, word):
        self.fanyi_word(word)
def thread_handler(func):
    """Unfinished decorator stub: the wrapper never calls ``func``.

    NOTE(review): as written, decorating a function with this makes every
    call a no-op returning None -- presumably intended to run ``func`` in
    a thread eventually; confirm before using.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        pass
    return wrapper
class fanyi(YouDaoFanYi):
    """Customised translator that formats and logs Youdao results."""
    def _fanyi_word(self, word):
        """Translate ``word``; return a display string, '' or None."""
        res = self.fanyi_word(word=word)
        try:
            if res.get('translateResult'):
                smartResults = res.get('smartResult', {}).get('entries', [])
                # Strip '!', '%' and whitespace control characters.
                results = [
                    re.sub("[\!\%\\t\\r\\n]", "", res)
                    for res in smartResults
                    if res
                ]
                rest = '\n  '.join([word]+results) if results else ''
                return rest
        except:
            # Bare except: any unexpected reply shape yields None.
            pass
    def word_analysis_copy(self, *args: '翻译单个或多个单词', **kwds: Any):
        """Translate each word in ``args`` and log the combined result.

        NOTE(review): joining map() output directly raises TypeError when
        _fanyi_word returns None for a word.
        """
        args = [' '.join(_.split('_')) if '_' in _ else _ for _ in set(args)]
        logging.info('\n'.join(map(self._fanyi_word, args)))
    def __call__(self, *args: Any, **kwds: Any) -> Any:
        self.word_analysis_copy(*args)
| true | true |
f71ce2dda7e8afdb46370fe9f341989c34e72ab8 | 5,864 | py | Python | tensorflow_datasets/testing/mocking.py | robbjr/datasets | fbb2af9d0e88f8e2ae884e9764fbeff2ee487813 | [
"Apache-2.0"
] | 1 | 2019-10-12T08:05:11.000Z | 2019-10-12T08:05:11.000Z | tensorflow_datasets/testing/mocking.py | robbjr/datasets | fbb2af9d0e88f8e2ae884e9764fbeff2ee487813 | [
"Apache-2.0"
] | null | null | null | tensorflow_datasets/testing/mocking.py | robbjr/datasets | fbb2af9d0e88f8e2ae884e9764fbeff2ee487813 | [
"Apache-2.0"
] | 1 | 2019-12-14T00:32:08.000Z | 2019-12-14T00:32:08.000Z | # coding=utf-8
# Copyright 2019 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mock util for tfds.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import os
import random
from absl.testing import absltest
import numpy as np
import tensorflow as tf
from tensorflow_datasets.core import features as features_lib
@contextlib.contextmanager
def mock_data(num_examples=1, as_dataset_fn=None, data_dir=None):
  """Mock tfds so that datasets yield randomly generated examples.

  The true metadata files (dataset_info.json, label.txt, vocabulary files)
  must still exist under `data_dir/dataset_name/version`, exactly as for the
  real dataset. Examples are generated at random according to
  `builder.info.features.get_tensor_info()` and download_and_prepare is
  reduced to a metadata-presence check.

  Warning: because the true metadata is reused,
  `info.split['train'].num_examples` will not match `len(list(ds_train))`.

  For fine-grained control pass `as_dataset_fn`, a replacement for
  `DatasetBuilder._as_dataset` returning a `tf.data.Dataset` of fake
  examples.

  Args:
    num_examples: `int`, the number of fake examples to generate.
    as_dataset_fn: if provided, replaces the default random example
      generator. This function mocks `FileAdapterBuilder._as_dataset`.
    data_dir: `str`, `data_dir` folder from where to load the metadata.
      Will overwrite the `data_dir` kwarg of `tfds.load`.

  Yields:
    None
  """

  def mock_download_and_prepare(self, *args, **kwargs):
    del args
    del kwargs
    # Only verify that the (real) metadata files are in place.
    if not tf.io.gfile.exists(self._data_dir):  # pylint: disable=protected-access
      raise ValueError(
          'TFDS has been mocked, but metadata files where not found in {}. '
          'You should copy the real metadata files, so that the dataset '
          'can be loaded properly, or set the data_dir kwarg of'
          'tfds.testing.mock_tfds(data_dir=...).'
          ''.format(self._data_dir)  # pylint: disable=protected-access
      )

  def mock_as_dataset(self, *args, **kwargs):
    del args
    del kwargs
    # Lazily generate `num_examples` random examples matching the features.
    return tf.data.Dataset.from_generator(
        lambda: (_generate_random_example(self) for _ in range(num_examples)),
        output_types=self.info.features.dtype,
        output_shapes=self.info.features.shape,
    )

  as_dataset_fn = as_dataset_fn or mock_as_dataset
  data_dir = data_dir or os.path.join(os.path.dirname(__file__), 'metadata')

  patch = absltest.mock.patch
  with patch(
      'tensorflow_datasets.core.dataset_builder.FileAdapterBuilder._as_dataset',
      as_dataset_fn), \
      patch(
          'tensorflow_datasets.core.dataset_builder.DatasetBuilder.download_and_prepare',
          mock_download_and_prepare), \
      patch('tensorflow_datasets.core.constants.DATA_DIR', data_dir):
    yield
def _generate_random_array(feature, tensor_info):
  """Generates a random tensor for a single feature."""
  # TODO(tfds): Could improve the fake generation:
  # * Use the feature statistics (min, max)
  # * For Sequence features
  # * For Text
  # Unknown (None) dimensions are filled with a random size in [5, 50).
  shape = [np.random.randint(5, 50) if dim is None else dim
           for dim in tensor_info.shape]

  # Upper bound for integer values: number of classes / vocabulary size
  # when known, uint8 range otherwise.
  if isinstance(feature, features_lib.ClassLabel):
    max_value = feature.num_classes
  elif isinstance(feature, features_lib.Text) and feature.vocab_size:
    max_value = feature.vocab_size
  else:
    max_value = 255

  dtype = tensor_info.dtype
  if dtype.is_integer:
    return np.random.randint(0, max_value, shape)
  if dtype.is_floating:
    return np.random.random_sample(shape)
  if dtype == tf.string:
    # A single random string (the declared shape is not honored here).
    length = random.randint(10, 20)
    return ''.join(random.choice(' abcdefghij') for _ in range(length))
  raise ValueError('Fake generation not supported for {}'.format(dtype))
def _generate_random_example(builder):
  """Returns one random example matching `builder.info.features`."""
  features = builder.info.features
  flat_features = features._flatten(features)  # pylint: disable=protected-access
  flat_infos = features._flatten(features.get_tensor_info())  # pylint: disable=protected-access
  flat_values = [
      _generate_random_array(feat, info)
      for feat, info in zip(flat_features, flat_infos)
  ]
  return features._nest(flat_values)  # pylint: disable=protected-access
| 34.698225 | 110 | 0.722715 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import os
import random
from absl.testing import absltest
import numpy as np
import tensorflow as tf
from tensorflow_datasets.core import features as features_lib
@contextlib.contextmanager
def mock_data(num_examples=1, as_dataset_fn=None, data_dir=None):
  """Mock tfds so that datasets yield `num_examples` random examples.

  Requires the true metadata files under `data_dir`; download_and_prepare
  becomes a metadata-presence check and `_as_dataset` is replaced by
  `as_dataset_fn` (or a random example generator).
  """
  def mock_download_and_prepare(self, *args, **kwargs):
    del args
    del kwargs
    # Only verify that the (real) metadata files are in place.
    if not tf.io.gfile.exists(self._data_dir):
      raise ValueError(
          'TFDS has been mocked, but metadata files where not found in {}. '
          'You should copy the real metadata files, so that the dataset '
          'can be loaded properly, or set the data_dir kwarg of'
          'tfds.testing.mock_tfds(data_dir=...).'
          ''.format(self._data_dir)
      )
  def mock_as_dataset(self, *args, **kwargs):
    del args
    del kwargs
    # Lazily generate random examples matching the feature spec.
    ds = tf.data.Dataset.from_generator(
        lambda: (_generate_random_example(self) for _ in range(num_examples)),
        output_types=self.info.features.dtype,
        output_shapes=self.info.features.shape,
    )
    return ds
  if not as_dataset_fn:
    as_dataset_fn = mock_as_dataset
  if not data_dir:
    data_dir = os.path.join(os.path.dirname(__file__), 'metadata')
  # Patch targets inside tensorflow_datasets while the context is active.
  download_and_prepare_path = 'tensorflow_datasets.core.dataset_builder.DatasetBuilder.download_and_prepare'
  as_dataset_path = 'tensorflow_datasets.core.dataset_builder.FileAdapterBuilder._as_dataset'
  data_dir_path = 'tensorflow_datasets.core.constants.DATA_DIR'
  with absltest.mock.patch(as_dataset_path, as_dataset_fn), \
      absltest.mock.patch(
          download_and_prepare_path, mock_download_and_prepare), \
      absltest.mock.patch(data_dir_path, data_dir):
    yield
def _generate_random_array(feature, tensor_info):
  """Generates a random tensor for a single feature."""
  # Unknown (None) dimensions are filled with a random size in [5, 50).
  shape = [
      np.random.randint(5, 50) if s is None else s
      for s in tensor_info.shape
  ]
  # Upper bound for integer values: classes / vocab size when known.
  if isinstance(feature, features_lib.ClassLabel):
    max_value = feature.num_classes
  elif isinstance(feature, features_lib.Text) and feature.vocab_size:
    max_value = feature.vocab_size
  else:
    max_value = 255
  if tensor_info.dtype.is_integer:
    return np.random.randint(0, max_value, shape)
  elif tensor_info.dtype.is_floating:
    return np.random.random_sample(shape)
  elif tensor_info.dtype == tf.string:
    # A single random string (the declared shape is not honored here).
    return ''.join(
        random.choice(' abcdefghij') for _ in range(random.randint(10, 20)))
  else:
    raise ValueError('Fake generation not supported for {}'.format(
        tensor_info.dtype))
def _generate_random_example(builder):
  """Returns one random example matching `builder.info.features`."""
  root_feature = builder.info.features
  flat_features = root_feature._flatten(root_feature)
  flat_tensor_info = root_feature._flatten(root_feature.get_tensor_info())
  flat_np = [
      _generate_random_array(feature, tensor_info)
      for feature, tensor_info in zip(flat_features, flat_tensor_info)
  ]
  # Re-nest the flat values back into the original feature structure.
  return root_feature._nest(flat_np)
| true | true |
f71ce321bd7159819de5bbecec6b6484d3ec718a | 12,450 | py | Python | spyder/plugins/editor/utils/tests/test_autosave.py | Earthman100/spyder | 949ce0f9100a69504c70a5678e8589a05aee7d38 | [
"MIT"
] | 7,956 | 2015-02-17T01:19:09.000Z | 2022-03-31T21:52:15.000Z | spyder/plugins/editor/utils/tests/test_autosave.py | Earthman100/spyder | 949ce0f9100a69504c70a5678e8589a05aee7d38 | [
"MIT"
] | 16,326 | 2015-02-16T23:15:21.000Z | 2022-03-31T23:34:34.000Z | spyder/plugins/editor/utils/tests/test_autosave.py | Earthman100/spyder | 949ce0f9100a69504c70a5678e8589a05aee7d38 | [
"MIT"
] | 1,918 | 2015-02-20T19:26:26.000Z | 2022-03-31T19:03:25.000Z | # -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
#
"""Tests for autosave.py"""
# Standard library imports
import ast
import os.path as osp
# Third party imports
import pytest
# Local imports
from spyder.plugins.editor.utils.autosave import (AutosaveForStack,
AutosaveForPlugin)
def test_autosave_component_set_interval(mocker):
    """Changing the interval updates it; do_autosave fires only when enabled."""
    mocker.patch.object(AutosaveForPlugin, 'do_autosave')
    autosave = AutosaveForPlugin(None)
    autosave.do_autosave.assert_not_called()
    # While disabled, setting the interval must not trigger an autosave.
    autosave.interval = 10000
    assert autosave.interval == 10000
    autosave.do_autosave.assert_not_called()
    # Once enabled, changing the interval triggers an autosave.
    autosave.enabled = True
    autosave.interval = 20000
    assert autosave.do_autosave.called
@pytest.mark.parametrize('enabled', [False, True])
def test_autosave_component_timer_if_enabled(qtbot, mocker, enabled):
    """do_autosave() runs on the timer if and only if autosave is enabled."""
    mocker.patch.object(AutosaveForPlugin, 'do_autosave')
    autosave = AutosaveForPlugin(None)
    autosave.do_autosave.assert_not_called()
    autosave.interval = 100
    autosave.enabled = enabled
    # Several timer periods fit in this wait.
    qtbot.wait(500)
    if not enabled:
        autosave.do_autosave.assert_not_called()
    else:
        assert autosave.do_autosave.called
def test_get_files_to_recover_with_empty_autosave_dir(mocker, tmpdir):
    """An empty autosave directory yields no files and no pid files."""
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    addon = AutosaveForPlugin(None)
    assert addon.get_files_to_recover() == ([], [])
@pytest.mark.parametrize('running,empty',
                         [(True, False), (False, False), (False, True)])
def test_get_files_to_recover_with_one_pid_file(mocker, tmpdir,
                                                running, empty):
    """Test get_files_to_recover() if autosave dir contains one pid file with
    one autosave file. If running is True, then pretend that the pid file
    belongs to a running Spyder instance. If empty is True, then the pid file
    is empty (regression test for spyder-ide/spyder#11375)."""
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    mock_is_spyder_process = mocker.patch(
        'spyder.plugins.editor.utils.autosave.is_spyder_process',
        return_value=running)
    # The pid file records the original -> autosave file mapping.
    pidfile = tmpdir.join('pid42.txt')
    autosavefile = tmpdir.join('foo.py')
    if empty:
        pidfile.write('')
    else:
        pidfile.write('{"original": ' + repr(str(autosavefile)) + '}')
    autosavefile.write('bar = 1')
    addon = AutosaveForPlugin(None)
    result = addon.get_files_to_recover()
    if empty:  # pid file corrupted so original file name not recorded
        expected_files = [(None, str(autosavefile))]
    elif running:  # autosave file belongs to running instance
        expected_files = []
    else:
        expected_files = [('original', str(autosavefile))]
    expected = (expected_files, [str(pidfile)])
    assert result == expected
    # The pid (42) from the file name is what gets probed.
    mock_is_spyder_process.assert_called_with(42)
def test_get_files_to_recover_with_non_pid_file(mocker, tmpdir):
    """A stray Python file in the autosave dir is reported without original."""
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    orphan = tmpdir.join('foo.py')
    orphan.write('bar = 1')
    addon = AutosaveForPlugin(None)
    # No pid file: the original name is unknown (None) and no pid files
    # are reported.
    assert addon.get_files_to_recover() == ([(None, str(orphan))], [])
def test_get_files_to_recover_without_autosave_dir(mocker):
    """get_files_to_recover() copes with a missing autosave directory."""
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value='non-existing-directory')
    addon = AutosaveForPlugin(None)
    assert addon.get_files_to_recover() == ([], [])
@pytest.mark.parametrize('error_on_remove', [False, True])
def test_try_recover(mocker, tmpdir, error_on_remove):
    """Test that try_recover_from_autosave() displays a RecoveryDialog, that
    it stores the files that the user wants to open as reported by the dialog,
    and that it removes the pid file. If error_on_remove is set, then
    removing the pid file will raise an OSError; this should be ignored."""
    mock_RecoveryDialog = mocker.patch(
        'spyder.plugins.editor.utils.autosave.RecoveryDialog')
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    # Simulate a leftover autosave session from a crashed instance.
    pidfile = tmpdir.join('pid42.txt')
    autosavefile = tmpdir.join('foo.py')
    pidfile.write('{"original": ' + repr(str(autosavefile)) + '}')
    autosavefile.write('bar = 1')
    addon = AutosaveForPlugin(None)
    if error_on_remove:
        mocker.patch('os.remove', side_effect=OSError)
    addon.try_recover_from_autosave()
    expected_mapping = [('original', str(autosavefile))]
    mock_RecoveryDialog.assert_called_with(expected_mapping, parent=None)
    expected_files_to_open = mock_RecoveryDialog().files_to_open[:]
    assert addon.recover_files_to_open == expected_files_to_open
    # The pid file is only removed when os.remove succeeds.
    if not error_on_remove:
        assert not pidfile.check()
@pytest.mark.parametrize('in_mapping,on_disk',
                         [(False, False), (True, False), (False, True)])
def test_create_unique_autosave_filename(mocker, in_mapping, on_disk):
    """Test that AutosaveForStack.create_unique_autosave_filename() returns
    a file name in the autosave directory with the same base name as the
    original file name, unless that already exists in the autosave mapping
    or on disk."""
    # Fake os.path.exists: only autosave/ham.py "exists", and only when
    # the on_disk parameter says so.
    def new_exists(path):
        if path == osp.join('autosave', 'ham.py'):
            return on_disk
        else:
            return False
    mocker.patch('os.path.exists', side_effect=new_exists)
    addon = AutosaveForStack(mocker.Mock())
    if in_mapping:
        addon.name_mapping = {osp.join('somedir', 'ham.py'):
                              osp.join('autosave', 'ham.py')}
    autosave_filename = addon.create_unique_autosave_filename(
        osp.join('orig', 'ham.py'), 'autosave')
    # A clash (mapping or disk) forces the "-1" suffixed variant.
    if in_mapping or on_disk:
        assert autosave_filename == osp.join('autosave', 'ham-1.py')
    else:
        assert autosave_filename == osp.join('autosave', 'ham.py')
@pytest.mark.parametrize('have_hash', [True, False])
def test_autosave(mocker, have_hash):
    """Test that AutosaveForStack.maybe_autosave writes the contents to the
    autosave file and updates the file_hashes."""
    mock_editor = mocker.Mock()
    mock_fileinfo = mocker.Mock(editor=mock_editor, filename='orig',
                                newly_created=False)
    mock_document = mocker.Mock()
    mock_fileinfo.editor.document.return_value = mock_document
    mock_stack = mocker.Mock(data=[mock_fileinfo])
    addon = AutosaveForStack(mock_stack)
    addon.name_mapping = {'orig': 'autosave'}
    addon.file_hashes = {'autosave': 2}
    # have_hash controls whether the original file already has a hash entry.
    if have_hash:
        addon.file_hashes['orig'] = 1
    mock_stack.compute_hash.return_value = 3
    addon.maybe_autosave(0)
    mock_stack._write_to_file.assert_called_with(mock_fileinfo, 'autosave')
    mock_stack.compute_hash.assert_called_with(mock_fileinfo)
    # The autosave file's hash is refreshed; the orig hash is untouched.
    if have_hash:
        assert addon.file_hashes == {'orig': 1, 'autosave': 3}
    else:
        assert addon.file_hashes == {'autosave': 3}
@pytest.mark.parametrize('latin', [True, False])
def test_save_autosave_mapping_with_nonempty_mapping(mocker, tmpdir, latin):
    """A non-empty mapping is written to pid<pid>.txt (also for non-latin)."""
    mocker.patch('os.getpid', return_value=42)
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    addon = AutosaveForStack(None)
    # Exercise both an ASCII and a non-latin original file name.
    original_name = 'orig' if latin else '原件'
    addon.name_mapping = {original_name: 'autosave'}
    addon.save_autosave_mapping()
    pidfile = tmpdir.join('pid42.txt')
    assert ast.literal_eval(pidfile.read()) == addon.name_mapping
@pytest.mark.parametrize('pidfile_exists', [False, True])
def test_save_autosave_mapping_with_empty_mapping(mocker, tmpdir,
                                                  pidfile_exists):
    """An empty mapping writes no pid file and removes a stale one."""
    mocker.patch('os.getpid', return_value=42)
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    addon = AutosaveForStack(None)
    addon.name_mapping = {}
    stale_pidfile = tmpdir.join('pid42.txt')
    if pidfile_exists:
        stale_pidfile.write('This is an ex-parrot!')
    addon.save_autosave_mapping()
    # Either way, no pid file must remain afterwards.
    assert not stale_pidfile.check()
@pytest.mark.parametrize('exception', [False, True])
def test_autosave_remove_autosave_file(mocker, exception):
    """Test that AutosaveForStack.remove_autosave_file removes the autosave
    file, that an error dialog is displayed if an exception is raised,
    and that the autosave file is removed from `name_mapping` and
    `file_hashes`."""
    mock_remove = mocker.patch('os.remove')
    # Optionally make os.remove fail to check the error-dialog path.
    if exception:
        mock_remove.side_effect = OSError()
    mock_dialog = mocker.patch(
        'spyder.plugins.editor.utils.autosave.AutosaveErrorDialog')
    mock_stack = mocker.Mock()
    fileinfo = mocker.Mock()
    fileinfo.filename = 'orig'
    addon = AutosaveForStack(mock_stack)
    addon.name_mapping = {'orig': 'autosave'}
    addon.file_hashes = {'autosave': 42}
    addon.remove_autosave_file(fileinfo.filename)
    # Bookkeeping is cleared even when removing the file failed.
    assert addon.name_mapping == {}
    assert addon.file_hashes == {}
    mock_remove.assert_any_call('autosave')
    assert mock_dialog.called == exception
def test_get_autosave_filename(mocker, tmpdir):
    """Autosave file names are stable and unique across base-name clashes."""
    addon = AutosaveForStack(mocker.Mock())
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    first = str(tmpdir.join('foo.py'))
    assert addon.get_autosave_filename('foo.py') == first
    # Asking again for the same file returns the same, consistent name.
    assert addon.get_autosave_filename('foo.py') == first
    # Another file with the same base name gets a unique variant.
    second = str(tmpdir.join('foo-1.py'))
    assert addon.get_autosave_filename('ham/foo.py') == second
@pytest.mark.parametrize('have_hash', [True, False])
def test_autosave_file_renamed(mocker, tmpdir, have_hash):
    """Test that AutosaveForStack.file_renamed removes the old autosave file,
    creates a new one, and updates `name_mapping` and `file_hashes`."""
    mock_remove = mocker.patch('os.remove')
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    mock_editor = mocker.Mock()
    mock_fileinfo = mocker.Mock(editor=mock_editor, filename='new_foo.py',
                                newly_created=False)
    mock_document = mocker.Mock()
    mock_fileinfo.editor.document.return_value = mock_document
    mock_stack = mocker.Mock(data=[mock_fileinfo])
    mock_stack.has_filename.return_value = 0
    mock_stack.compute_hash.return_value = 3
    addon = AutosaveForStack(mock_stack)
    old_autosavefile = str(tmpdir.join('old_foo.py'))
    new_autosavefile = str(tmpdir.join('new_foo.py'))
    addon.name_mapping = {'old_foo.py': old_autosavefile}
    # have_hash controls whether the original file has a hash entry.
    # (A redundant unconditional assignment that was immediately overwritten
    # by this if/else has been removed.)
    if have_hash:
        addon.file_hashes = {'old_foo.py': 1, old_autosavefile: 42}
    else:
        addon.file_hashes = {old_autosavefile: 42}
    addon.file_renamed('old_foo.py', 'new_foo.py')
    # The stale autosave file is deleted and a fresh one is written.
    mock_remove.assert_any_call(old_autosavefile)
    mock_stack._write_to_file.assert_called_with(
        mock_fileinfo, new_autosavefile)
    assert addon.name_mapping == {'new_foo.py': new_autosavefile}
    if have_hash:
        assert addon.file_hashes == {'new_foo.py': 1, new_autosavefile: 3}
    else:
        assert addon.file_hashes == {new_autosavefile: 3}
# Allow running this test module directly: python test_autosave.py
if __name__ == "__main__":
    pytest.main()
| 38.544892 | 78 | 0.693574 |
import ast
import os.path as osp
import pytest
from spyder.plugins.editor.utils.autosave import (AutosaveForStack,
AutosaveForPlugin)
def test_autosave_component_set_interval(mocker):
    """Setting the interval changes it; do_autosave fires only when enabled."""
    mocker.patch.object(AutosaveForPlugin, 'do_autosave')
    addon = AutosaveForPlugin(None)
    addon.do_autosave.assert_not_called()
    addon.interval = 10000
    assert addon.interval == 10000
    addon.do_autosave.assert_not_called()
    # Once enabled, changing the interval triggers an autosave.
    addon.enabled = True
    addon.interval = 20000
    assert addon.do_autosave.called
@pytest.mark.parametrize('enabled', [False, True])
def test_autosave_component_timer_if_enabled(qtbot, mocker, enabled):
    """do_autosave() runs on the timer if and only if autosave is enabled."""
    mocker.patch.object(AutosaveForPlugin, 'do_autosave')
    addon = AutosaveForPlugin(None)
    addon.do_autosave.assert_not_called()
    addon.interval = 100
    addon.enabled = enabled
    # Several timer periods fit in this wait.
    qtbot.wait(500)
    if enabled:
        assert addon.do_autosave.called
    else:
        addon.do_autosave.assert_not_called()
def test_get_files_to_recover_with_empty_autosave_dir(mocker, tmpdir):
    """An empty autosave directory yields no files and no pid files."""
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    addon = AutosaveForPlugin(None)
    result = addon.get_files_to_recover()
    assert result == ([], [])
@pytest.mark.parametrize('running,empty',
                         [(True, False), (False, False), (False, True)])
def test_get_files_to_recover_with_one_pid_file(mocker, tmpdir,
                                                running, empty):
    """One pid file with one autosave file; `running` fakes a live Spyder
    instance and `empty` fakes a corrupted (empty) pid file."""
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    mock_is_spyder_process = mocker.patch(
        'spyder.plugins.editor.utils.autosave.is_spyder_process',
        return_value=running)
    # The pid file records the original -> autosave file mapping.
    pidfile = tmpdir.join('pid42.txt')
    autosavefile = tmpdir.join('foo.py')
    if empty:
        pidfile.write('')
    else:
        pidfile.write('{"original": ' + repr(str(autosavefile)) + '}')
    autosavefile.write('bar = 1')
    addon = AutosaveForPlugin(None)
    result = addon.get_files_to_recover()
    if empty:
        # Corrupted pid file: the original file name is unknown.
        expected_files = [(None, str(autosavefile))]
    elif running:
        # Autosave file belongs to a running instance: nothing to recover.
        expected_files = []
    else:
        expected_files = [('original', str(autosavefile))]
    expected = (expected_files, [str(pidfile)])
    assert result == expected
    # The pid (42) from the file name is what gets probed.
    mock_is_spyder_process.assert_called_with(42)
def test_get_files_to_recover_with_non_pid_file(mocker, tmpdir):
    """A stray Python file in the autosave dir is reported without original."""
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    pythonfile = tmpdir.join('foo.py')
    pythonfile.write('bar = 1')
    addon = AutosaveForPlugin(None)
    result = addon.get_files_to_recover()
    # No pid file: the original file name is unknown (None).
    expected = ([(None, str(pythonfile))], [])
    assert result == expected
def test_get_files_to_recover_without_autosave_dir(mocker):
    """A missing autosave directory is treated as empty, not an error."""
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value='non-existing-directory')
    files, pidfiles = AutosaveForPlugin(None).get_files_to_recover()
    assert (files, pidfiles) == ([], [])
@pytest.mark.parametrize('error_on_remove', [False, True])
def test_try_recover(mocker, tmpdir, error_on_remove):
    """try_recover_from_autosave shows the dialog and cleans up pidfiles.

    The RecoveryDialog must receive the (original, autosave) mapping; the
    pidfile is removed afterwards unless os.remove fails, in which case
    the failure is tolerated and the pidfile is left behind.
    """
    mock_RecoveryDialog = mocker.patch(
        'spyder.plugins.editor.utils.autosave.RecoveryDialog')
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    pidfile = tmpdir.join('pid42.txt')
    autosavefile = tmpdir.join('foo.py')
    pidfile.write('{"original": ' + repr(str(autosavefile)) + '}')
    autosavefile.write('bar = 1')
    addon = AutosaveForPlugin(None)
    if error_on_remove:
        mocker.patch('os.remove', side_effect=OSError)
    addon.try_recover_from_autosave()
    expected_mapping = [('original', str(autosavefile))]
    mock_RecoveryDialog.assert_called_with(expected_mapping, parent=None)
    # Files the user chose to open in the dialog are kept on the addon.
    expected_files_to_open = mock_RecoveryDialog().files_to_open[:]
    assert addon.recover_files_to_open == expected_files_to_open
    if not error_on_remove:
        assert not pidfile.check()
@pytest.mark.parametrize('in_mapping,on_disk',
                         [(False, False), (True, False), (False, True)])
def test_create_unique_autosave_filename(mocker, in_mapping, on_disk):
    """Autosave filenames are uniquified with a -1 suffix on collision.

    A collision happens when the candidate name is already used in
    name_mapping or already exists on disk.
    """
    def new_exists(path):
        # Pretend only autosave/ham.py exists on disk (when on_disk).
        if path == osp.join('autosave', 'ham.py'):
            return on_disk
        else:
            return False
    mocker.patch('os.path.exists', side_effect=new_exists)
    addon = AutosaveForStack(mocker.Mock())
    if in_mapping:
        addon.name_mapping = {osp.join('somedir', 'ham.py'):
                              osp.join('autosave', 'ham.py')}
    autosave_filename = addon.create_unique_autosave_filename(
        osp.join('orig', 'ham.py'), 'autosave')
    if in_mapping or on_disk:
        assert autosave_filename == osp.join('autosave', 'ham-1.py')
    else:
        assert autosave_filename == osp.join('autosave', 'ham.py')
@pytest.mark.parametrize('have_hash', [True, False])
def test_autosave(mocker, have_hash):
    """maybe_autosave writes the file and refreshes the autosave hash.

    The hash entry for the original file, if present, must be preserved;
    the autosave file's hash must be replaced with the newly computed one.
    """
    mock_editor = mocker.Mock()
    mock_fileinfo = mocker.Mock(editor=mock_editor, filename='orig',
                                newly_created=False)
    mock_document = mocker.Mock()
    mock_fileinfo.editor.document.return_value = mock_document
    mock_stack = mocker.Mock(data=[mock_fileinfo])
    addon = AutosaveForStack(mock_stack)
    addon.name_mapping = {'orig': 'autosave'}
    addon.file_hashes = {'autosave': 2}
    if have_hash:
        addon.file_hashes['orig'] = 1
    mock_stack.compute_hash.return_value = 3
    addon.maybe_autosave(0)
    mock_stack._write_to_file.assert_called_with(mock_fileinfo, 'autosave')
    mock_stack.compute_hash.assert_called_with(mock_fileinfo)
    if have_hash:
        assert addon.file_hashes == {'orig': 1, 'autosave': 3}
    else:
        assert addon.file_hashes == {'autosave': 3}
@pytest.mark.parametrize('latin', [True, False])
def test_save_autosave_mapping_with_nonempty_mapping(mocker, tmpdir, latin):
    """save_autosave_mapping writes the mapping to pid<pid>.txt.

    Tested with both Latin and non-Latin (CJK) original filenames to
    guard against encoding regressions; the file must round-trip through
    ast.literal_eval back to the original mapping.
    """
    mocker.patch('os.getpid', return_value=42)
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    addon = AutosaveForStack(None)
    if latin:
        addon.name_mapping = {'orig': 'autosave'}
    else:
        addon.name_mapping = {'原件': 'autosave'}
    addon.save_autosave_mapping()
    pidfile = tmpdir.join('pid42.txt')
    assert ast.literal_eval(pidfile.read()) == addon.name_mapping
@pytest.mark.parametrize('pidfile_exists', [False, True])
def test_save_autosave_mapping_with_empty_mapping(mocker, tmpdir,
                                                  pidfile_exists):
    """An empty mapping must leave no pidfile behind.

    If a stale pidfile exists it is deleted; if none exists nothing is
    created.
    """
    mocker.patch('os.getpid', return_value=42)
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    addon = AutosaveForStack(None)
    addon.name_mapping = {}
    pidfile = tmpdir.join('pid42.txt')
    if pidfile_exists:
        pidfile.write('This is an ex-parrot!')
    addon.save_autosave_mapping()
    assert not pidfile.check()
@pytest.mark.parametrize('exception', [False, True])
def test_autosave_remove_autosave_file(mocker, exception):
    """remove_autosave_file clears bookkeeping even when os.remove fails.

    name_mapping and file_hashes are emptied in both cases; an OSError
    from os.remove must surface an AutosaveErrorDialog instead of
    propagating.
    """
    mock_remove = mocker.patch('os.remove')
    if exception:
        mock_remove.side_effect = OSError()
    mock_dialog = mocker.patch(
        'spyder.plugins.editor.utils.autosave.AutosaveErrorDialog')
    mock_stack = mocker.Mock()
    fileinfo = mocker.Mock()
    fileinfo.filename = 'orig'
    addon = AutosaveForStack(mock_stack)
    addon.name_mapping = {'orig': 'autosave'}
    addon.file_hashes = {'autosave': 42}
    addon.remove_autosave_file(fileinfo.filename)
    assert addon.name_mapping == {}
    assert addon.file_hashes == {}
    mock_remove.assert_any_call('autosave')
    # The error dialog is shown exactly when removal raised.
    assert mock_dialog.called == exception
def test_get_autosave_filename(mocker, tmpdir):
    """get_autosave_filename is stable per file and uniquifies collisions.

    Asking twice for the same file returns the same name (presumably the
    mapping created by the first call is reused — confirm against
    AutosaveForStack); a different file with the same basename gets the
    -1 suffix.
    """
    addon = AutosaveForStack(mocker.Mock())
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    expected = str(tmpdir.join('foo.py'))
    assert addon.get_autosave_filename('foo.py') == expected
    expected2 = str(tmpdir.join('foo-1.py'))
    # Second request for the same file: same autosave name, not foo-1.py.
    assert addon.get_autosave_filename('foo.py') == expected
    # Different file, same basename: collides and is uniquified.
    assert addon.get_autosave_filename('ham/foo.py') == expected2
@pytest.mark.parametrize('have_hash', [True, False])
def test_autosave_file_renamed(mocker, tmpdir, have_hash):
    """file_renamed moves the autosave file, mapping and hashes.

    The old autosave file is removed, the new one written, and the hash
    of the original file (when present) is carried over under the new
    name while the autosave hash is recomputed.
    """
    mock_remove = mocker.patch('os.remove')
    mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path',
                 return_value=str(tmpdir))
    mock_editor = mocker.Mock()
    mock_fileinfo = mocker.Mock(editor=mock_editor, filename='new_foo.py',
                                newly_created=False)
    mock_document = mocker.Mock()
    mock_fileinfo.editor.document.return_value = mock_document
    mock_stack = mocker.Mock(data=[mock_fileinfo])
    mock_stack.has_filename.return_value = 0
    mock_stack.compute_hash.return_value = 3
    addon = AutosaveForStack(mock_stack)
    old_autosavefile = str(tmpdir.join('old_foo.py'))
    new_autosavefile = str(tmpdir.join('new_foo.py'))
    addon.name_mapping = {'old_foo.py': old_autosavefile}
    # Fix: the original unconditionally assigned file_hashes here and then
    # immediately overwrote it in the if/else below — dead code removed.
    if have_hash:
        addon.file_hashes = {'old_foo.py': 1, old_autosavefile: 42}
    else:
        addon.file_hashes = {old_autosavefile: 42}
    addon.file_renamed('old_foo.py', 'new_foo.py')
    mock_remove.assert_any_call(old_autosavefile)
    mock_stack._write_to_file.assert_called_with(
        mock_fileinfo, new_autosavefile)
    assert addon.name_mapping == {'new_foo.py': new_autosavefile}
    if have_hash:
        assert addon.file_hashes == {'new_foo.py': 1, new_autosavefile: 3}
    else:
        assert addon.file_hashes == {new_autosavefile: 3}
# Allow running this test module directly without invoking pytest manually.
if __name__ == "__main__":
    pytest.main()
| true | true |
f71ce3a01442e4158b8f21fcbecc9788b785237a | 2,662 | py | Python | rocon_client_sdk_py/virtual_core/actions/action_dock.py | boklae/rocon_client_sdk_py | 47ffd3a466fbbcb43f77338eddd9a9fa32b3a763 | [
"MIT"
] | null | null | null | rocon_client_sdk_py/virtual_core/actions/action_dock.py | boklae/rocon_client_sdk_py | 47ffd3a466fbbcb43f77338eddd9a9fa32b3a763 | [
"MIT"
] | 1 | 2021-06-08T21:01:30.000Z | 2021-06-08T21:01:30.000Z | rocon_client_sdk_py/virtual_core/actions/action_dock.py | boklae/rocon_client_sdk_py | 47ffd3a466fbbcb43f77338eddd9a9fa32b3a763 | [
"MIT"
] | null | null | null | from rocon_client_sdk_py.virtual_core.actions.base import Action
import asyncio
import pydash
from rocon_client_sdk_py.virtual_core.path_planner import PathPlanner
class Dock(Action):
    """Virtual 'dock' action: drive the simulated worker along a planned
    path to a selected station and align its final heading with the
    station pose."""

    def __init__(self):
        self.name = 'Dock'
        self.func_name = 'dock'

    async def on_define(self, context):
        """Build the action definition, offering every known station as a
        selectable 'station' argument (the last station is the default).

        NOTE(review): with zero stations the default lookup raises
        IndexError — behavior unchanged from the original.
        """
        print('define action of ' + self.name)
        api_config = context.api_configuration
        result = await api_config.get_stations()
        domain_station = []
        def cb(station):
            # Show "<name>(<marker_value>)" to the user; keep id as value.
            domain_station.append({'alias': station['name']+'('+str(station['marker_value'])+')', 'value': station['id']})
        pydash.map_(result, cb)
        return {
            'name': self.name,
            'func_name': self.func_name,
            'args': [
                {
                    'key': 'station',
                    'type': 'number',
                    'default': domain_station[len(domain_station) -1],
                    'domain': domain_station
                }
            ]
        }

    async def on_perform(self, context, args):
        """Walk the worker point-by-point along a trajectory to the chosen
        station, syncing the blackboard once per second, then snap the
        final heading to the station pose.

        Returns True on success, False when the station cannot be resolved.
        """
        station_id = pydash.find(args, {'key': 'station'})['value']
        station = await context.api_configuration.get_stations(station_id)
        if station is None:
            # Bug fix: the original only printed and then crashed below on
            # station['pose']; fail the action explicitly instead.
            print('failed to get station')
            return False
        worker = context.blackboard.get_worker()
        worker_location = pydash.get(worker, 'type_specific.location')
        path_planner = PathPlanner(context)
        await path_planner.init_map()
        path = path_planner.get_path(worker_location['map'], worker_location['pose2d'], station['pose'])
        # 1 m/s, 1000 ms per trajectory sample (per PathPlanner's contract).
        trajectory = path_planner.path_to_trajectory(path, 1, 1000)
        print('start to moving robot on path')
        for point in trajectory:
            worker = context.blackboard.get_worker()
            updated_type_specific = worker['type_specific']
            if 'theta' not in point:
                # Keep the current heading for samples that omit theta.
                point['theta'] = pydash.get(worker, 'type_specific.location.pose2d.theta')
            updated_type_specific['location'] = pydash.assign({}, updated_type_specific['location'], {
                'map': worker_location['map'],
                'pose2d': point
            })
            context.blackboard.set_worker({'type_specific': updated_type_specific})
            await context.blackboard.sync_worker()
            await asyncio.sleep(1)
        # Final alignment: face the station's docking direction.
        updated_type_specific['location']['pose2d']['theta'] = station['pose']['theta']
        context.blackboard.set_worker({'type_specific': updated_type_specific})
        await context.blackboard.sync_worker()
        await asyncio.sleep(1)
        return True
| 35.026316 | 122 | 0.597295 | from rocon_client_sdk_py.virtual_core.actions.base import Action
import asyncio
import pydash
from rocon_client_sdk_py.virtual_core.path_planner import PathPlanner
class Dock(Action):
    """Virtual 'dock' action: drives the simulated worker along a planned
    path to a selected station, then aligns its heading with the station."""
    def __init__(self):
        self.name = 'Dock'
        self.func_name = 'dock'
    async def on_define(self, context):
        """Build the action definition, offering each known station as a
        selectable 'station' argument (last one is the default)."""
        print('define action of ' + self.name)
        api_config = context.api_configuration
        result = await api_config.get_stations()
        domain_station = []
        def cb(station):
            # Alias shown to the user is "<name>(<marker_value>)"; value is the id.
            domain_station.append({'alias': station['name']+'('+str(station['marker_value'])+')', 'value': station['id']})
        pydash.map_(result, cb)
        return {
            'name': self.name,
            'func_name': self.func_name,
            'args': [
                {
                    'key': 'station',
                    'type': 'number',
                    'default': domain_station[len(domain_station) -1],
                    'domain': domain_station
                }
            ]
        }
    async def on_perform(self, context, args):
        """Move the worker one trajectory sample per second toward the
        chosen station, syncing the blackboard after each step."""
        station_id = pydash.find(args, {'key': 'station'})['value']
        station = await context.api_configuration.get_stations(station_id)
        if station is None:
            # NOTE(review): only logs — execution falls through and crashes
            # below on station['pose']; consider returning False here.
            print('failed to get station')
        worker = context.blackboard.get_worker()
        worker_location = pydash.get(worker, 'type_specific.location')
        path_planner = PathPlanner(context)
        await path_planner.init_map()
        path = path_planner.get_path(worker_location['map'], worker_location['pose2d'], station['pose'])
        trajectory = path_planner.path_to_trajectory(path, 1, 1000)
        print('start to moving robot on path')
        for point in trajectory:
            worker = context.blackboard.get_worker()
            updated_type_specific = worker['type_specific']
            if 'theta' in point:
                pass
            else:
                # Samples without theta keep the worker's current heading.
                point['theta'] = pydash.get(worker, 'type_specific.location.pose2d.theta')
            updated_type_specific['location'] = pydash.assign({}, updated_type_specific['location'], {
                'map': worker_location['map'],
                'pose2d': point
            })
            context.blackboard.set_worker({'type_specific': updated_type_specific})
            await context.blackboard.sync_worker()
            await asyncio.sleep(1)
        # Final alignment with the station's docking heading.
        updated_type_specific['location']['pose2d']['theta'] = station['pose']['theta']
        context.blackboard.set_worker({'type_specific': updated_type_specific})
        await context.blackboard.sync_worker()
        await asyncio.sleep(1)
        return True
| true | true |
f71ce3f2ed68163326db9d66587e4654492691b3 | 3,152 | py | Python | backend/chat/models.py | crowdbotics-apps/dsfs-28863 | fea2672275927bd37d23e2267273e0eae54340d2 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/chat/models.py | crowdbotics-apps/dsfs-28863 | fea2672275927bd37d23e2267273e0eae54340d2 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/chat/models.py | crowdbotics-apps/dsfs-28863 | fea2672275927bd37d23e2267273e0eae54340d2 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | from django.conf import settings
from django.db import models
class ForwardedMessage(models.Model):
    """Record of a message forwarded into another thread.

    Links the source message, the forwarding profile and the destination
    thread; the timestamp is set automatically on creation.
    """
    message = models.ForeignKey(
        "chat.Message",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_message",
    )
    forwarded_by = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_forwarded_by",
    )
    forwarded_to = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_forwarded_to",
    )
    timestamp_forwarded = models.DateTimeField(
        auto_now_add=True,
    )
class Message(models.Model):
    """A single chat message posted by a thread member.

    Tracks draft/delivery/read state with matching timestamps and an
    optional attachment URL.
    """
    message = models.TextField()
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="message_thread",
    )
    sent_by = models.ForeignKey(
        "chat.ThreadMember",
        on_delete=models.CASCADE,
        related_name="message_sent_by",
    )
    attachment = models.URLField()
    is_draft = models.BooleanField()
    is_delivered = models.BooleanField()
    is_read = models.BooleanField()
    timestamp_created = models.DateTimeField(
        auto_now_add=True,
    )
    # NOTE(review): these two are non-nullable with no default, so a value
    # is required at creation even for undelivered/unread messages —
    # confirm whether null=True was intended.
    timestamp_delivered = models.DateTimeField()
    timestamp_read = models.DateTimeField()
class Thread(models.Model):
    """A chat conversation (group or direct) with a name and photo URL."""
    name = models.CharField(
        max_length=255,
    )
    thread_photo = models.URLField()
    timestamp_created = models.DateTimeField(
        auto_now_add=True,
    )
class ThreadAction(models.Model):
    """Audit entry for a profile acting on a thread.

    NOTE(review): `action` is a free CharField capped at 7 characters;
    the valid values are not visible here — consider `choices`.
    """
    action = models.CharField(
        max_length=7,
    )
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="threadaction_thread",
    )
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="threadaction_profile",
    )
    timestamp_action = models.DateTimeField(
        auto_now_add=True,
    )
class MessageAction(models.Model):
    """Audit entry for a profile acting on a single message.

    NOTE(review): `action` is a free CharField capped at 7 characters;
    valid values are not visible here — consider `choices`.
    """
    action = models.CharField(
        max_length=7,
    )
    message = models.ForeignKey(
        "chat.Message",
        on_delete=models.CASCADE,
        related_name="messageaction_message",
    )
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="messageaction_profile",
    )
    timestamp_action = models.DateTimeField(
        auto_now_add=True,
    )
class ThreadMember(models.Model):
    """Membership of a profile in a thread, with join/leave history.

    NOTE(review): timestamp_left and last_rejoined are non-nullable with
    no default — a value is required even for members who never left;
    confirm whether null=True was intended.
    """
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="threadmember_profile",
    )
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="threadmember_thread",
    )
    is_admin = models.BooleanField()
    timestamp_joined = models.DateTimeField(
        auto_now_add=True,
    )
    timestamp_left = models.DateTimeField()
    last_rejoined = models.DateTimeField()
# Create your models here.
| 25.626016 | 53 | 0.659581 | from django.conf import settings
from django.db import models
class ForwardedMessage(models.Model):
    """Record of a message forwarded into another thread."""
    message = models.ForeignKey(
        "chat.Message",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_message",
    )
    forwarded_by = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_forwarded_by",
    )
    forwarded_to = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="forwardedmessage_forwarded_to",
    )
    timestamp_forwarded = models.DateTimeField(
        auto_now_add=True,
    )
class Message(models.Model):
    """A chat message with draft/delivery/read state and timestamps."""
    message = models.TextField()
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="message_thread",
    )
    sent_by = models.ForeignKey(
        "chat.ThreadMember",
        on_delete=models.CASCADE,
        related_name="message_sent_by",
    )
    attachment = models.URLField()
    is_draft = models.BooleanField()
    is_delivered = models.BooleanField()
    is_read = models.BooleanField()
    timestamp_created = models.DateTimeField(
        auto_now_add=True,
    )
    # NOTE(review): non-nullable without defaults — confirm null=True
    # was not intended for not-yet-delivered/read messages.
    timestamp_delivered = models.DateTimeField()
    timestamp_read = models.DateTimeField()
class Thread(models.Model):
    """A chat conversation with a display name and photo URL."""
    name = models.CharField(
        max_length=255,
    )
    thread_photo = models.URLField()
    timestamp_created = models.DateTimeField(
        auto_now_add=True,
    )
class ThreadAction(models.Model):
    """Audit entry for a profile acting on a thread."""
    action = models.CharField(
        max_length=7,
    )
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="threadaction_thread",
    )
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="threadaction_profile",
    )
    timestamp_action = models.DateTimeField(
        auto_now_add=True,
    )
class MessageAction(models.Model):
    """Audit entry for a profile acting on a message."""
    action = models.CharField(
        max_length=7,
    )
    message = models.ForeignKey(
        "chat.Message",
        on_delete=models.CASCADE,
        related_name="messageaction_message",
    )
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="messageaction_profile",
    )
    timestamp_action = models.DateTimeField(
        auto_now_add=True,
    )
class ThreadMember(models.Model):
    """Membership of a profile in a thread, with join/leave history."""
    profile = models.ForeignKey(
        "chat_user_profile.Profile",
        on_delete=models.CASCADE,
        related_name="threadmember_profile",
    )
    thread = models.ForeignKey(
        "chat.Thread",
        on_delete=models.CASCADE,
        related_name="threadmember_thread",
    )
    is_admin = models.BooleanField()
    timestamp_joined = models.DateTimeField(
        auto_now_add=True,
    )
    timestamp_left = models.DateTimeField()
    last_rejoined = models.DateTimeField()
| true | true |
f71ce4194dd771aa309e312526fdfe217a9bdfea | 2,299 | py | Python | src/models/vectorTraining.py | Will03/NVSM_pytorch | 45e91efa6e4571a955c0f76807f2d6b5d7ffa66a | [
"MIT"
] | null | null | null | src/models/vectorTraining.py | Will03/NVSM_pytorch | 45e91efa6e4571a955c0f76807f2d6b5d7ffa66a | [
"MIT"
] | null | null | null | src/models/vectorTraining.py | Will03/NVSM_pytorch | 45e91efa6e4571a955c0f76807f2d6b5d7ffa66a | [
"MIT"
] | 1 | 2020-06-04T06:45:43.000Z | 2020-06-04T06:45:43.000Z | import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import TfidfVectorizer
import os
dataPath = '../../Willll/' # Relative path of homework data
# r=root, d=directories, f = files
# Parallel lists: DocList[i] names the file whose text is DocData[i];
# QueryList[i] names the query whose text is QueryData[i].
DocList = []
QueryList = []
DocData = []
QueryData = []
def articleParser(myPath):
    """Read the whole file at `myPath` and return it as a single string
    with all newline characters removed."""
    with open(myPath, 'r') as source:
        raw_text = source.read()
    return raw_text.replace('\n', '')
# read Query List
# Query names are listed one per line; blank lines are skipped.
with open(dataPath+'test/query_list.txt', 'r') as fp:
    tmpLine = fp.readline()
    while tmpLine:
        tmpLine = tmpLine.strip('\n')
        if tmpLine != '':
            QueryList.append(tmpLine)
        tmpLine = fp.readline()
# Read query data
for eachQ in QueryList:
    QueryData.append(articleParser(dataPath+'test/query/%s'%eachQ))
# Collect every document filename under the doc/ directory tree.
for r, d, f in os.walk(dataPath+'doc'):
    for file in f:
        DocList.append(file)
for eachD in DocList:
    DocData.append(articleParser(dataPath+'doc/'+eachD))
# TF-IDF
max_df = 0.95 # Ignore words with high df. (Similar effect to stopword filtering)
min_df = 5 # Ignore words with low df.
smooth_idf = True # Smooth idf weights by adding 1 to df.
sublinear_tf = True # Replace tf with 1 + log(tf).
# Rocchio (Below is a param set called Ide Dec-Hi)
# NOTE(review): the Rocchio parameters below are declared but never used
# in this script — relevance feedback does not appear to be implemented.
alpha = 1
beta = 0.75
gamma = 0.15
rel_count = 5 # Use top-5 relevant documents to update query vector.
nrel_count = 1 # Use only the most non-relevant document to update query vector.
iters = 5
print('start train')
# Build TF-IDF vectors of docs and queries
vectorizer = TfidfVectorizer(max_df=max_df, min_df=min_df,
                             smooth_idf=smooth_idf, sublinear_tf=sublinear_tf)
doc_tfidfs = vectorizer.fit_transform(DocData).toarray()
query_vecs = vectorizer.transform(QueryData).toarray()
print('start count simi')
# Rank documents based on cosine similarity
# argsort is ascending, so flip to get most-similar-first per query.
cos_sim = cosine_similarity(query_vecs, doc_tfidfs)
rankings = np.flip(cos_sim.argsort(), axis=1)
print('start write file')
# Write the top-`limit` ranked document names per query, comma-separated.
limit = 600
for query_name, ranking in zip(QueryList, rankings):
    ranked_docs = ''
    index = 0
    for idx in ranking:
        if index >= limit:
            break
        ranked_docs += '%s,' % DocList[idx]
        # Bug fix: the counter was never incremented in the original, so
        # the cap never applied and every document was written.
        index += 1
    with open('../../Willll/%s.txt' % query_name, mode='w') as file:
        file.write('%s' % (ranked_docs))
| 28.7375 | 88 | 0.675511 | import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import TfidfVectorizer
import os
# Relative path of the homework data directory.
dataPath = '../../Willll/'
# Parallel lists: DocList[i] names the file whose text is DocData[i];
# QueryList[i] names the query whose text is QueryData[i].
DocList = []
QueryList = []
DocData = []
QueryData = []
def articleParser(myPath):
    """Return the file's full contents as one string, newlines removed."""
    with open(myPath, 'r') as fp:
        docData = fp.read().replace('\n', '')
    return docData
# Read the query list (one name per line, blank lines skipped).
with open(dataPath+'test/query_list.txt', 'r') as fp:
    tmpLine = fp.readline()
    while tmpLine:
        tmpLine = tmpLine.strip('\n')
        if tmpLine != '':
            QueryList.append(tmpLine)
        tmpLine = fp.readline()
# Load query texts and the document corpus.
for eachQ in QueryList:
    QueryData.append(articleParser(dataPath+'test/query/%s'%eachQ))
for r, d, f in os.walk(dataPath+'doc'):
    for file in f:
        DocList.append(file)
for eachD in DocList:
    DocData.append(articleParser(dataPath+'doc/'+eachD))
# TF-IDF settings: drop very frequent and very rare terms, smooth idf,
# use sublinear (1 + log) term frequency.
max_df = 0.95
min_df = 5
smooth_idf = True
sublinear_tf = True
# Rocchio (Ide Dec-Hi) parameters.
# NOTE(review): declared but never used below — feedback not implemented.
alpha = 1
beta = 0.75
gamma = 0.15
rel_count = 5
nrel_count = 1
iters = 5
print('start train')
vectorizer = TfidfVectorizer(max_df=max_df, min_df=min_df,
                             smooth_idf=smooth_idf, sublinear_tf=sublinear_tf)
doc_tfidfs = vectorizer.fit_transform(DocData).toarray()
query_vecs = vectorizer.transform(QueryData).toarray()
print('start count simi')
# argsort is ascending, so flip for most-similar-first rankings.
cos_sim = cosine_similarity(query_vecs, doc_tfidfs)
rankings = np.flip(cos_sim.argsort(), axis=1)
print('start write file')
limit = 600
# Write the top-`limit` ranked document names per query, comma-separated.
for query_name, ranking in zip(QueryList, rankings):
    ranked_docs = ''
    index = 0
    for idx in ranking:
        if index >= limit:
            break
        ranked_docs += '%s,' % DocList[idx]
        # Bug fix: the counter was never incremented in the original, so
        # the cap never applied and every document was written.
        index += 1
    with open('../../Willll/%s.txt' % query_name, mode='w') as file:
        file.write('%s' % (ranked_docs))
| true | true |
f71ce456b76c3630058e368b142c7aeace1c9037 | 4,589 | py | Python | Final.min.py | faiyazsamin/FaceRecognition | 9c0bd65f300784910a923f446cf33bacfc502b52 | [
"MIT"
] | 1 | 2019-01-27T11:05:11.000Z | 2019-01-27T11:05:11.000Z | Final.min.py | faiyazsamin/FaceRecognition | 9c0bd65f300784910a923f446cf33bacfc502b52 | [
"MIT"
] | null | null | null | Final.min.py | faiyazsamin/FaceRecognition | 9c0bd65f300784910a923f446cf33bacfc502b52 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
import os
subjects = ["","Mama","Samin","Delwar"]
def detect_faces(colored_img, scaleFactor=1.06):
    """Detect the first face in a BGR image.

    Returns (face_roi, rect): the grayscale crop of the first detected
    face and its (x, y, w, h) box, or (None, None) when nothing is found.
    """
    img_copy = colored_img.copy()
    gray = cv2.cvtColor(img_copy, cv2.COLOR_BGR2GRAY)
    f_cascade = cv2.CascadeClassifier('data/lbpcascade_frontalface.xml')
    faces = f_cascade.detectMultiScale(gray, scaleFactor=scaleFactor, minNeighbors=5)
    if len(faces) == 0:
        return None, None
    (x, y, w, h) = faces[0]
    # Bug fix: rows are indexed by height, columns by width. The original
    # sliced gray[y:y+w, x:x+h], which is only correct for square boxes.
    return gray[y:y+h, x:x+w], faces[0]
def prepare_training_data(data_folder_path):
    """Collect (face, label) training pairs from a directory tree.

    Expects one subdirectory per subject named 's<label>' (e.g. 's1');
    each image inside is shown briefly, its first face is detected, and
    the face crop plus integer label are accumulated. Images with no
    detectable face are skipped.
    """
    dirs = os.listdir(data_folder_path)
    faces = []
    labels = []
    for dir_name in dirs:
        # Only 's<N>' directories are subjects; ignore everything else.
        if not dir_name.startswith("s"):
            continue
        label = int(dir_name.replace("s", ""))
        subject_dir_path = data_folder_path + "/" + dir_name
        subject_images_names = os.listdir(subject_dir_path)
        for image_name in subject_images_names:
            # Skip hidden/system files such as .DS_Store.
            if image_name.startswith("."):
                continue
            image_path = subject_dir_path + "/" + image_name
            image = cv2.imread(image_path)
            cv2.imshow("Training on image...", cv2.resize(image, (400, 500)))
            cv2.waitKey(10)
            face, rect = detect_faces(image)
            if face is not None:
                faces.append(face)
                labels.append(label)
    cv2.destroyAllWindows()
    cv2.waitKey(1)
    cv2.destroyAllWindows()
    print("Total faces: ", len(faces))
    print("Total labels: ", len(labels))
    return faces, labels
def trainData(trainingDataPath, output_path):
    """Train an LBPH face recognizer on the labelled images found under
    `trainingDataPath` and persist the trained model to `output_path`."""
    samples, sample_labels = prepare_training_data(trainingDataPath)
    recognizer = cv2.face.LBPHFaceRecognizer_create()
    recognizer.train(samples, np.array(sample_labels))
    recognizer.write(output_path)
def loadTrainedData(path):
    """Create an LBPH recognizer and load a previously trained model
    from `path`."""
    model = cv2.face.LBPHFaceRecognizer_create()
    model.read(path)
    return model
def predictStaticImage(test_img, trainer_file):
    """Predict the subject of the first face in `test_img`.

    Draws a box and the predicted name on a copy of the image and
    returns it; prints the recognizer's confidence score.

    Raises ValueError when no face is detected (the original fell
    through and crashed inside cv2 with an opaque error instead).
    """
    img = test_img.copy()
    face, rect = detect_faces(img)
    if face is None:
        # Robustness fix: fail with a clear message instead of passing
        # None into the recognizer.
        raise ValueError('No face detected in the given image')
    lt = loadTrainedData(trainer_file)
    label, confidence = lt.predict(face)
    label_text = subjects[label]
    (x, y, w, h) = rect
    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)
    cv2.putText(img, label_text, (rect[0], rect[1] - 5), cv2.FONT_HERSHEY_PLAIN, 1.5, (0, 255, 0), 2)
    print("Confidence =", confidence)
    return img
def showImage(image):
    """Display `image` in a window and block until any key is pressed."""
    window_title = 'Frame'
    cv2.imshow(window_title, image)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
def camToFile(framesToCapture, output_dir):
    """Capture `framesToCapture` face crops from the webcam into `output_dir`.

    Samples faces from camera index 1; every (100/framesToCapture)-th
    detection is saved as '<n>.jpg'. Stops after 100 detections or when
    'q' is pressed.
    """
    cam = cv2.VideoCapture(1)
    detector = cv2.CascadeClassifier('data/haarcascade_frontalface_alt.xml')
    sampleNum = 0
    while True:
        ret, img = cam.read()
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        face = detector.detectMultiScale(gray, 1.5, 5)
        for (x, y, w, h) in face:
            cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
            sampleNum = sampleNum + 1
            # Keep only every k-th detection so that exactly
            # framesToCapture files are written over 100 samples.
            if sampleNum%(100/framesToCapture) == 0:
                print("Frames Captured:", int(sampleNum/(100/framesToCapture)))
                cv2.imwrite(output_dir+"/"+ str(int(sampleNum/(100/framesToCapture))) + ".jpg", gray[y:y + h, x:x + w])
        cv2.imshow('frame', img)
        if cv2.waitKey(100) & 0xFF == ord('q'):
            break
        elif sampleNum >= 100:
            break
def detectFace(trainer_file):
    """Run live face recognition from the webcam until 'q' is pressed.

    Loads the trained LBPH model from `trainer_file`, detects faces in
    each frame from camera index 1, and overlays the predicted subject
    name on the video feed.
    """
    recognizer = cv2.face.LBPHFaceRecognizer_create()
    recognizer.read(trainer_file)
    faceCascade = cv2.CascadeClassifier("data/haarcascade_frontalface_alt.xml")
    cam = cv2.VideoCapture(1)
    font = cv2.FONT_HERSHEY_DUPLEX
    while True:
        ret, im = cam.read()
        gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
        faces = faceCascade.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=5, minSize=(100, 100),
                                             flags=cv2.CASCADE_SCALE_IMAGE)
        for (x, y, w, h) in faces:
            nbr_predicted, conf = recognizer.predict(gray[y:y + h, x:x + w])
            cv2.rectangle(im, (x - 50, y - 50), (x + w + 50, y + h + 50), (0, 225, 0), 2)
            nbr_predicted = subjects[nbr_predicted]
            cv2.putText(im, str(nbr_predicted), (x + 30, y + h + 30), font, 1, (0, 0, 225))  # Draw the text
        cv2.imshow('FaceDetector', im)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    cam.release()
    cv2.destroyAllWindows()
# Script entry: uncomment trainData/showImage to retrain or test on a
# static image; by default run live recognition with the saved model.
#trainData('training-data','test.yml')
detectFace('test.yml')
#showImage(predictStaticImage(cv2.imread("test-data/4.jpg"),'test3.yml'))
#camToFile(20,'training-data/s7') | 32.546099 | 119 | 0.619961 | import cv2
import numpy as np
import os
subjects = ["","Mama","Samin","Delwar"]
def detect_faces(colored_img, scaleFactor=1.06):
    """Detect the first face in a BGR image; return (face_roi, rect) or
    (None, None) when no face is found."""
    img_copy = colored_img.copy()
    gray = cv2.cvtColor(img_copy, cv2.COLOR_BGR2GRAY)
    f_cascade = cv2.CascadeClassifier('data/lbpcascade_frontalface.xml')
    faces = f_cascade.detectMultiScale(gray, scaleFactor=scaleFactor, minNeighbors=5)
    if len(faces) == 0:
        return None, None
    (x, y, w, h) = faces[0]
    # Bug fix: rows are indexed by height, columns by width; the original
    # sliced gray[y:y+w, x:x+h], only correct for square boxes.
    return gray[y:y+h, x:x+w], faces[0]
def prepare_training_data(data_folder_path):
    """Collect (face, label) pairs from 's<label>' subject directories,
    skipping hidden files and images with no detectable face."""
    dirs = os.listdir(data_folder_path)
    faces = []
    labels = []
    for dir_name in dirs:
        # Only 's<N>' directories are subjects.
        if not dir_name.startswith("s"):
            continue
        label = int(dir_name.replace("s", ""))
        subject_dir_path = data_folder_path + "/" + dir_name
        subject_images_names = os.listdir(subject_dir_path)
        for image_name in subject_images_names:
            # Skip hidden/system files such as .DS_Store.
            if image_name.startswith("."):
                continue
            image_path = subject_dir_path + "/" + image_name
            image = cv2.imread(image_path)
            cv2.imshow("Training on image...", cv2.resize(image, (400, 500)))
            cv2.waitKey(10)
            face, rect = detect_faces(image)
            if face is not None:
                faces.append(face)
                labels.append(label)
    cv2.destroyAllWindows()
    cv2.waitKey(1)
    cv2.destroyAllWindows()
    print("Total faces: ", len(faces))
    print("Total labels: ", len(labels))
    return faces, labels
def trainData(trainingDataPath, output_path):
    """Train an LBPH recognizer on `trainingDataPath` and save it."""
    face_recognizer = cv2.face.LBPHFaceRecognizer_create()
    faces, labels = prepare_training_data(trainingDataPath)
    face_recognizer.train(faces, np.array(labels))
    face_recognizer.write(output_path)
def loadTrainedData(path):
    """Load a previously trained LBPH model from `path`."""
    recognizer = cv2.face.LBPHFaceRecognizer_create()
    recognizer.read(path)
    return recognizer
def predictStaticImage(test_img,trainer_file):
    """Predict and annotate the first face in `test_img`.

    NOTE(review): crashes inside lt.predict when no face is detected
    (detect_faces returns None) — consider an explicit guard.
    """
    img = test_img.copy()
    face, rect = detect_faces(img)
    lt = loadTrainedData(trainer_file)
    label, confidence = lt.predict(face)
    label_text = subjects[label]
    (x, y, w, h) = rect
    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)
    cv2.putText(img, label_text, (rect[0], rect[1] - 5), cv2.FONT_HERSHEY_PLAIN, 1.5, (0, 255, 0), 2)
    print("Confidence =",confidence)
    return img
def showImage(image):
    """Display `image` until any key is pressed."""
    cv2.imshow('Frame', image)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
def camToFile(framesToCapture,output_dir):
    """Capture `framesToCapture` webcam face crops into `output_dir`;
    stops after 100 detections or on 'q'."""
    cam = cv2.VideoCapture(1)
    detector = cv2.CascadeClassifier('data/haarcascade_frontalface_alt.xml')
    sampleNum = 0
    while True:
        ret, img = cam.read()
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        face = detector.detectMultiScale(gray, 1.5, 5)
        for (x, y, w, h) in face:
            cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
            sampleNum = sampleNum + 1
            # Keep every k-th detection so exactly framesToCapture files
            # are written over 100 samples.
            if sampleNum%(100/framesToCapture) == 0:
                print("Frames Captured:", int(sampleNum/(100/framesToCapture)))
                cv2.imwrite(output_dir+"/"+ str(int(sampleNum/(100/framesToCapture))) + ".jpg", gray[y:y + h, x:x + w])
        cv2.imshow('frame', img)
        if cv2.waitKey(100) & 0xFF == ord('q'):
            break
        elif sampleNum >= 100:
            break
def detectFace(trainer_file):
    """Live webcam recognition loop; overlays predicted names, quits on 'q'."""
    recognizer = cv2.face.LBPHFaceRecognizer_create()
    recognizer.read(trainer_file)
    faceCascade = cv2.CascadeClassifier("data/haarcascadeade_frontalface_alt.xml")
    cam = cv2.VideoCapture(1)
    font = cv2.FONT_HERSHEY_DUPLEX
    while True:
        ret, im = cam.read()
        gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
        faces = faceCascade.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=5, minSize=(100, 100),
                                             flags=cv2.CASCADE_SCALE_IMAGE)
        for (x, y, w, h) in faces:
            nbr_predicted, conf = recognizer.predict(gray[y:y + h, x:x + w])
            cv2.rectangle(im, (x - 50, y - 50), (x + w + 50, y + h + 50), (0, 225, 0), 2)
            nbr_predicted = subjects[nbr_predicted]
            cv2.putText(im, str(nbr_predicted), (x + 30, y + h + 30), font, 1, (0, 0, 225))
        cv2.imshow('FaceDetector', im)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    cam.release()
    cv2.destroyAllWindows()
# Script entry: run live recognition with the saved model.
detectFace('test.yml')
| true | true |
f71ce46ec0f8659d23bb2988ab53f559e7e4f0b9 | 838 | py | Python | oscar/lib/python2.7/site-packages/phonenumbers/data/region_883.py | AMuratTuran/mkn | 557086426773ced10d82c969304bd349414a601e | [
"BSD-3-Clause"
] | 4 | 2018-10-19T04:36:20.000Z | 2020-02-13T16:14:09.000Z | oscar/lib/python2.7/site-packages/phonenumbers/data/region_883.py | AMuratTuran/mkn | 557086426773ced10d82c969304bd349414a601e | [
"BSD-3-Clause"
] | 5 | 2020-03-24T16:37:25.000Z | 2021-06-10T21:24:54.000Z | upibo-venv/Lib/site-packages/phonenumbers/data/region_883.py | smbpgroup/upibo | 625dcda9f9692c62aeb9fe8f7123a5d407c610ae | [
"BSD-3-Clause"
] | null | null | null | """Auto-generated file, do not edit by hand. 883 metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Numbering-plan metadata for shared country code 883 ("International
# Networks"); only VoIP number patterns exist for this code.
PHONE_METADATA_883 = PhoneMetadata(id='001', country_code=883, international_prefix=None,
    general_desc=PhoneNumberDesc(national_number_pattern='51\\d{7}(?:\\d{3})?', possible_length=(9, 12)),
    voip=PhoneNumberDesc(national_number_pattern='51(?:00\\d{5}(?:\\d{3})?|[13]0\\d{8})', example_number='510012345', possible_length=(9, 12)),
    number_format=[NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['510']),
        NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['510']),
        NumberFormat(pattern='(\\d{4})(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['51[13]'])])
| 83.8 | 143 | 0.650358 | from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Auto-generated numbering-plan metadata for shared country code 883
# ("International Networks"); do not edit by hand.
PHONE_METADATA_883 = PhoneMetadata(id='001', country_code=883, international_prefix=None,
    general_desc=PhoneNumberDesc(national_number_pattern='51\\d{7}(?:\\d{3})?', possible_length=(9, 12)),
    voip=PhoneNumberDesc(national_number_pattern='51(?:00\\d{5}(?:\\d{3})?|[13]0\\d{8})', example_number='510012345', possible_length=(9, 12)),
    number_format=[NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['510']),
        NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['510']),
        NumberFormat(pattern='(\\d{4})(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['51[13]'])])
| true | true |
f71ce4ec1624e15791c2a9cbe2ce17cdd55390d6 | 733 | py | Python | mycloud/drive/filesystem/file_version.py | ThomasGassmann/swisscom-my-cloud-backup | 97e222c45a54197c82c8f3a5d59aa20bf3382ed8 | [
"MIT"
] | 4 | 2019-11-28T22:10:43.000Z | 2022-01-23T15:18:26.000Z | mycloud/drive/filesystem/file_version.py | ThomasGassmann/swisscom-my-cloud-backup | 97e222c45a54197c82c8f3a5d59aa20bf3382ed8 | [
"MIT"
] | 18 | 2019-01-20T22:30:48.000Z | 2020-06-09T21:16:07.000Z | mycloud/drive/filesystem/file_version.py | thomasgassmann/mycloud-cli | 97e222c45a54197c82c8f3a5d59aa20bf3382ed8 | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from mycloud.common import sha256_file
from mycloud.constants import VERSION_HASH_LENGTH
class CalculatableVersion(ABC):
    """Interface for objects that can produce a version string."""
    @abstractmethod
    def calculate_version(self):
        """Return the version string for this object."""
        raise NotImplementedError()
class BasicStringVersion(CalculatableVersion):
    """Version wrapper around a fixed, precomputed version string."""
    def __init__(self, version: str):
        # Keep the literal value; calculate_version just echoes it back.
        self._value = version
    def calculate_version(self):
        """Return the stored version string unchanged."""
        return self._value
class HashCalculatedVersion(CalculatableVersion):
    """Version derived from the SHA-256 digest of a local file."""

    def __init__(self, local_file: str):
        self.local_file = local_file

    def get_hash(self):
        """Return the full SHA-256 hex digest of the file."""
        return sha256_file(self.local_file)

    def calculate_version(self):
        """Return the digest truncated to VERSION_HASH_LENGTH characters."""
        return self.get_hash()[:VERSION_HASH_LENGTH]
| 22.212121 | 65 | 0.740791 | from abc import ABC, abstractmethod
from mycloud.common import sha256_file
from mycloud.constants import VERSION_HASH_LENGTH
class CalculatableVersion(ABC):
@abstractmethod
def calculate_version(self):
raise NotImplementedError()
class BasicStringVersion(CalculatableVersion):
def __init__(self, version: str):
self._version = version
def calculate_version(self):
return self._version
class HashCalculatedVersion(CalculatableVersion):
def __init__(self, local_file: str):
self.local_file = local_file
def calculate_version(self):
return sha256_file(self.local_file)[:VERSION_HASH_LENGTH]
def get_hash(self):
return sha256_file(self.local_file)
| true | true |
f71ce668eb491f3489debb1b15bbb8e0d468f1c4 | 4,724 | py | Python | pyzoo/test/zoo/zouwu/autots/test_auto_ts.py | ankitdobhal/analytics-zoo | b8374bcd6c73bba49fe0b0ab075528cdd94cf2af | [
"Apache-2.0"
] | null | null | null | pyzoo/test/zoo/zouwu/autots/test_auto_ts.py | ankitdobhal/analytics-zoo | b8374bcd6c73bba49fe0b0ab075528cdd94cf2af | [
"Apache-2.0"
] | 1 | 2020-04-17T02:41:28.000Z | 2020-04-20T02:37:41.000Z | pyzoo/test/zoo/zouwu/autots/test_auto_ts.py | ankitdobhal/analytics-zoo | b8374bcd6c73bba49fe0b0ab075528cdd94cf2af | [
"Apache-2.0"
] | 1 | 2020-12-21T11:48:49.000Z | 2020-12-21T11:48:49.000Z | #
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
import numpy as np
from test.zoo.pipeline.utils.test_utils import ZooTestCase
from zoo.automl.config.recipe import LSTMGridRandomRecipe, MTNetGridRandomRecipe
from zoo.zouwu.autots.forecast import AutoTSTrainer
from zoo.zouwu.autots.forecast import TSPipeline
import pandas as pd
@pytest.mark.usefixtures("init_ray_context_fixture")
class TestZouwuAutoTS(ZooTestCase):
    """Integration tests for the zouwu AutoTS trainer and pipeline.

    The three test methods previously duplicated the trainer construction
    and post-fit assertions verbatim; that shared logic now lives in the
    private helpers `_make_trainer` and `_check_pipeline`.
    """

    def setup_method(self, method):
        self.create_data()

    def teardown_method(self, method):
        pass

    def create_data(self):
        """Build random train/validation frames with a datetime column."""
        sample_num = np.random.randint(100, 200)
        self.train_df = pd.DataFrame({"datetime": pd.date_range(
            '1/1/2019', periods=sample_num), "value": np.random.randn(sample_num)})
        val_sample_num = np.random.randint(20, 30)
        self.validation_df = pd.DataFrame({"datetime": pd.date_range(
            '1/1/2019', periods=val_sample_num), "value": np.random.randn(val_sample_num)})

    def _make_trainer(self, horizon):
        # All tests share the same single-target forecasting setup.
        return AutoTSTrainer(dt_col="datetime",
                             target_col="value",
                             horizon=horizon,
                             extra_features_col=None)

    def _check_pipeline(self, pipeline, horizon):
        # Shared post-fit sanity checks: type, config, evaluate shape, predict.
        assert isinstance(pipeline, TSPipeline)
        assert pipeline.internal.config is not None
        evaluate_result = pipeline.evaluate(self.validation_df)
        if horizon > 1:
            assert evaluate_result[0].shape[0] == horizon
        else:
            assert evaluate_result[0]
        predict_df = pipeline.predict(self.validation_df)
        assert not predict_df.empty

    def test_AutoTSTrainer_smoke(self):
        horizon = np.random.randint(1, 6)
        tsp = self._make_trainer(horizon)
        pipeline = tsp.fit(self.train_df)
        self._check_pipeline(pipeline, horizon)

    def test_AutoTrainer_LstmRecipe(self):
        horizon = np.random.randint(1, 6)
        tsp = self._make_trainer(horizon)
        pipeline = tsp.fit(self.train_df,
                           self.validation_df,
                           recipe=LSTMGridRandomRecipe(
                               num_rand_samples=5,
                               batch_size=[1024],
                               lstm_2_units=[8],
                               training_iteration=1,
                               epochs=1))
        self._check_pipeline(pipeline, horizon)

    def test_AutoTrainer_MTNetRecipe(self):
        horizon = np.random.randint(1, 6)
        tsp = self._make_trainer(horizon)
        pipeline = tsp.fit(self.train_df,
                           self.validation_df,
                           recipe=MTNetGridRandomRecipe(
                               num_rand_samples=5,
                               time_step=[5],
                               long_num=[2],
                               batch_size=[1024],
                               cnn_hid_size=[32, 50],
                               training_iteration=1,
                               epochs=1))
        self._check_pipeline(pipeline, horizon)
# Allow running this test module directly (outside a pytest invocation).
if __name__ == "__main__":
    pytest.main([__file__])
| 39.041322 | 91 | 0.575783 |
import pytest
import numpy as np
from test.zoo.pipeline.utils.test_utils import ZooTestCase
from zoo.automl.config.recipe import LSTMGridRandomRecipe, MTNetGridRandomRecipe
from zoo.zouwu.autots.forecast import AutoTSTrainer
from zoo.zouwu.autots.forecast import TSPipeline
import pandas as pd
@pytest.mark.usefixtures("init_ray_context_fixture")
class TestZouwuAutoTS(ZooTestCase):
def setup_method(self, method):
self.create_data()
def teardown_method(self, method):
pass
def create_data(self):
sample_num = np.random.randint(100, 200)
self.train_df = pd.DataFrame({"datetime": pd.date_range(
'1/1/2019', periods=sample_num), "value": np.random.randn(sample_num)})
val_sample_num = np.random.randint(20, 30)
self.validation_df = pd.DataFrame({"datetime": pd.date_range(
'1/1/2019', periods=val_sample_num), "value": np.random.randn(val_sample_num)})
def test_AutoTSTrainer_smoke(self):
horizon = np.random.randint(1, 6)
tsp = AutoTSTrainer(dt_col="datetime",
target_col="value",
horizon=horizon,
extra_features_col=None
)
pipeline = tsp.fit(self.train_df)
assert isinstance(pipeline, TSPipeline)
assert pipeline.internal.config is not None
evaluate_result = pipeline.evaluate(self.validation_df)
if horizon > 1:
assert evaluate_result[0].shape[0] == horizon
else:
assert evaluate_result[0]
predict_df = pipeline.predict(self.validation_df)
assert not predict_df.empty
def test_AutoTrainer_LstmRecipe(self):
horizon = np.random.randint(1, 6)
tsp = AutoTSTrainer(dt_col="datetime",
target_col="value",
horizon=horizon,
extra_features_col=None
)
pipeline = tsp.fit(self.train_df,
self.validation_df,
recipe=LSTMGridRandomRecipe(
num_rand_samples=5,
batch_size=[1024],
lstm_2_units=[8],
training_iteration=1,
epochs=1
))
assert isinstance(pipeline, TSPipeline)
assert pipeline.internal.config is not None
evaluate_result = pipeline.evaluate(self.validation_df)
if horizon > 1:
assert evaluate_result[0].shape[0] == horizon
else:
assert evaluate_result[0]
predict_df = pipeline.predict(self.validation_df)
assert not predict_df.empty
def test_AutoTrainer_MTNetRecipe(self):
horizon = np.random.randint(1, 6)
tsp = AutoTSTrainer(dt_col="datetime",
target_col="value",
horizon=horizon,
extra_features_col=None
)
pipeline = tsp.fit(self.train_df,
self.validation_df,
recipe=MTNetGridRandomRecipe(
num_rand_samples=5,
time_step=[5],
long_num=[2],
batch_size=[1024],
cnn_hid_size=[32, 50],
training_iteration=1,
epochs=1
))
assert isinstance(pipeline, TSPipeline)
assert pipeline.internal.config is not None
evaluate_result = pipeline.evaluate(self.validation_df)
if horizon > 1:
assert evaluate_result[0].shape[0] == horizon
else:
assert evaluate_result[0]
predict_df = pipeline.predict(self.validation_df)
assert not predict_df.empty
if __name__ == "__main__":
pytest.main([__file__])
| true | true |
f71ce6ddebc57474b11797d83db4b3270ece5414 | 2,689 | py | Python | src/reports_api/models/phase_code.py | dinesh-aot/eao-project-reports | f2741d381371fb9a65b6b9f9909161eb333a7b21 | [
"Apache-2.0"
] | null | null | null | src/reports_api/models/phase_code.py | dinesh-aot/eao-project-reports | f2741d381371fb9a65b6b9f9909161eb333a7b21 | [
"Apache-2.0"
] | 8 | 2022-02-14T23:21:52.000Z | 2022-03-30T20:04:19.000Z | src/reports_api/models/phase_code.py | dinesh-aot/eao-project-reports | f2741d381371fb9a65b6b9f9909161eb333a7b21 | [
"Apache-2.0"
] | 4 | 2022-02-14T23:22:50.000Z | 2022-02-16T17:40:28.000Z | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model to handle all operations related to Payment Disbursement status code."""
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from .code_table import CodeTable
from .db import db
class PhaseCode(db.Model, CodeTable):
    """Model class for a phase of work under an EA Act."""

    __tablename__ = 'phase_codes'

    id = Column(Integer, primary_key=True, autoincrement=True)  # TODO check how it can be inherited from parent
    work_type_id = Column(ForeignKey('work_types.id'), nullable=False)
    ea_act_id = Column(ForeignKey('ea_acts.id'), nullable=False)
    start_event = Column(String())
    end_event = Column(String)
    duration = Column(Integer())
    legislated = Column(Boolean())
    sort_order = Column(Integer())
    color = Column(String(15))

    work_type = relationship('WorkType', foreign_keys=[work_type_id], lazy='select')
    ea_act = relationship('EAAct', foreign_keys=[ea_act_id], lazy='select')
    milestones = relationship("Milestone",
                              primaryjoin="PhaseCode.id==Milestone.phase_id",
                              back_populates="phase")

    def as_dict(self):
        """Return a JSON-serializable dict, including the related work
        type, EA Act and milestones."""
        return {
            'id': self.id,
            'name': self.name,
            'sort_order': self.sort_order,
            'start_event': self.start_event,
            'end_event': self.end_event,
            'duration': self.duration,
            'legislated': self.legislated,
            'work_type': self.work_type.as_dict(),
            'ea_act': self.ea_act.as_dict(),
            'milestones': [milestone.as_dict() for milestone in self.milestones],
            'color': self.color
        }

    @classmethod
    def find_by_ea_act_and_work_type(cls, _ea_act_id, _work_type_id):
        """Return all phase codes matching the given EA Act and work type."""
        # Query via `cls` so subclasses (if any) query their own mapper.
        return db.session.query(cls).filter_by(
            work_type_id=_work_type_id,
            ea_act_id=_ea_act_id).all()  # pylint: disable=no-member
| 39.544118 | 115 | 0.661212 |
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from .code_table import CodeTable
from .db import db
class PhaseCode(db.Model, CodeTable):
__tablename__ = 'phase_codes'
id = Column(Integer, primary_key=True, autoincrement=True)
work_type_id = Column(ForeignKey('work_types.id'), nullable=False)
ea_act_id = Column(ForeignKey('ea_acts.id'), nullable=False)
start_event = Column(String())
end_event = Column(String)
duration = Column(Integer())
legislated = Column(Boolean())
sort_order = Column(Integer())
color = Column(String(15))
work_type = relationship('WorkType', foreign_keys=[work_type_id], lazy='select')
ea_act = relationship('EAAct', foreign_keys=[ea_act_id], lazy='select')
milestones = relationship("Milestone",
primaryjoin="PhaseCode.id==Milestone.phase_id",
back_populates="phase")
def as_dict(self):
return {
'id': self.id,
'name': self.name,
'sort_order': self.sort_order,
'start_event': self.start_event,
'end_event': self.end_event,
'duration': self.duration,
'legislated': self.legislated,
'work_type': self.work_type.as_dict(),
'ea_act': self.ea_act.as_dict(),
'milestones': [milestone.as_dict() for milestone in self.milestones],
'color': self.color
}
@classmethod
def find_by_ea_act_and_work_type(cls, _ea_act_id, _work_type_id):
code_table = db.session.query(PhaseCode).filter_by(work_type_id=_work_type_id,
ea_act_id=_ea_act_id).all()
return code_table
| true | true |
f71ce75531d655af08723237dc7e58101caae903 | 554 | py | Python | Python/leetcode.252.meeting-rooms.py | tedye/leetcode | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | [
"MIT"
] | 4 | 2015-10-10T00:30:55.000Z | 2020-07-27T19:45:54.000Z | Python/leetcode.252.meeting-rooms.py | tedye/leetcode | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | [
"MIT"
] | null | null | null | Python/leetcode.252.meeting-rooms.py | tedye/leetcode | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | [
"MIT"
] | null | null | null | # Definition for an interval.
# class Interval(object):
# def __init__(self, s=0, e=0):
# self.start = s
# self.end = e
class Solution(object):
    def canAttendMeetings(self, intervals):
        """
        :type intervals: List[Interval]
        :rtype: bool

        A person can attend every meeting iff, after sorting by start
        time, no meeting starts before the previous one ends.
        """
        intervals.sort(key=lambda x: x.start)
        # zip pairs each meeting with its successor; `all` over an empty
        # pairing (0 or 1 meetings) is True, matching the old early return.
        # (The old loop used `xrange`, which is Python-2-only.)
        return all(prev.end <= cur.start
                   for prev, cur in zip(intervals, intervals[1:]))
| 27.7 | 55 | 0.534296 |
class Solution(object):
def canAttendMeetings(self, intervals):
if len(intervals) < 2:
return True
intervals.sort(key = lambda x: x.start)
for i in xrange(1, len(intervals)):
if intervals[i-1].end > intervals[i].start:
return False
return True
| true | true |
f71ce7d27a7503b6d207689cfb75ccb7b36f12cd | 9,482 | py | Python | code/pytorch/methods/SSAC.py | hzm2016/assistive-gym-robosuite | 5c529f4444cc386383618bfa584341740a8468f9 | [
"MIT"
] | 1 | 2021-11-22T07:45:28.000Z | 2021-11-22T07:45:28.000Z | code/pytorch/methods/SSAC.py | hzm2016/assistive-gym-robosuite | 5c529f4444cc386383618bfa584341740a8468f9 | [
"MIT"
] | null | null | null | code/pytorch/methods/SSAC.py | hzm2016/assistive-gym-robosuite | 5c529f4444cc386383618bfa584341740a8468f9 | [
"MIT"
] | null | null | null | import os
import torch
import torch.nn.functional as F
import glob
import numpy as np
from torch.optim import Adam
from utils.utils import soft_update, hard_update
from utils.model import GaussianPolicy, QNetwork, DeterministicPolicy
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Input, merge, Lambda, Activation
from keras.layers.merge import Add, Multiply, Concatenate, concatenate
from keras.initializers import RandomUniform
from keras.optimizers import Adam
import keras.backend as K
from keras import metrics
def weighted_entropy(p, w_norm):
    """Weighted sum of p * log(p) under the normalized importance weights."""
    safe_log = K.log(p + 1e-8)  # 1e-8 guards log(0)
    return K.sum(w_norm * p * safe_log)
def weighted_mean(p, w_norm):
    """Column-wise mean of p with each row scaled by its normalized weight."""
    scaled = np.multiply(w_norm, p)
    return K.mean(scaled, axis=0)
def weighted_mse(Q_target, Q_pred, w_norm):
    """Importance-weighted mean squared error between targets and predictions."""
    squared_err = K.square(Q_target - Q_pred)
    return K.mean(w_norm * squared_err)
def softmax(x):
    """Row-wise numerically stable softmax of a 2-D array.

    Subtracting each row's max before exponentiating avoids overflow;
    the result is unchanged because softmax is shift-invariant per row.
    """
    x_max = np.amax(x, axis=1, keepdims=True)
    e_x = np.exp(x - x_max)
    # Broadcasting replaces the original np.matlib.repmat calls, which
    # required `import numpy.matlib` that this module never performs
    # (np.matlib would raise AttributeError at runtime).
    return e_x / np.sum(e_x, axis=1, keepdims=True)
def weighted_mean_array(x, weights):
    """Per-row weighted mean of x, returned as an (n, 1) column vector.

    Equivalent to mean(x * weights, axis=1) / mean(weights, axis=1).
    """
    numerator = np.mean(np.multiply(x, weights), axis=1, keepdims=True)
    denominator = np.mean(weights, axis=1, keepdims=True)
    return numerator / denominator
def p_sample(p):
    """Sample one column index per row, treating each row of `p` as
    (unnormalized) probabilities.

    Uses inverse-CDF sampling: the chosen index is the first cumulative
    bucket that reaches the row's uniform draw. Returns an (n, 1) array
    of indices (matching the old np.matrix argmax shape).
    """
    row_sums = np.sum(p, axis=1, keepdims=True)
    p_cumsum = np.cumsum(p / row_sums, axis=1)
    # One uniform draw per row; broadcasting replaces np.matlib.repmat,
    # which required `import numpy.matlib` that this module never performs.
    rand = np.random.random((p.shape[0], 1))
    return np.argmax(p_cumsum >= rand, axis=1).reshape(-1, 1)
def entropy(p):
    # Sum of p * log(p), i.e. the *negative* Shannon entropy; the 1e-8
    # term guards log(0).
    return K.sum(p * K.log((p + 1e-8)))
def add_normal(x_input, outshape, at_eps):
    """Perturb x_input with Gaussian noise scaled by at_eps.

    The noise is proportional to |x_input| elementwise, so larger
    activations receive proportionally larger perturbations.
    NOTE(review): presumably feeds the VAT-style regularization term in
    Multi_SAC (`vat_loss`) — confirm against the caller.
    """
    epsilon = K.random_normal(shape=outshape, mean=0., stddev=1.)
    x_out = x_input + at_eps * np.multiply(epsilon, np.absolute(x_input))
    return x_out
def kl(p, q):
    # KL divergence KL(p || q); 1e-8 guards both log(0) and division by 0.
    return K.sum(p * K.log((p + 1e-8) / (q + 1e-8)))
class Multi_SAC(object):
    """Soft actor-critic agent with multiple options (sub-policies).

    Holds one torch critic (plus a target copy) and one Gaussian policy,
    and pairs them with a Keras/TensorFlow "option" model that selects
    among `option_dim` sub-policies.

    NOTE(review): this class mixes PyTorch, Keras and a TensorFlow
    session-style API and cannot run as written:
      * `tf` is never imported, and `np.stop_gradient` does not exist;
      * `state` (in __init__), `self.target_q_value`,
        `self.predicted_v_value`, `self.c_ent`, `self.entropy_coeff`,
        `self.c_reg`, `self.option_lr`, `self.sess`,
        `self.create_option_model`, `self.predict_actor_target` and
        `self.predict_critic_target` are all undefined in this module;
      * the file imports `Adam` from torch.optim and then re-imports
        `Adam` from keras.optimizers, so the optimizers constructed
        below use the Keras class on torch parameters.
    Confirm against the upstream implementation before using.
    """

    def __init__(self, state_dim, action_dim, option_dim, max_action, action_space):
        # SAC hyper-parameters.
        self.alpha = 0.2
        self.lr = 0.0003
        self.option_num = option_dim
        self.policy_type = "Gaussian"
        self.target_update_interval = 1
        self.automatic_entropy_tuning = True
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        """ critic network """
        self.critic = QNetwork(state_dim, action_dim, 400).to(device=self.device)
        # NOTE(review): `Adam` here resolves to keras.optimizers.Adam (the
        # later import shadows torch.optim.Adam) — cannot optimize torch params.
        self.critic_optim = Adam(self.critic.parameters(), lr=self.lr)
        # Target critic starts as an exact copy of the online critic.
        self.critic_target = QNetwork(state_dim, action_dim, 400).to(self.device)
        hard_update(self.critic_target, self.critic)
        # NOTE(review): `state` is undefined at this point (NameError).
        self.sampling_prob = torch.FloatTensor(state).to(self.device)
        # ===================================================================== #
        #                              Option Model                             #
        # ===================================================================== #
        self.option_state_input, self.option_action_input, self.option_input_concat, self.option_out_dec, \
        self.option_out, self.option_out_noise, self.option_model = self.create_option_model()
        # Advantage-weighted option loss: reconstruction + entropy + VAT terms.
        # NOTE(review): numpy has no `stop_gradient`, and `tf` is not imported.
        Advantage = np.stop_gradient(self.target_q_value - self.predicted_v_value)
        Weight = np.divide(np.exp(Advantage - np.max(Advantage)), self.sampling_prob)
        W_norm = Weight/K.mean(Weight)
        critic_conditional_entropy = weighted_entropy(self.option_out, tf.stop_gradient(W_norm))
        p_weighted_ave = weighted_mean(self.option_out, tf.stop_gradient(W_norm))
        self.critic_entropy = critic_conditional_entropy - self.c_ent * entropy(p_weighted_ave)
        self.vat_loss = kl(self.option_out, self.option_out_noise)
        self.reg_loss = metrics.mean_absolute_error(self.option_input_concat, self.option_out_dec)
        self.option_loss = self.reg_loss + self.entropy_coeff * (self.critic_entropy) + self.c_reg * self.vat_loss
        self.option_optimize = tf.train.AdamOptimizer(self.option_lr).minimize(self.option_loss)
        """ option network """
        self.it = 0
        if self.policy_type == "Gaussian":
            # Target Entropy = −dim(A) (e.g. , -6 for HalfCheetah-v2) as given in the paper
            if self.automatic_entropy_tuning == True:
                self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item()
                self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device)
                self.alpha_optim = Adam([self.log_alpha], lr=self.lr)
            self.policy = GaussianPolicy(state_dim, action_dim, 400, max_action).to(self.device)
            self.policy_optim = Adam(self.policy.parameters(), lr=self.lr)
        elif self.policy_type == "Multi_Gaussian":
            # NOTE(review): identical to the "Gaussian" branch; looks like a
            # placeholder for per-option policies — confirm intent.
            if self.automatic_entropy_tuning == True:
                self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item()
                self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device)
                self.alpha_optim = Adam([self.log_alpha], lr=self.lr)
            self.policy = GaussianPolicy(state_dim, action_dim, 400, max_action).to(self.device)
            self.policy_optim = Adam(self.policy.parameters(), lr=self.lr)
        else:
            # Deterministic policy: entropy term disabled.
            self.alpha = 0
            self.automatic_entropy_tuning = False
            self.policy = DeterministicPolicy(state_dim, action_dim, 400, max_action).to(self.device)
            self.policy_optim = Adam(self.policy.parameters(), lr=self.lr)

    def select_action(self, state, eval=True):
        """Return one action for `state`: a stochastic sample when
        eval is False, the deterministic (mean) action otherwise.

        NOTE(review): the `eval` parameter shadows the builtin.
        """
        state = torch.FloatTensor(state).to(self.device).unsqueeze(0)
        if eval == False:
            action, _, _ = self.policy.sample(state)
        else:
            _, _, action = self.policy.sample(state)
        return action.detach().cpu().numpy()[0]

    def train_actor_option(self, inputs, a_gradient, option):
        """Apply one policy-gradient step to the given option's actor.

        NOTE(review): TF-session API; `self.sess` and the actor lists are
        never created in this class.
        """
        self.sess.run(self.actor_optimizer_list[option], feed_dict={
            self.actor_state_input_list[option]: inputs,
            self.action_gradient_list[option]: a_gradient
        })

    def train_critic(self, inputs, action, target_q_value, predicted_v_value, sampling_prob):
        """Run one critic update step (TF session API)."""
        return self.sess.run([self.critic_optimize], feed_dict={
            self.critic_state_input: inputs,
            self.critic_action_input: action,
            self.target_q_value: target_q_value,
            self.predicted_v_value: predicted_v_value,
            self.sampling_prob: sampling_prob
        })

    def train_option(self, inputs, action, target_q_value, predicted_v_value, sampling_prob):
        """Run one option-model update step (TF session API)."""
        return self.sess.run([self.option_optimize], feed_dict={
            self.option_state_input: inputs,
            self.option_action_input: action,
            self.target_q_value: target_q_value,
            self.predicted_v_value: predicted_v_value,
            self.sampling_prob: sampling_prob
        })

    def max_option(self, inputs):
        """Greedy option choice.

        Evaluates every option's target actor/critic on `inputs` and
        returns (o_max, Q_max, Q_predict), where Q_predict stacks the
        per-option Q columns into an (n, option_num) array.
        """
        Q_predict = []
        n = inputs.shape[0]
        for o in range(int(self.option_num)):
            action_i = self.predict_actor_target(inputs, o)
            Q_predict_i, _ = self.predict_critic_target(inputs, action_i)
            if o == 0:
                Q_predict = np.reshape(Q_predict_i, (-1, 1))
            else:
                Q_predict = np.concatenate((Q_predict, np.reshape(Q_predict_i, (-1, 1))), axis=1)
        o_max = np.argmax(Q_predict, axis=1)
        Q_max = np.max(Q_predict, axis=1)
        return o_max, Q_max, Q_predict

    def softmax_option_target(self, inputs):
        """Stochastic option choice.

        Samples one option per row from a softmax over the per-option
        target Q-values; returns (o_softmax, Q_softmax as (n, 1), Q_predict).
        """
        Q_predict = []
        n = inputs.shape[0]
        for o in range(int(self.option_num)):
            action_i = self.predict_actor_target(inputs, o)
            Q_predict_i, _ = self.predict_critic_target(inputs, action_i)
            if o == 0:
                Q_predict = np.reshape( Q_predict_i, (-1, 1) )
            else:
                Q_predict = np.concatenate((Q_predict, np.reshape(Q_predict_i, (-1, 1)) ), axis= 1)
        p = softmax(Q_predict)
        o_softmax = p_sample(p)
        n = Q_predict.shape[0]
        # Q-value of the sampled option for each row.
        Q_softmax = Q_predict[np.arange(n), o_softmax.flatten()]
        return o_softmax, np.reshape(Q_softmax, (n, 1)), Q_predict

    def predict_actor_option(self, inputs, option):
        """Forward pass of one option's online actor network (TF session API)."""
        return self.sess.run(self.actor_out_list[option], feed_dict={self.actor_state_input_list[option]: inputs})

    def predict_actor(self, inputs, options):
        """Evaluate every option's actor, then select per-row actions
        according to `options` (a scalar option index, or an array with
        one option index per input row)."""
        action_list = []
        for o in range(self.option_num):
            action_o = self.predict_actor_option(inputs, o)
            action_list.append(action_o)
        n = inputs.shape[0]
        action = 0
        if n == 1 or np.isscalar(options):
            action = action_list[options]
        # calculate the action row-by-row when each sample has its own option
        else:
            for i in range(n):
                if i == 0:
                    action = action_list[int(options[i])][i, :]
                else:
                    action = np.vstack((action, action_list[int(options[i])][i, :]))
        return action
| 39.508333 | 118 | 0.626977 | import os
import torch
import torch.nn.functional as F
import glob
import numpy as np
from torch.optim import Adam
from utils.utils import soft_update, hard_update
from utils.model import GaussianPolicy, QNetwork, DeterministicPolicy
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Input, merge, Lambda, Activation
from keras.layers.merge import Add, Multiply, Concatenate, concatenate
from keras.initializers import RandomUniform
from keras.optimizers import Adam
import keras.backend as K
from keras import metrics
def weighted_entropy(p, w_norm):
return K.sum(w_norm * p * K.log(p + 1e-8))
def weighted_mean(p, w_norm):
p_weighted = np.multiply(w_norm, p)
return K.mean(p_weighted, axis=0)
def weighted_mse(Q_target, Q_pred, w_norm):
error = K.square(Q_target - Q_pred)
return K.mean(w_norm * error)
def softmax(x):
col = x.shape[1]
x_max = np.reshape(np.amax(x, axis=1), (-1, 1))
e_x = np.exp(x - np.matlib.repmat(x_max, 1, col) )
e_x_sum = np.reshape( np.sum(e_x, axis=1), (-1, 1))
out = e_x / np.matlib.repmat(e_x_sum, 1, col)
return out
def weighted_mean_array(x, weights):
weights_mean = np.mean(weights, axis=1)
x_weighted = np.multiply(x, weights)
mean_weighted = np.divide(np.mean(x_weighted, axis=1), weights_mean)
return np.reshape(mean_weighted, (-1, 1))
def p_sample(p):
row, col = p.shape
p_sum = np.reshape(np.sum(p, axis=1), (row, 1))
p_normalized = p/np.matlib.repmat(p_sum, 1, col)
p_cumsum = np.matrix(np.cumsum( p_normalized, axis=1))
rand = np.matlib.repmat(np.random.random((row, 1)), 1, col)
o_softmax = np.argmax(p_cumsum >= rand, axis=1)
return o_softmax
def entropy(p):
return K.sum(p * K.log((p + 1e-8)))
def add_normal(x_input, outshape, at_eps):
epsilon = K.random_normal(shape=outshape, mean=0., stddev=1.)
x_out = x_input + at_eps * np.multiply(epsilon, np.absolute(x_input))
return x_out
def kl(p, q):
return K.sum(p * K.log((p + 1e-8) / (q + 1e-8)))
class Multi_SAC(object):
def __init__(self, state_dim, action_dim, option_dim, max_action, action_space):
self.alpha = 0.2
self.lr = 0.0003
self.option_num = option_dim
self.policy_type = "Gaussian"
self.target_update_interval = 1
self.automatic_entropy_tuning = True
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
self.critic = QNetwork(state_dim, action_dim, 400).to(device=self.device)
self.critic_optim = Adam(self.critic.parameters(), lr=self.lr)
self.critic_target = QNetwork(state_dim, action_dim, 400).to(self.device)
hard_update(self.critic_target, self.critic)
self.sampling_prob = torch.FloatTensor(state).to(self.device)
self.option_state_input, self.option_action_input, self.option_input_concat, self.option_out_dec, \
self.option_out, self.option_out_noise, self.option_model = self.create_option_model()
Advantage = np.stop_gradient(self.target_q_value - self.predicted_v_value)
Weight = np.divide(np.exp(Advantage - np.max(Advantage)), self.sampling_prob)
W_norm = Weight/K.mean(Weight)
critic_conditional_entropy = weighted_entropy(self.option_out, tf.stop_gradient(W_norm))
p_weighted_ave = weighted_mean(self.option_out, tf.stop_gradient(W_norm))
self.critic_entropy = critic_conditional_entropy - self.c_ent * entropy(p_weighted_ave)
self.vat_loss = kl(self.option_out, self.option_out_noise)
self.reg_loss = metrics.mean_absolute_error(self.option_input_concat, self.option_out_dec)
self.option_loss = self.reg_loss + self.entropy_coeff * (self.critic_entropy) + self.c_reg * self.vat_loss
self.option_optimize = tf.train.AdamOptimizer(self.option_lr).minimize(self.option_loss)
self.it = 0
if self.policy_type == "Gaussian":
if self.automatic_entropy_tuning == True:
self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item()
self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device)
self.alpha_optim = Adam([self.log_alpha], lr=self.lr)
self.policy = GaussianPolicy(state_dim, action_dim, 400, max_action).to(self.device)
self.policy_optim = Adam(self.policy.parameters(), lr=self.lr)
elif self.policy_type == "Multi_Gaussian":
if self.automatic_entropy_tuning == True:
self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item()
self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device)
self.alpha_optim = Adam([self.log_alpha], lr=self.lr)
self.policy = GaussianPolicy(state_dim, action_dim, 400, max_action).to(self.device)
self.policy_optim = Adam(self.policy.parameters(), lr=self.lr)
else:
self.alpha = 0
self.automatic_entropy_tuning = False
self.policy = DeterministicPolicy(state_dim, action_dim, 400, max_action).to(self.device)
self.policy_optim = Adam(self.policy.parameters(), lr=self.lr)
def select_action(self, state, eval=True):
state = torch.FloatTensor(state).to(self.device).unsqueeze(0)
if eval == False:
action, _, _ = self.policy.sample(state)
else:
_, _, action = self.policy.sample(state)
return action.detach().cpu().numpy()[0]
def train_actor_option(self, inputs, a_gradient, option):
self.sess.run(self.actor_optimizer_list[option], feed_dict={
self.actor_state_input_list[option]: inputs,
self.action_gradient_list[option]: a_gradient
})
def train_critic(self, inputs, action, target_q_value, predicted_v_value, sampling_prob):
return self.sess.run([self.critic_optimize], feed_dict={
self.critic_state_input: inputs,
self.critic_action_input: action,
self.target_q_value: target_q_value,
self.predicted_v_value: predicted_v_value,
self.sampling_prob: sampling_prob
})
def train_option(self, inputs, action, target_q_value, predicted_v_value, sampling_prob):
return self.sess.run([self.option_optimize], feed_dict={
self.option_state_input: inputs,
self.option_action_input: action,
self.target_q_value: target_q_value,
self.predicted_v_value: predicted_v_value,
self.sampling_prob: sampling_prob
})
def max_option(self, inputs):
Q_predict = []
n = inputs.shape[0]
for o in range(int(self.option_num)):
action_i = self.predict_actor_target(inputs, o)
Q_predict_i, _ = self.predict_critic_target(inputs, action_i)
if o == 0:
Q_predict = np.reshape(Q_predict_i, (-1, 1))
else:
Q_predict = np.concatenate((Q_predict, np.reshape(Q_predict_i, (-1, 1))), axis=1)
o_max = np.argmax(Q_predict, axis=1)
Q_max = np.max(Q_predict, axis=1)
return o_max, Q_max, Q_predict
def softmax_option_target(self, inputs):
Q_predict = []
n = inputs.shape[0]
for o in range(int(self.option_num)):
action_i = self.predict_actor_target(inputs, o)
Q_predict_i, _ = self.predict_critic_target(inputs, action_i)
if o == 0:
Q_predict = np.reshape( Q_predict_i, (-1, 1) )
else:
Q_predict = np.concatenate((Q_predict, np.reshape(Q_predict_i, (-1, 1)) ), axis= 1)
p = softmax(Q_predict)
o_softmax = p_sample(p)
n = Q_predict.shape[0]
Q_softmax = Q_predict[np.arange(n), o_softmax.flatten()]
return o_softmax, np.reshape(Q_softmax, (n, 1)), Q_predict
def predict_actor_option(self, inputs, option):
return self.sess.run(self.actor_out_list[option], feed_dict={self.actor_state_input_list[option]: inputs})
def predict_actor(self, inputs, options):
action_list = []
for o in range(self.option_num):
action_o = self.predict_actor_option(inputs, o)
action_list.append(action_o)
n = inputs.shape[0]
action = 0
if n == 1 or np.isscalar(options):
action = action_list[options]
else:
for i in range(n):
if i == 0:
action = action_list[int(options[i])][i, :]
else:
action = np.vstack((action, action_list[int(options[i])][i, :]))
return action
| true | true |
f71ce7dd49a7ba035fc78f709cfd97e3e48d60f3 | 1,963 | py | Python | tests/integration/test_image.py | youngjun0627/backend.ai-client-py | be7c174ab73e112fdb8be61e6affc20fc72f7d59 | [
"MIT"
] | 7 | 2019-01-18T08:08:42.000Z | 2022-02-10T00:36:24.000Z | tests/integration/test_image.py | youngjun0627/backend.ai-client-py | be7c174ab73e112fdb8be61e6affc20fc72f7d59 | [
"MIT"
] | 179 | 2017-09-07T04:54:44.000Z | 2022-03-29T11:30:47.000Z | tests/integration/test_image.py | youngjun0627/backend.ai-client-py | be7c174ab73e112fdb8be61e6affc20fc72f7d59 | [
"MIT"
] | 13 | 2017-09-08T05:37:44.000Z | 2021-09-14T23:35:31.000Z | import pytest
from ai.backend.client.exceptions import BackendAPIError
from ai.backend.client.session import Session
# module-level marker
pytestmark = pytest.mark.integration
@pytest.mark.asyncio
async def test_list_images_by_admin():
    """An admin session can list images; entries expose name/tag/hash."""
    with Session() as sess:
        listed = sess.Image.list()
        first = listed[0]
        assert len(listed) > 0
        for key in ('name', 'tag', 'hash'):
            assert key in first
@pytest.mark.asyncio
async def test_list_images_by_user(userconfig):
    """A non-admin session can also list images with the same core fields."""
    with Session() as sess:
        listed = sess.Image.list()
        first = listed[0]
        assert len(listed) > 0
        for key in ('name', 'tag', 'hash'):
            assert key in first
# Not covered: rescanning images is invasive (it would mutate shared
# registry state used by the other integration tests).
# async def test_rescan_images():
#     pass
@pytest.mark.asyncio
async def test_alias_dealias_image_by_admin():
    """An admin can attach an alias to an image and then remove it."""
    with Session() as sess:
        def lua_image_info():
            # Locate the lua:5.1-alpine3.8 image used as the test target.
            for entry in sess.Image.list(
                    fields=('name', 'registry', 'tag', 'aliases')):
                if 'lua' in entry['name'] and '5.1-alpine3.8' in entry['tag']:
                    return entry

        info = lua_image_info()
        test_alias = 'testalias-b9f1ce136f584ca892d5fef3e78dd11d'
        test_target = '{}/{}:{}'.format(
            info['registry'], info['name'], info['tag'])
        sess.Image.aliasImage(test_alias, test_target)
        assert lua_image_info()['aliases'] == [test_alias]
        sess.Image.dealiasImage(test_alias)
        assert len(lua_image_info()['aliases']) == 0
@pytest.mark.asyncio
async def test_user_cannot_mutate_alias_dealias(userconfig):
    """Non-admin users are rejected when aliasing or dealiasing images."""
    test_alias = 'testalias-b9f1ce136f584ca892d5fef3e78dd11d'
    with Session() as sess:
        with pytest.raises(BackendAPIError):
            sess.Image.aliasImage(test_alias, 'lua:5.1-alpine3.8')
        with pytest.raises(BackendAPIError):
            sess.Image.dealiasImage(test_alias)
| 30.2 | 77 | 0.65512 | import pytest
from ai.backend.client.exceptions import BackendAPIError
from ai.backend.client.session import Session
pytestmark = pytest.mark.integration
@pytest.mark.asyncio
async def test_list_images_by_admin():
with Session() as sess:
images = sess.Image.list()
image = images[0]
assert len(images) > 0
assert 'name' in image
assert 'tag' in image
assert 'hash' in image
@pytest.mark.asyncio
async def test_list_images_by_user(userconfig):
with Session() as sess:
images = sess.Image.list()
image = images[0]
assert len(images) > 0
assert 'name' in image
assert 'tag' in image
assert 'hash' in image
@pytest.mark.asyncio
async def test_alias_dealias_image_by_admin():
with Session() as sess:
def get_test_image_info():
items = sess.Image.list(
fields=('name', 'registry', 'tag', 'aliases'))
for item in items:
if 'lua' in item['name'] and '5.1-alpine3.8' in item['tag']:
return item
img_info = get_test_image_info()
test_alias = 'testalias-b9f1ce136f584ca892d5fef3e78dd11d'
test_target = img_info['registry'] + '/' + img_info['name'] + ':' + \
img_info['tag']
sess.Image.aliasImage(test_alias, test_target)
assert get_test_image_info()['aliases'] == [test_alias]
sess.Image.dealiasImage(test_alias)
assert len(get_test_image_info()['aliases']) == 0
@pytest.mark.asyncio
async def test_user_cannot_mutate_alias_dealias(userconfig):
with Session() as sess:
test_alias = 'testalias-b9f1ce136f584ca892d5fef3e78dd11d'
with pytest.raises(BackendAPIError):
sess.Image.aliasImage(test_alias, 'lua:5.1-alpine3.8')
with pytest.raises(BackendAPIError):
sess.Image.dealiasImage(test_alias)
| true | true |
f71ce86c8da0066e9a06c434d7f057156813ebaa | 1,012 | bzl | Python | third_party/llvm/workspace.bzl | Georgeiva/tensorflow | 1c0b85a0d49c27a5a5beec26c3e9ffceebb89652 | [
"Apache-2.0"
] | 2 | 2021-06-17T21:26:38.000Z | 2021-06-20T18:25:57.000Z | third_party/llvm/workspace.bzl | craymichael/tensorflow | b5de565c9c57fa7ca02d42bcfe6f470ecf117ba5 | [
"Apache-2.0"
] | null | null | null | third_party/llvm/workspace.bzl | craymichael/tensorflow | b5de565c9c57fa7ca02d42bcfe6f470ecf117ba5 | [
"Apache-2.0"
] | null | null | null | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "366df11a35392c946678f1af94038945c23f06c8"
LLVM_SHA256 = "cd720387229e8ee74cc9d7d685a298c709fb2bdb2063301e509f40dacbdbaaea"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
patch_file = "//third_party/llvm:disable_parallelism_in_verifier.patch",
)
| 40.48 | 149 | 0.655138 |
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
LLVM_COMMIT = "366df11a35392c946678f1af94038945c23f06c8"
LLVM_SHA256 = "cd720387229e8ee74cc9d7d685a298c709fb2bdb2063301e509f40dacbdbaaea"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
patch_file = "//third_party/llvm:disable_parallelism_in_verifier.patch",
)
| true | true |
f71ce9fb21782d5e8358c36fd801ad537ff4f7fd | 4,469 | py | Python | src/mockdown/instantiation/prolog/logic.py | MangoTeam/mockdown | 6f42395b07a3a83d5a3703d30985ef5a5068bf09 | [
"MIT"
] | null | null | null | src/mockdown/instantiation/prolog/logic.py | MangoTeam/mockdown | 6f42395b07a3a83d5a3703d30985ef5a5068bf09 | [
"MIT"
] | 2 | 2022-01-13T03:52:58.000Z | 2022-03-12T01:03:41.000Z | src/mockdown/instantiation/prolog/logic.py | MangoTeam/mockdown | 6f42395b07a3a83d5a3703d30985ef5a5068bf09 | [
"MIT"
] | null | null | null | import operator
from importlib import resources
from typing import List, Tuple, Generator
from pyswip import Prolog # type: ignore
from mockdown.constraint import ConstraintKind
from mockdown.constraint.factory import ConstraintFactory
from mockdown.model import Attribute, IView, IAnchor, AnchorID
from mockdown.constraint import *
from mockdown.types import NT
def valid_constraints(root: IView[NT], visibilities: List[Tuple[IAnchor[NT], IAnchor[NT]]], debug: bool = True) \
-> Generator[IConstraint, None, None]:
"""
Computes the valid constraint pairs (or singletons) for various
types of constraint.
"""
outfile = "debug.pl"
# Note: Prolog is a singleton!
prolog = Prolog()
try:
with open(outfile, 'w') as dbfile:
# Load static terms/predicates.
with resources.path(__package__, 'logic.pl') as path:
prolog.consult(str(path))
# Add dynamic terms/predicates.
prolog.dynamic('view/1')
prolog.dynamic('parent/2')
prolog.dynamic('visible/2')
for view in root:
prolog.assertz(f"view('{view.name}')")
if debug:
dbfile.write(f"view('{view.name}').\n")
for child in view.children:
prolog.assertz(f"parent('{view.name}', '{child.name}')")
if debug: dbfile.write(f"parent('{view.name}', '{child.name}').\n")
for vis in visibilities:
[a1, a2] = vis
a1_term = f"anchor('{a1.view.name}', '{a1.attribute.value}')"
a2_term = f"anchor('{a2.view.name}', '{a2.attribute.value}')"
prolog.assertz(f"visible({a1_term}, {a2_term})")
if debug: dbfile.write(f"visible({a1_term}, {a2_term}).\n")
# todo: Post-process output? Necessary?
# ops = [operator.le, operator.ge, operator.eq]
ops = [operator.eq]
for answer in prolog.query("aspect_ratio_size(V)"):
v, = [answer[k] for k in ('V',)]
yield ConstraintFactory.create(kind=ConstraintKind.SIZE_ASPECT_RATIO,
x_id=AnchorID(v, Attribute('height')),
y_id=AnchorID(v, Attribute('width')),
op=operator.eq)
for answer in prolog.query("absolute_size(V, A)"):
v, a = [answer[k] for k in ('V', 'A')]
for op in ops:
yield ConstraintFactory.create(kind=ConstraintKind.SIZE_CONSTANT,
x_id=None, y_id=AnchorID(v, Attribute(a)),
op=op)
for answer in prolog.query("parent_relative_size(V, A, W, B)"):
v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')]
yield ConstraintFactory.create(kind=ConstraintKind.SIZE_RATIO,
x_id=AnchorID(v, Attribute(a)),
y_id=AnchorID(w, Attribute(b)),
op=operator.eq)
for answer in prolog.query("spacing(V, A, W, B)"):
v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')]
for op in ops:
yield ConstraintFactory.create(kind=ConstraintKind.POS_LTRB_OFFSET,
x_id=AnchorID(v, Attribute(a)),
y_id=AnchorID(w, Attribute(b)),
op=op)
for answer in prolog.query("alignment(V, A, W, B)"):
v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')]
for op in ops:
yield ConstraintFactory.create(kind=ConstraintKind.POS_LTRB_OFFSET,
x_id=AnchorID(v, Attribute(a)),
y_id=AnchorID(w, Attribute(b)),
op=op)
finally:
# Cleanup dynamic predicates to avoid subsequent calls running in a
# polluted Prolog namespace.
prolog.retractall('view(_)')
prolog.retractall('parent(_,_)')
prolog.retractall('visible(_,_)')
pass
| 44.247525 | 113 | 0.492504 | import operator
from importlib import resources
from typing import List, Tuple, Generator
from pyswip import Prolog
from mockdown.constraint import ConstraintKind
from mockdown.constraint.factory import ConstraintFactory
from mockdown.model import Attribute, IView, IAnchor, AnchorID
from mockdown.constraint import *
from mockdown.types import NT
def valid_constraints(root: IView[NT], visibilities: List[Tuple[IAnchor[NT], IAnchor[NT]]], debug: bool = True) \
-> Generator[IConstraint, None, None]:
outfile = "debug.pl"
prolog = Prolog()
try:
with open(outfile, 'w') as dbfile:
with resources.path(__package__, 'logic.pl') as path:
prolog.consult(str(path))
prolog.dynamic('view/1')
prolog.dynamic('parent/2')
prolog.dynamic('visible/2')
for view in root:
prolog.assertz(f"view('{view.name}')")
if debug:
dbfile.write(f"view('{view.name}').\n")
for child in view.children:
prolog.assertz(f"parent('{view.name}', '{child.name}')")
if debug: dbfile.write(f"parent('{view.name}', '{child.name}').\n")
for vis in visibilities:
[a1, a2] = vis
a1_term = f"anchor('{a1.view.name}', '{a1.attribute.value}')"
a2_term = f"anchor('{a2.view.name}', '{a2.attribute.value}')"
prolog.assertz(f"visible({a1_term}, {a2_term})")
if debug: dbfile.write(f"visible({a1_term}, {a2_term}).\n")
ops = [operator.eq]
for answer in prolog.query("aspect_ratio_size(V)"):
v, = [answer[k] for k in ('V',)]
yield ConstraintFactory.create(kind=ConstraintKind.SIZE_ASPECT_RATIO,
x_id=AnchorID(v, Attribute('height')),
y_id=AnchorID(v, Attribute('width')),
op=operator.eq)
for answer in prolog.query("absolute_size(V, A)"):
v, a = [answer[k] for k in ('V', 'A')]
for op in ops:
yield ConstraintFactory.create(kind=ConstraintKind.SIZE_CONSTANT,
x_id=None, y_id=AnchorID(v, Attribute(a)),
op=op)
for answer in prolog.query("parent_relative_size(V, A, W, B)"):
v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')]
yield ConstraintFactory.create(kind=ConstraintKind.SIZE_RATIO,
x_id=AnchorID(v, Attribute(a)),
y_id=AnchorID(w, Attribute(b)),
op=operator.eq)
for answer in prolog.query("spacing(V, A, W, B)"):
v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')]
for op in ops:
yield ConstraintFactory.create(kind=ConstraintKind.POS_LTRB_OFFSET,
x_id=AnchorID(v, Attribute(a)),
y_id=AnchorID(w, Attribute(b)),
op=op)
for answer in prolog.query("alignment(V, A, W, B)"):
v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')]
for op in ops:
yield ConstraintFactory.create(kind=ConstraintKind.POS_LTRB_OFFSET,
x_id=AnchorID(v, Attribute(a)),
y_id=AnchorID(w, Attribute(b)),
op=op)
finally:
prolog.retractall('view(_)')
prolog.retractall('parent(_,_)')
prolog.retractall('visible(_,_)')
pass
| true | true |
f71cea51ce51965931a25dc006e4538270b7dc22 | 723 | py | Python | QCodes/paliandromLinkedListStack.py | Rakeshgsekhar/DataStructure | 8c7eb4ec02cdba7975b834180c0c66269595bd13 | [
"MIT"
] | null | null | null | QCodes/paliandromLinkedListStack.py | Rakeshgsekhar/DataStructure | 8c7eb4ec02cdba7975b834180c0c66269595bd13 | [
"MIT"
] | null | null | null | QCodes/paliandromLinkedListStack.py | Rakeshgsekhar/DataStructure | 8c7eb4ec02cdba7975b834180c0c66269595bd13 | [
"MIT"
] | null | null | null | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
def isPalindrome(self, head: ListNode) :
num = []
temp = head
isPalin = True
# if head is not None and head.next is None:
# return True
while temp is not None:
num.append(temp.val)
temp = temp.next
while head is not None:
stackVal = num.pop()
if head.val == stackVal:
isPalin = True
else:
isPalin = False
break
head = head.next
return isPalin | 27.807692 | 52 | 0.485477 |
class Solution:
def isPalindrome(self, head: ListNode) :
num = []
temp = head
isPalin = True
while temp is not None:
num.append(temp.val)
temp = temp.next
while head is not None:
stackVal = num.pop()
if head.val == stackVal:
isPalin = True
else:
isPalin = False
break
head = head.next
return isPalin | true | true |
f71ceadeedb16d3ad0d95606d9452ed423823bb7 | 6,060 | py | Python | galaxy/corals_database/galaxy_integration/version_1909/lib/galaxy/model/orm/scripts.py | skitchen19/galaxy_tools | b935f36cfe430263564503ebb71f78dc79315acb | [
"MIT"
] | 3 | 2017-04-05T18:01:59.000Z | 2019-05-03T14:15:31.000Z | galaxy/corals_database/galaxy_integration/version_1909/lib/galaxy/model/orm/scripts.py | skitchen19/galaxy_tools | b935f36cfe430263564503ebb71f78dc79315acb | [
"MIT"
] | 6 | 2019-02-27T15:45:58.000Z | 2021-01-12T15:18:50.000Z | galaxy/corals_database/galaxy_integration/version_1909/lib/galaxy/model/orm/scripts.py | skitchen19/galaxy_tools | b935f36cfe430263564503ebb71f78dc79315acb | [
"MIT"
] | 2 | 2018-10-26T18:36:39.000Z | 2019-01-28T15:12:39.000Z | """
Code to support database helper scripts (create_db.py, manage_db.py, etc...).
"""
import argparse
import logging
import os
import sys
from migrate.versioning.shell import main as migrate_main
from galaxy.util.path import get_ext
from galaxy.util.properties import find_config_file, get_data_dir, load_app_properties
from galaxy.util.script import populate_config_args
log = logging.getLogger(__name__)
DEFAULT_CONFIG_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'config', 'sample'))
DEFAULT_CONFIG_NAMES = ['galaxy', 'universe_wsgi']
DEFAULT_CONFIG_PREFIX = ''
DEFAULT_DATABASE = 'galaxy'
DATABASE = {
"galaxy":
{
'repo': 'galaxy/model/migrate',
'default_sqlite_file': 'universe.sqlite',
'config_override': 'GALAXY_CONFIG_',
},
"tools":
{
'repo': 'tool_shed/galaxy_install/migrate',
'default_sqlite_file': 'universe.sqlite',
'config_override': 'GALAXY_CONFIG_',
},
"tool_shed":
{
'repo': 'galaxy/webapps/tool_shed/model/migrate',
'config_names': ['tool_shed', 'tool_shed_wsgi'],
'default_sqlite_file': 'community.sqlite',
'config_override': 'TOOL_SHED_CONFIG_',
'config_section': 'tool_shed',
},
"install":
{
'repo': 'galaxy/model/tool_shed_install/migrate',
'config_prefix': 'install_',
'default_sqlite_file': 'install.sqlite',
'config_override': 'GALAXY_INSTALL_CONFIG_',
},
"corals":
{
'repo': 'lib/galaxy/model/corals/migrate',
'config_prefix': 'corals',
'default_sqlite_file': './database/stag.sqlite',
'config_override': 'GALAXY_CORALS_CONFIG_',
},
}
def _read_model_arguments(argv, use_argparse=False):
if use_argparse:
parser = argparse.ArgumentParser()
parser.add_argument('database', metavar='DATABASE', type=str,
default="galaxy",
nargs='?',
help='database to target (galaxy, tool_shed, install)')
populate_config_args(parser)
args = parser.parse_args(argv[1:] if argv else [])
return args.config_file, args.config_section, args.database
else:
config_file = None
for arg in ["-c", "--config", "--config-file"]:
if arg in argv:
pos = argv.index(arg)
argv.pop(pos)
config_file = argv.pop(pos)
config_section = None
if "--config-section" in argv:
pos = argv.index("--config-section")
argv.pop(pos)
config_section = argv.pop(pos)
if argv and (argv[-1] in DATABASE):
database = argv.pop() # database name tool_shed, galaxy, or install.
else:
database = 'galaxy'
return config_file, config_section, database
def get_config(argv, use_argparse=True, cwd=None):
"""
Read sys.argv and parse out repository of migrations and database url.
>>> import os
>>> from six.moves.configparser import SafeConfigParser
>>> from shutil import rmtree
>>> from tempfile import mkdtemp
>>> config_dir = mkdtemp()
>>> os.makedirs(os.path.join(config_dir, 'config'))
>>> def write_ini(path, property, value):
... p = SafeConfigParser()
... p.add_section('app:main')
... p.set('app:main', property, value)
... with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f)
>>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1')
>>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir)
>>> config['repo'].endswith('galaxy/webapps/tool_shed/model/migrate')
True
>>> config['db_url']
'sqlite:///pg/testdb1'
>>> write_ini('galaxy.ini', 'data_dir', '/moo')
>>> config = get_config(['manage_db.py'], cwd=config_dir)
>>> uri_with_env = os.getenv("GALAXY_TEST_DBURI", "sqlite:////moo/universe.sqlite?isolation_level=IMMEDIATE")
>>> config['db_url'] == uri_with_env
True
>>> config['repo'].endswith('galaxy/model/migrate')
True
>>> rmtree(config_dir)
"""
config_file, config_section, database = _read_model_arguments(argv, use_argparse=use_argparse)
database_defaults = DATABASE[database]
if config_file is None:
config_names = database_defaults.get('config_names', DEFAULT_CONFIG_NAMES)
if cwd:
cwd = [cwd, os.path.join(cwd, 'config')]
else:
cwd = [DEFAULT_CONFIG_DIR]
config_file = find_config_file(config_names, dirs=cwd)
repo = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, database_defaults['repo'])
config_prefix = database_defaults.get('config_prefix', DEFAULT_CONFIG_PREFIX)
config_override = database_defaults.get('config_override', 'GALAXY_CONFIG_')
default_sqlite_file = database_defaults['default_sqlite_file']
if config_section is None:
if not config_file or get_ext(config_file, ignore='sample') == 'yaml':
config_section = database_defaults.get('config_section', None)
else:
# Just use the default found by load_app_properties.
config_section = None
properties = load_app_properties(config_file=config_file, config_prefix=config_override, config_section=config_section)
if ("%sdatabase_connection" % config_prefix) in properties:
db_url = properties["%sdatabase_connection" % config_prefix]
else:
db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % os.path.join(get_data_dir(properties), default_sqlite_file)
return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
def manage_db():
# Migrate has its own args, so cannot use argparse
config = get_config(sys.argv, use_argparse=False, cwd=os.getcwd())
migrate_main(repository=config['repo'], url=config['db_url'])
| 39.350649 | 123 | 0.637129 | import argparse
import logging
import os
import sys
from migrate.versioning.shell import main as migrate_main
from galaxy.util.path import get_ext
from galaxy.util.properties import find_config_file, get_data_dir, load_app_properties
from galaxy.util.script import populate_config_args
log = logging.getLogger(__name__)
DEFAULT_CONFIG_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'config', 'sample'))
DEFAULT_CONFIG_NAMES = ['galaxy', 'universe_wsgi']
DEFAULT_CONFIG_PREFIX = ''
DEFAULT_DATABASE = 'galaxy'
DATABASE = {
"galaxy":
{
'repo': 'galaxy/model/migrate',
'default_sqlite_file': 'universe.sqlite',
'config_override': 'GALAXY_CONFIG_',
},
"tools":
{
'repo': 'tool_shed/galaxy_install/migrate',
'default_sqlite_file': 'universe.sqlite',
'config_override': 'GALAXY_CONFIG_',
},
"tool_shed":
{
'repo': 'galaxy/webapps/tool_shed/model/migrate',
'config_names': ['tool_shed', 'tool_shed_wsgi'],
'default_sqlite_file': 'community.sqlite',
'config_override': 'TOOL_SHED_CONFIG_',
'config_section': 'tool_shed',
},
"install":
{
'repo': 'galaxy/model/tool_shed_install/migrate',
'config_prefix': 'install_',
'default_sqlite_file': 'install.sqlite',
'config_override': 'GALAXY_INSTALL_CONFIG_',
},
"corals":
{
'repo': 'lib/galaxy/model/corals/migrate',
'config_prefix': 'corals',
'default_sqlite_file': './database/stag.sqlite',
'config_override': 'GALAXY_CORALS_CONFIG_',
},
}
def _read_model_arguments(argv, use_argparse=False):
if use_argparse:
parser = argparse.ArgumentParser()
parser.add_argument('database', metavar='DATABASE', type=str,
default="galaxy",
nargs='?',
help='database to target (galaxy, tool_shed, install)')
populate_config_args(parser)
args = parser.parse_args(argv[1:] if argv else [])
return args.config_file, args.config_section, args.database
else:
config_file = None
for arg in ["-c", "--config", "--config-file"]:
if arg in argv:
pos = argv.index(arg)
argv.pop(pos)
config_file = argv.pop(pos)
config_section = None
if "--config-section" in argv:
pos = argv.index("--config-section")
argv.pop(pos)
config_section = argv.pop(pos)
if argv and (argv[-1] in DATABASE):
database = argv.pop()
else:
database = 'galaxy'
return config_file, config_section, database
def get_config(argv, use_argparse=True, cwd=None):
config_file, config_section, database = _read_model_arguments(argv, use_argparse=use_argparse)
database_defaults = DATABASE[database]
if config_file is None:
config_names = database_defaults.get('config_names', DEFAULT_CONFIG_NAMES)
if cwd:
cwd = [cwd, os.path.join(cwd, 'config')]
else:
cwd = [DEFAULT_CONFIG_DIR]
config_file = find_config_file(config_names, dirs=cwd)
repo = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, database_defaults['repo'])
config_prefix = database_defaults.get('config_prefix', DEFAULT_CONFIG_PREFIX)
config_override = database_defaults.get('config_override', 'GALAXY_CONFIG_')
default_sqlite_file = database_defaults['default_sqlite_file']
if config_section is None:
if not config_file or get_ext(config_file, ignore='sample') == 'yaml':
config_section = database_defaults.get('config_section', None)
else:
config_section = None
properties = load_app_properties(config_file=config_file, config_prefix=config_override, config_section=config_section)
if ("%sdatabase_connection" % config_prefix) in properties:
db_url = properties["%sdatabase_connection" % config_prefix]
else:
db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % os.path.join(get_data_dir(properties), default_sqlite_file)
return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
def manage_db():
config = get_config(sys.argv, use_argparse=False, cwd=os.getcwd())
migrate_main(repository=config['repo'], url=config['db_url'])
| true | true |
f71ceba5ece68ddf805973aced707d2db383d881 | 2,136 | py | Python | test/IDL/IDLSUFFIXES.py | andrewyoung1991/scons | 7517c277e23bc04e3809a9bf0793cdfe00097a58 | [
"MIT"
] | 1 | 2015-11-04T22:22:10.000Z | 2015-11-04T22:22:10.000Z | test/IDL/IDLSUFFIXES.py | azverkan/scons | 704ddb9270e14c7771d0c58c04c7afa7bc009603 | [
"MIT"
] | null | null | null | test/IDL/IDLSUFFIXES.py | azverkan/scons | 704ddb9270e14c7771d0c58c04c7afa7bc009603 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test that we can add filesuffixes to $IDLSUFFIXES.
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
import SCons.Scanner.IDL
env = Environment(CPPPATH=['.'])
env.Append(SCANNERS = [ SCons.Scanner.IDL.IDLScan() ],
IDLSUFFIXES = ['.x'])
env.InstallAs('foo_idl', 'foo.idl')
env.InstallAs('foo_x', 'foo.x')
""")
test.write('foo.idl', """\
import <foo.h>
""")
test.write('foo.x', """\
#include <foo.h>
""")
test.write('foo.h', "foo.h 1\n")
test.run(arguments='.', stdout=test.wrap_stdout("""\
Install file: "foo.idl" as "foo_idl"
Install file: "foo.x" as "foo_x"
"""))
test.up_to_date(arguments='.')
test.write('foo.h', "foo.h 2\n")
test.run(arguments='.', stdout=test.wrap_stdout("""\
Install file: "foo.idl" as "foo_idl"
Install file: "foo.x" as "foo_x"
"""))
test.up_to_date(arguments='.')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 27.74026 | 73 | 0.719101 |
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
import SCons.Scanner.IDL
env = Environment(CPPPATH=['.'])
env.Append(SCANNERS = [ SCons.Scanner.IDL.IDLScan() ],
IDLSUFFIXES = ['.x'])
env.InstallAs('foo_idl', 'foo.idl')
env.InstallAs('foo_x', 'foo.x')
""")
test.write('foo.idl', """\
import <foo.h>
""")
test.write('foo.x', """\
#include <foo.h>
""")
test.write('foo.h', "foo.h 1\n")
test.run(arguments='.', stdout=test.wrap_stdout("""\
Install file: "foo.idl" as "foo_idl"
Install file: "foo.x" as "foo_x"
"""))
test.up_to_date(arguments='.')
test.write('foo.h', "foo.h 2\n")
test.run(arguments='.', stdout=test.wrap_stdout("""\
Install file: "foo.idl" as "foo_idl"
Install file: "foo.x" as "foo_x"
"""))
test.up_to_date(arguments='.')
test.pass_test()
| true | true |
f71cec55b1240dbdb58f8cf34afefd358b086e1e | 4,909 | py | Python | src/cicflowmeter/flow_session.py | ZhuMon/cicflowmeter | ca31b0d4461754f421610a5ce75eb8914753d74e | [
"MIT"
] | null | null | null | src/cicflowmeter/flow_session.py | ZhuMon/cicflowmeter | ca31b0d4461754f421610a5ce75eb8914753d74e | [
"MIT"
] | null | null | null | src/cicflowmeter/flow_session.py | ZhuMon/cicflowmeter | ca31b0d4461754f421610a5ce75eb8914753d74e | [
"MIT"
] | null | null | null | import csv
import time
from collections import defaultdict
from scapy.sessions import DefaultSession
from scapy.all import wrpcap
from .features.context.packet_direction import PacketDirection
from .features.context.packet_flow_key import get_packet_flow_key
from .flow import Flow
EXPIRED_UPDATE = 40
MACHINE_LEARNING_API = "http://localhost:8000/predict"
GARBAGE_COLLECT_PACKETS = 100
class FlowSession(DefaultSession):
"""Creates a list of network flows."""
def __init__(self, *args, **kwargs):
self.flows = {}
self.csv_line = 0
if self.output_mode == "flow":
output = open(self.output_file, "w")
self.csv_writer = csv.writer(output)
self.pcap_file = self.output_file[:-4]+'.pcap'
self.packets_count = 0
self.clumped_flows_per_label = defaultdict(list)
super(FlowSession, self).__init__(*args, **kwargs)
def toPacketList(self):
# Sniffer finished all the packets it needed to sniff.
# It is not a good place for this, we need to somehow define a finish signal for AsyncSniffer
self.garbage_collect(None)
return super(FlowSession, self).toPacketList()
def on_packet_received(self, packet):
count = 0
direction = PacketDirection.FORWARD
if self.output_mode != "flow":
if "TCP" not in packet:
return
elif "UDP" not in packet:
return
try:
# Creates a key variable to check
packet_flow_key = get_packet_flow_key(packet, direction)
flow = self.flows.get((packet_flow_key, count))
except Exception:
return
self.packets_count += 1
wrpcap(self.pcap_file, packet, append=True)
# If there is no forward flow with a count of 0
if flow is None:
# There might be one of it in reverse
direction = PacketDirection.REVERSE
packet_flow_key = get_packet_flow_key(packet, direction)
flow = self.flows.get((packet_flow_key, count))
if flow is None:
# If no flow exists create a new flow
direction = PacketDirection.FORWARD
flow = Flow(packet, direction)
packet_flow_key = get_packet_flow_key(packet, direction)
self.flows[(packet_flow_key, count)] = flow
elif (packet.time - flow.latest_timestamp) > EXPIRED_UPDATE:
# If the packet exists in the flow but the packet is sent
# after too much of a delay than it is a part of a new flow.
expired = EXPIRED_UPDATE
while (packet.time - flow.latest_timestamp) > expired:
count += 1
expired += EXPIRED_UPDATE
flow = self.flows.get((packet_flow_key, count))
if flow is None:
flow = Flow(packet, direction)
self.flows[(packet_flow_key, count)] = flow
break
elif "TCP" in packet and "F" in str(packet.flags):
# If it has FIN flag then early collect flow and continue
flow.add_packet(packet, direction)
self.garbage_collect(packet.time)
return
flow.add_packet(packet, direction)
if not self.url_model:
GARBAGE_COLLECT_PACKETS = 10000
if self.packets_count % GARBAGE_COLLECT_PACKETS == 0 or (
flow.duration > 120 and self.output_mode == "flow"
):
self.garbage_collect(packet.time)
def get_flows(self) -> list:
return self.flows.values()
def garbage_collect(self, latest_time) -> None:
localtime = time.asctime( time.localtime(time.time()) )
print(localtime, latest_time)
# TODO: Garbage Collection / Feature Extraction should have a separate thread
if not self.url_model:
print("Garbage Collection Began. Flows = {}".format(len(self.flows)))
keys = list(self.flows.keys())
for k in keys:
flow = self.flows.get(k)
if (
latest_time is None
or latest_time - flow.latest_timestamp > EXPIRED_UPDATE
or flow.duration > 90
):
data = flow.get_data()
if self.csv_line == 0:
self.csv_writer.writerow(data.keys())
self.csv_writer.writerow(data.values())
self.csv_line += 1
del self.flows[k]
if not self.url_model:
print("Garbage Collection Finished. Flows = {}".format(len(self.flows)))
def generate_session_class(output_mode, output_file, url_model):
return type(
"NewFlowSession",
(FlowSession,),
{
"output_mode": output_mode,
"output_file": output_file,
"url_model": url_model,
},
)
| 33.855172 | 101 | 0.598696 | import csv
import time
from collections import defaultdict
from scapy.sessions import DefaultSession
from scapy.all import wrpcap
from .features.context.packet_direction import PacketDirection
from .features.context.packet_flow_key import get_packet_flow_key
from .flow import Flow
EXPIRED_UPDATE = 40
MACHINE_LEARNING_API = "http://localhost:8000/predict"
GARBAGE_COLLECT_PACKETS = 100
class FlowSession(DefaultSession):
def __init__(self, *args, **kwargs):
self.flows = {}
self.csv_line = 0
if self.output_mode == "flow":
output = open(self.output_file, "w")
self.csv_writer = csv.writer(output)
self.pcap_file = self.output_file[:-4]+'.pcap'
self.packets_count = 0
self.clumped_flows_per_label = defaultdict(list)
super(FlowSession, self).__init__(*args, **kwargs)
def toPacketList(self):
self.garbage_collect(None)
return super(FlowSession, self).toPacketList()
def on_packet_received(self, packet):
count = 0
direction = PacketDirection.FORWARD
if self.output_mode != "flow":
if "TCP" not in packet:
return
elif "UDP" not in packet:
return
try:
packet_flow_key = get_packet_flow_key(packet, direction)
flow = self.flows.get((packet_flow_key, count))
except Exception:
return
self.packets_count += 1
wrpcap(self.pcap_file, packet, append=True)
if flow is None:
direction = PacketDirection.REVERSE
packet_flow_key = get_packet_flow_key(packet, direction)
flow = self.flows.get((packet_flow_key, count))
if flow is None:
direction = PacketDirection.FORWARD
flow = Flow(packet, direction)
packet_flow_key = get_packet_flow_key(packet, direction)
self.flows[(packet_flow_key, count)] = flow
elif (packet.time - flow.latest_timestamp) > EXPIRED_UPDATE:
expired = EXPIRED_UPDATE
while (packet.time - flow.latest_timestamp) > expired:
count += 1
expired += EXPIRED_UPDATE
flow = self.flows.get((packet_flow_key, count))
if flow is None:
flow = Flow(packet, direction)
self.flows[(packet_flow_key, count)] = flow
break
elif "TCP" in packet and "F" in str(packet.flags):
flow.add_packet(packet, direction)
self.garbage_collect(packet.time)
return
flow.add_packet(packet, direction)
if not self.url_model:
GARBAGE_COLLECT_PACKETS = 10000
if self.packets_count % GARBAGE_COLLECT_PACKETS == 0 or (
flow.duration > 120 and self.output_mode == "flow"
):
self.garbage_collect(packet.time)
def get_flows(self) -> list:
return self.flows.values()
def garbage_collect(self, latest_time) -> None:
localtime = time.asctime( time.localtime(time.time()) )
print(localtime, latest_time)
if not self.url_model:
print("Garbage Collection Began. Flows = {}".format(len(self.flows)))
keys = list(self.flows.keys())
for k in keys:
flow = self.flows.get(k)
if (
latest_time is None
or latest_time - flow.latest_timestamp > EXPIRED_UPDATE
or flow.duration > 90
):
data = flow.get_data()
if self.csv_line == 0:
self.csv_writer.writerow(data.keys())
self.csv_writer.writerow(data.values())
self.csv_line += 1
del self.flows[k]
if not self.url_model:
print("Garbage Collection Finished. Flows = {}".format(len(self.flows)))
def generate_session_class(output_mode, output_file, url_model):
return type(
"NewFlowSession",
(FlowSession,),
{
"output_mode": output_mode,
"output_file": output_file,
"url_model": url_model,
},
)
| true | true |
f71cec880a8d2569d0b6f4835d13dbc7f5354126 | 47,163 | py | Python | python/ccxt/binance.py | ivankia/phpbot | 78e68a748171b68408f86a436188baafd71135bc | [
"MIT"
] | 13 | 2019-01-26T14:41:37.000Z | 2022-03-26T03:33:12.000Z | python/ccxt/binance.py | tiancai110a/ccxt | 2521001c6c3ba4078b580b692f3256319198b10a | [
"MIT"
] | null | null | null | python/ccxt/binance.py | tiancai110a/ccxt | 2521001c6c3ba4078b580b692f3256319198b10a | [
"MIT"
] | 12 | 2018-12-24T02:19:02.000Z | 2022-03-26T05:04:25.000Z | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import InvalidNonce
class binance (Exchange):
    def describe(self):
        """Return the static exchange metadata, merged over the base Exchange description.

        Declares ids and URLs, the set of supported unified methods ('has'),
        timeframe codes, per-access-group REST endpoint routing
        (web/wapi/v3/public/private), fee schedules, currency-code
        remappings, exchange-specific options, and the mapping from Binance
        numeric error codes to unified ccxt exception classes.
        """
        return self.deep_extend(super(binance, self).describe(), {
            'id': 'binance',
            'name': 'Binance',
            'countries': ['JP'],  # Japan
            'rateLimit': 500,
            'certified': True,
            # new metainfo interface
            'has': {
                'fetchDepositAddress': True,
                'CORS': False,
                'fetchBidsAsks': True,
                'fetchTickers': True,
                'fetchOHLCV': True,
                'fetchMyTrades': True,
                'fetchOrder': True,
                'fetchOrders': True,
                'fetchOpenOrders': True,
                'fetchClosedOrders': True,
                'withdraw': True,
                'fetchFundingFees': True,
                'fetchDeposits': True,
                'fetchWithdrawals': True,
                'fetchTransactions': False,
            },
            'timeframes': {
                '1m': '1m',
                '3m': '3m',
                '5m': '5m',
                '15m': '15m',
                '30m': '30m',
                '1h': '1h',
                '2h': '2h',
                '4h': '4h',
                '6h': '6h',
                '8h': '8h',
                '12h': '12h',
                '1d': '1d',
                '3d': '3d',
                '1w': '1w',
                '1M': '1M',
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/29604020-d5483cdc-87ee-11e7-94c7-d1a8d9169293.jpg',
                'api': {
                    'web': 'https://www.binance.com',
                    'wapi': 'https://api.binance.com/wapi/v3',
                    'public': 'https://api.binance.com/api/v1',
                    'private': 'https://api.binance.com/api/v3',
                    'v3': 'https://api.binance.com/api/v3',
                    'v1': 'https://api.binance.com/api/v1',
                },
                'www': 'https://www.binance.com',
                'referral': 'https://www.binance.com/?ref=10205187',
                'doc': 'https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md',
                'fees': 'https://www.binance.com/en/fee/schedule',
            },
            # endpoint names are turned into implicit methods, e.g. publicGetDepth
            'api': {
                'web': {
                    'get': [
                        'exchange/public/product',
                        'assetWithdraw/getAllAsset.html',
                    ],
                },
                'wapi': {
                    'post': [
                        'withdraw',
                    ],
                    'get': [
                        'depositHistory',
                        'withdrawHistory',
                        'depositAddress',
                        'accountStatus',
                        'systemStatus',
                        'userAssetDribbletLog',
                        'tradeFee',
                        'assetDetail',
                    ],
                },
                'v3': {
                    'get': [
                        'ticker/price',
                        'ticker/bookTicker',
                    ],
                },
                'public': {
                    'get': [
                        'exchangeInfo',
                        'ping',
                        'time',
                        'depth',
                        'aggTrades',
                        'klines',
                        'ticker/24hr',
                        'ticker/allPrices',
                        'ticker/allBookTickers',
                        'ticker/price',
                        'ticker/bookTicker',
                        'exchangeInfo',
                    ],
                    'put': ['userDataStream'],
                    'post': ['userDataStream'],
                    'delete': ['userDataStream'],
                },
                'private': {
                    'get': [
                        'order',
                        'openOrders',
                        'allOrders',
                        'account',
                        'myTrades',
                    ],
                    'post': [
                        'order',
                        'order/test',
                    ],
                    'delete': [
                        'order',
                    ],
                },
            },
            'fees': {
                'trading': {
                    'tierBased': False,
                    'percentage': True,
                    'taker': 0.001,
                    'maker': 0.001,
                },
                # should be deleted, these are outdated and inaccurate
                'funding': {
                    'tierBased': False,
                    'percentage': False,
                    'withdraw': {
                        'ADA': 1.0,
                        'ADX': 4.7,
                        'AION': 1.9,
                        'AMB': 11.4,
                        'APPC': 6.5,
                        'ARK': 0.1,
                        'ARN': 3.1,
                        'AST': 10.0,
                        'BAT': 18.0,
                        'BCD': 1.0,
                        'BCH': 0.001,
                        'BCPT': 10.2,
                        'BCX': 1.0,
                        'BNB': 0.7,
                        'BNT': 1.5,
                        'BQX': 1.6,
                        'BRD': 6.4,
                        'BTC': 0.001,
                        'BTG': 0.001,
                        'BTM': 5.0,
                        'BTS': 1.0,
                        'CDT': 67.0,
                        'CMT': 37.0,
                        'CND': 47.0,
                        'CTR': 5.4,
                        'DASH': 0.002,
                        'DGD': 0.06,
                        'DLT': 11.7,
                        'DNT': 51.0,
                        'EDO': 2.5,
                        'ELF': 6.5,
                        'ENG': 2.1,
                        'ENJ': 42.0,
                        'EOS': 1.0,
                        'ETC': 0.01,
                        'ETF': 1.0,
                        'ETH': 0.01,
                        'EVX': 2.5,
                        'FUEL': 45.0,
                        'FUN': 85.0,
                        'GAS': 0,
                        'GTO': 20.0,
                        'GVT': 0.53,
                        'GXS': 0.3,
                        'HCC': 0.0005,
                        'HSR': 0.0001,
                        'ICN': 3.5,
                        'ICX': 1.3,
                        'INS': 1.5,
                        'IOTA': 0.5,
                        'KMD': 0.002,
                        'KNC': 2.6,
                        'LEND': 54.0,
                        'LINK': 12.8,
                        'LLT': 54.0,
                        'LRC': 9.1,
                        'LSK': 0.1,
                        'LTC': 0.01,
                        'LUN': 0.29,
                        'MANA': 74.0,
                        'MCO': 0.86,
                        'MDA': 4.7,
                        'MOD': 2.0,
                        'MTH': 34.0,
                        'MTL': 1.9,
                        'NAV': 0.2,
                        'NEBL': 0.01,
                        'NEO': 0.0,
                        'NULS': 2.1,
                        'OAX': 8.3,
                        'OMG': 0.57,
                        'OST': 17.0,
                        'POE': 88.0,
                        'POWR': 8.6,
                        'PPT': 0.25,
                        'QSP': 21.0,
                        'QTUM': 0.01,
                        'RCN': 35.0,
                        'RDN': 2.2,
                        'REQ': 18.1,
                        'RLC': 4.1,
                        'SALT': 1.3,
                        'SBTC': 1.0,
                        'SNGLS': 42,
                        'SNM': 29.0,
                        'SNT': 32.0,
                        'STORJ': 5.9,
                        'STRAT': 0.1,
                        'SUB': 7.4,
                        'TNB': 82.0,
                        'TNT': 47.0,
                        'TRIG': 6.7,
                        'TRX': 129.0,
                        'USDT': 23.0,
                        'VEN': 1.8,
                        'VIB': 28.0,
                        'VIBE': 7.2,
                        'WABI': 3.5,
                        'WAVES': 0.002,
                        'WINGS': 9.3,
                        'WTC': 0.5,
                        'XLM': 0.01,
                        'XMR': 0.04,
                        'XRP': 0.25,
                        'XVG': 0.1,
                        'XZC': 0.02,
                        'YOYOW': 39.0,
                        'ZEC': 0.005,
                        'ZRX': 5.7,
                    },
                    'deposit': {},
                },
            },
            # exchange ticker symbol -> unified ccxt currency code
            'commonCurrencies': {
                'YOYO': 'YOYOW',
                'BCC': 'BCH',
            },
            # exchange-specific options
            'options': {
                'defaultTimeInForce': 'GTC',  # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel
                'defaultLimitOrderType': 'limit',  # or 'limit_maker'
                'hasAlreadyAuthenticatedSuccessfully': False,
                'warnOnFetchOpenOrdersWithoutSymbol': True,
                'recvWindow': 5 * 1000,  # 5 sec, binance default
                'timeDifference': 0,  # the difference between system clock and Binance clock
                'adjustForTimeDifference': False,  # controls the adjustment logic upon instantiation
                'parseOrderToPrecision': False,  # force amounts and costs in parseOrder to precision
                'newOrderRespType': 'RESULT',  # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills
            },
            # Binance error code -> unified ccxt exception class
            'exceptions': {
                '-1000': ExchangeNotAvailable,  # {"code":-1000,"msg":"An unknown error occured while processing the request."}
                '-1013': InvalidOrder,  # createOrder -> 'invalid quantity'/'invalid price'/MIN_NOTIONAL
                '-1021': InvalidNonce,  # 'your time is ahead of server'
                '-1022': AuthenticationError,  # {"code":-1022,"msg":"Signature for self request is not valid."}
                '-1100': InvalidOrder,  # createOrder(symbol, 1, asdf) -> 'Illegal characters found in parameter 'price'
                '-1104': ExchangeError,  # Not all sent parameters were read, read 8 parameters but was sent 9
                '-1128': ExchangeError,  # {"code":-1128,"msg":"Combination of optional parameters invalid."}
                '-2010': ExchangeError,  # generic error code for createOrder -> 'Account has insufficient balance for requested action.', {"code":-2010,"msg":"Rest API trading is not enabled."}, etc...
                '-2011': OrderNotFound,  # cancelOrder(1, 'BTC/USDT') -> 'UNKNOWN_ORDER'
                '-2013': OrderNotFound,  # fetchOrder(1, 'BTC/USDT') -> 'Order does not exist'
                '-2014': AuthenticationError,  # {"code":-2014, "msg": "API-key format invalid."}
                '-2015': AuthenticationError,  # "Invalid API-key, IP, or permissions for action."
            },
        })
def nonce(self):
return self.milliseconds() - self.options['timeDifference']
def load_time_difference(self):
response = self.publicGetTime()
after = self.milliseconds()
self.options['timeDifference'] = int(after - response['serverTime'])
return self.options['timeDifference']
def fetch_markets(self):
response = self.publicGetExchangeInfo()
if self.options['adjustForTimeDifference']:
self.load_time_difference()
markets = response['symbols']
result = []
for i in range(0, len(markets)):
market = markets[i]
id = market['symbol']
# "123456" is a "test symbol/market"
if id == '123456':
continue
baseId = market['baseAsset']
quoteId = market['quoteAsset']
base = self.common_currency_code(baseId)
quote = self.common_currency_code(quoteId)
symbol = base + '/' + quote
filters = self.index_by(market['filters'], 'filterType')
precision = {
'base': market['baseAssetPrecision'],
'quote': market['quotePrecision'],
'amount': market['baseAssetPrecision'],
'price': market['quotePrecision'],
}
active = (market['status'] == 'TRADING')
entry = {
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'info': market,
'active': active,
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision['amount']),
'max': None,
},
'price': {
'min': math.pow(10, -precision['price']),
'max': None,
},
'cost': {
'min': -1 * math.log10(precision['amount']),
'max': None,
},
},
}
if 'PRICE_FILTER' in filters:
filter = filters['PRICE_FILTER']
entry['precision']['price'] = self.precision_from_string(filter['tickSize'])
entry['limits']['price'] = {
'min': self.safe_float(filter, 'minPrice'),
'max': self.safe_float(filter, 'maxPrice'),
}
if 'LOT_SIZE' in filters:
filter = filters['LOT_SIZE']
entry['precision']['amount'] = self.precision_from_string(filter['stepSize'])
entry['limits']['amount'] = {
'min': self.safe_float(filter, 'minQty'),
'max': self.safe_float(filter, 'maxQty'),
}
if 'MIN_NOTIONAL' in filters:
entry['limits']['cost']['min'] = float(filters['MIN_NOTIONAL']['minNotional'])
result.append(entry)
return result
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
key = 'quote'
rate = market[takerOrMaker]
cost = float(self.cost_to_precision(symbol, amount * rate))
if side == 'sell':
cost *= price
else:
key = 'base'
return {
'type': takerOrMaker,
'currency': market[key],
'rate': rate,
'cost': float(self.fee_to_precision(symbol, cost)),
}
def fetch_balance(self, params={}):
self.load_markets()
response = self.privateGetAccount(params)
result = {'info': response}
balances = response['balances']
for i in range(0, len(balances)):
balance = balances[i]
currency = balance['asset']
if currency in self.currencies_by_id:
currency = self.currencies_by_id[currency]['code']
account = {
'free': float(balance['free']),
'used': float(balance['locked']),
'total': 0.0,
}
account['total'] = self.sum(account['free'], account['used'])
result[currency] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # default = maximum = 100
response = self.publicGetDepth(self.extend(request, params))
orderbook = self.parse_order_book(response)
orderbook['nonce'] = self.safe_integer(response, 'lastUpdateId')
return orderbook
    def parse_ticker(self, ticker, market=None):
        """Normalize a raw 24hr ticker payload into the unified ccxt ticker structure.

        :param ticker: raw exchange ticker dict (GET /ticker/24hr format)
        :param market: optional market structure used to resolve the symbol
        """
        timestamp = self.safe_integer(ticker, 'closeTime')
        iso8601 = None if (timestamp is None) else self.iso8601(timestamp)
        symbol = self.find_symbol(self.safe_string(ticker, 'symbol'), market)
        last = self.safe_float(ticker, 'lastPrice')
        return {
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': iso8601,
            'high': self.safe_float(ticker, 'highPrice'),
            'low': self.safe_float(ticker, 'lowPrice'),
            'bid': self.safe_float(ticker, 'bidPrice'),
            'bidVolume': self.safe_float(ticker, 'bidQty'),
            'ask': self.safe_float(ticker, 'askPrice'),
            'askVolume': self.safe_float(ticker, 'askQty'),
            'vwap': self.safe_float(ticker, 'weightedAvgPrice'),
            'open': self.safe_float(ticker, 'openPrice'),
            'close': last,
            'last': last,
            'previousClose': self.safe_float(ticker, 'prevClosePrice'),  # previous day close
            'change': self.safe_float(ticker, 'priceChange'),
            'percentage': self.safe_float(ticker, 'priceChangePercent'),
            'average': None,
            'baseVolume': self.safe_float(ticker, 'volume'),
            'quoteVolume': self.safe_float(ticker, 'quoteVolume'),
            'info': ticker,
        }
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicGetTicker24hr(self.extend({
'symbol': market['id'],
}, params))
return self.parse_ticker(response, market)
def parse_tickers(self, rawTickers, symbols=None):
tickers = []
for i in range(0, len(rawTickers)):
tickers.append(self.parse_ticker(rawTickers[i]))
return self.filter_by_array(tickers, 'symbol', symbols)
def fetch_bids_asks(self, symbols=None, params={}):
self.load_markets()
rawTickers = self.publicGetTickerBookTicker(params)
return self.parse_tickers(rawTickers, symbols)
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
rawTickers = self.publicGetTicker24hr(params)
return self.parse_tickers(rawTickers, symbols)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return [
ohlcv[0],
float(ohlcv[1]),
float(ohlcv[2]),
float(ohlcv[3]),
float(ohlcv[4]),
float(ohlcv[5]),
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'interval': self.timeframes[timeframe],
}
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit # default == max == 500
response = self.publicGetKlines(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_trade(self, trade, market=None):
timestampField = 'T' if ('T' in list(trade.keys())) else 'time'
timestamp = self.safe_integer(trade, timestampField)
priceField = 'p' if ('p' in list(trade.keys())) else 'price'
price = self.safe_float(trade, priceField)
amountField = 'q' if ('q' in list(trade.keys())) else 'qty'
amount = self.safe_float(trade, amountField)
idField = 'a' if ('a' in list(trade.keys())) else 'id'
id = self.safe_string(trade, idField)
side = None
order = None
if 'orderId' in trade:
order = self.safe_string(trade, 'orderId')
if 'm' in trade:
side = 'sell' if trade['m'] else 'buy' # self is reversed intentionally
else:
if 'isBuyer' in trade:
side = 'buy' if (trade['isBuyer']) else 'sell' # self is a True side
fee = None
if 'commission' in trade:
fee = {
'cost': self.safe_float(trade, 'commission'),
'currency': self.common_currency_code(trade['commissionAsset']),
}
takerOrMaker = None
if 'isMaker' in trade:
takerOrMaker = 'maker' if trade['isMaker'] else 'taker'
symbol = None
if market is None:
marketId = self.safe_string(trade, 'symbol')
market = self.safe_value(self.markets_by_id, marketId)
if market is not None:
symbol = market['symbol']
return {
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': id,
'order': order,
'type': None,
'takerOrMaker': takerOrMaker,
'side': side,
'price': price,
'cost': price * amount,
'amount': amount,
'fee': fee,
}
    def fetch_trades(self, symbol, since=None, limit=None, params={}):
        """Fetch public aggregated trades via GET /aggTrades.

        When 'since' is given, the request window is clamped to one hour
        (endTime = since + 3600000) because the endpoint rejects wider ranges.
        """
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        if since is not None:
            request['startTime'] = since
            request['endTime'] = self.sum(since, 3600000)
        if limit is not None:
            request['limit'] = limit
        # 'fromId': 123, # ID to get aggregate trades from INCLUSIVE.
        # 'startTime': 456, # Timestamp in ms to get aggregate trades from INCLUSIVE.
        # 'endTime': 789, # Timestamp in ms to get aggregate trades until INCLUSIVE.
        # 'limit': 500, # default = 500, maximum = 1000
        #
        # Caveats:
        # - default limit(500) applies only if no other parameters set, trades up
        #   to the maximum limit may be returned to satisfy other parameters
        # - if both limit and time window is set and time window contains more
        #   trades than the limit then the last trades from the window are returned
        # - 'tradeId' accepted and returned by self method is "aggregate" trade id
        #   which is different from actual trade id
        # - setting both fromId and time window results in error
        response = self.publicGetAggTrades(self.extend(request, params))
        return self.parse_trades(response, market, since, limit)
def parse_order_status(self, status):
statuses = {
'NEW': 'open',
'PARTIALLY_FILLED': 'open',
'FILLED': 'closed',
'CANCELED': 'canceled',
}
return statuses[status] if (status in list(statuses.keys())) else status
    def parse_order(self, order, market=None):
        """Normalize a raw Binance order into the unified ccxt order structure.

        Accepts both query responses('time') and create responses
        ('transactTime'). For market orders reported with price == 0, the
        effective price is derived as cost / filled when possible. When the
        'fills' array is present(newOrderRespType == 'FULL'), cost and fee
        are aggregated from the individual fills.
        """
        status = self.parse_order_status(self.safe_string(order, 'status'))
        symbol = self.find_symbol(self.safe_string(order, 'symbol'), market)
        timestamp = None
        if 'time' in order:
            timestamp = order['time']
        elif 'transactTime' in order:
            timestamp = order['transactTime']
        price = self.safe_float(order, 'price')
        amount = self.safe_float(order, 'origQty')
        filled = self.safe_float(order, 'executedQty')
        remaining = None
        # note the exchange's spelling: 'cummulativeQuoteQty'
        cost = self.safe_float(order, 'cummulativeQuoteQty')
        if filled is not None:
            if amount is not None:
                remaining = amount - filled
                if self.options['parseOrderToPrecision']:
                    remaining = float(self.amount_to_precision(symbol, remaining))
                remaining = max(remaining, 0.0)
            if price is not None:
                if cost is None:
                    cost = price * filled
        id = self.safe_string(order, 'orderId')
        type = self.safe_string(order, 'type')
        if type is not None:
            type = type.lower()
            if type == 'market':
                # market orders report price 0; recover the average fill price
                if price == 0.0:
                    if (cost is not None) and(filled is not None):
                        if (cost > 0) and(filled > 0):
                            price = cost / filled
        side = self.safe_string(order, 'side')
        if side is not None:
            side = side.lower()
        fee = None
        trades = None
        fills = self.safe_value(order, 'fills')
        if fills is not None:
            trades = self.parse_trades(fills, market)
            numTrades = len(trades)
            if numTrades > 0:
                # accumulate total cost and fee across all fills
                cost = trades[0]['cost']
                fee = {
                    'cost': trades[0]['fee']['cost'],
                    'currency': trades[0]['fee']['currency'],
                }
                for i in range(1, len(trades)):
                    cost = self.sum(cost, trades[i]['cost'])
                    fee['cost'] = self.sum(fee['cost'], trades[i]['fee']['cost'])
        average = None
        if cost is not None:
            if filled:
                average = cost / filled
            if self.options['parseOrderToPrecision']:
                cost = float(self.cost_to_precision(symbol, cost))
        result = {
            'info': order,
            'id': id,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': None,
            'symbol': symbol,
            'type': type,
            'side': side,
            'price': price,
            'amount': amount,
            'cost': cost,
            'average': average,
            'filled': filled,
            'remaining': remaining,
            'status': status,
            'fee': fee,
            'trades': trades,
        }
        return result
    def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Place an order via POST /order (or /order/test when params['test'] is set).

        :param type: 'limit', 'market', 'stop_loss', 'take_profit',
            'stop_loss_limit', 'take_profit_limit' or 'limit_maker'
        :param price: required for all limit-style types
        :raises InvalidOrder: when a required price or stopPrice is missing
        """
        self.load_markets()
        market = self.market(symbol)
        # the next 5 lines are added to support for testing orders
        method = 'privatePostOrder'
        test = self.safe_value(params, 'test', False)
        if test:
            method += 'Test'
            params = self.omit(params, 'test')
        uppercaseType = type.upper()
        order = {
            'symbol': market['id'],
            'quantity': self.amount_to_precision(symbol, amount),
            'type': uppercaseType,
            'side': side.upper(),
            'newOrderRespType': self.options['newOrderRespType'],  # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills
        }
        # which extra fields each Binance order type requires
        timeInForceIsRequired = False
        priceIsRequired = False
        stopPriceIsRequired = False
        if uppercaseType == 'LIMIT':
            priceIsRequired = True
            timeInForceIsRequired = True
        elif (uppercaseType == 'STOP_LOSS') or (uppercaseType == 'TAKE_PROFIT'):
            stopPriceIsRequired = True
        elif (uppercaseType == 'STOP_LOSS_LIMIT') or (uppercaseType == 'TAKE_PROFIT_LIMIT'):
            stopPriceIsRequired = True
            priceIsRequired = True
            timeInForceIsRequired = True
        elif uppercaseType == 'LIMIT_MAKER':
            priceIsRequired = True
        if priceIsRequired:
            if price is None:
                raise InvalidOrder(self.id + ' createOrder method requires a price argument for a ' + type + ' order')
            order['price'] = self.price_to_precision(symbol, price)
        if timeInForceIsRequired:
            order['timeInForce'] = self.options['defaultTimeInForce']  # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel
        if stopPriceIsRequired:
            stopPrice = self.safe_float(params, 'stopPrice')
            if stopPrice is None:
                raise InvalidOrder(self.id + ' createOrder method requires a stopPrice extra param for a ' + type + ' order')
            else:
                order['stopPrice'] = self.price_to_precision(symbol, stopPrice)
        response = getattr(self, method)(self.extend(order, params))
        return self.parse_order(response, market)
def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
origClientOrderId = self.safe_value(params, 'origClientOrderId')
request = {
'symbol': market['id'],
}
if origClientOrderId is not None:
request['origClientOrderId'] = origClientOrderId
else:
request['orderId'] = int(id)
response = self.privateGetOrder(self.extend(request, params))
return self.parse_order(response, market)
    def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch all orders(open, closed, canceled) for a symbol via GET /allOrders.

        :raises ArgumentsRequired: the endpoint is per-symbol, so symbol is mandatory
        """
        if symbol is None:
            raise ArgumentsRequired(self.id + ' fetchOrders requires a symbol argument')
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        if limit is not None:
            request['limit'] = limit
        response = self.privateGetAllOrders(self.extend(request, params))
        #
        # [
        #     {
        #         "symbol": "LTCBTC",
        #         "orderId": 1,
        #         "clientOrderId": "myOrder1",
        #         "price": "0.1",
        #         "origQty": "1.0",
        #         "executedQty": "0.0",
        #         "cummulativeQuoteQty": "0.0",
        #         "status": "NEW",
        #         "timeInForce": "GTC",
        #         "type": "LIMIT",
        #         "side": "BUY",
        #         "stopPrice": "0.0",
        #         "icebergQty": "0.0",
        #         "time": 1499827319559,
        #         "updateTime": 1499827319559,
        #         "isWorking": True
        #     }
        # ]
        #
        return self.parse_orders(response, market, since, limit)
    def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch open orders, optionally for a single symbol.

        Calling without a symbol is heavily rate-limited by the exchange, so
        by default(options['warnOnFetchOpenOrdersWithoutSymbol']) it raises
        instead of issuing the request.
        """
        self.load_markets()
        market = None
        request = {}
        if symbol is not None:
            market = self.market(symbol)
            request['symbol'] = market['id']
        elif self.options['warnOnFetchOpenOrdersWithoutSymbol']:
            symbols = self.symbols
            numSymbols = len(symbols)
            # exchange-documented cost: one symbol-less call per numSymbols/2 seconds
            fetchOpenOrdersRateLimit = int(numSymbols / 2)
            raise ExchangeError(self.id + ' fetchOpenOrders WARNING: fetching open orders without specifying a symbol is rate-limited to one call per ' + str(fetchOpenOrdersRateLimit) + ' seconds. Do not call self method frequently to avoid ban. Set ' + self.id + '.options["warnOnFetchOpenOrdersWithoutSymbol"] = False to suppress self warning message.')
        response = self.privateGetOpenOrders(self.extend(request, params))
        return self.parse_orders(response, market, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
orders = self.fetch_orders(symbol, since, limit, params)
return self.filter_by(orders, 'status', 'closed')
def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
response = self.privateDeleteOrder(self.extend({
'symbol': market['id'],
'orderId': int(id),
# 'origClientOrderId': id,
}, params))
return self.parse_order(response)
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit
response = self.privateGetMyTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
    def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        """Fetch deposit history via wapi GET /depositHistory, optionally filtered by currency and start time."""
        self.load_markets()
        currency = None
        request = {}
        if code is not None:
            currency = self.currency(code)
            request['asset'] = currency['id']
        if since is not None:
            request['startTime'] = since
        response = self.wapiGetDepositHistory(self.extend(request, params))
        #
        #     {     success:    True,
        #       depositList: [{insertTime:  1517425007000,
        #                            amount:  0.3,
        #                           address: "0x0123456789abcdef",
        #                        addressTag: "",
        #                              txId: "0x0123456789abcdef",
        #                             asset: "ETH",
        #                            status:  1                                                                    }]}
        #
        return self.parseTransactions(response['depositList'], currency, since, limit)
    def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
        """Fetch withdrawal history via wapi GET /withdrawHistory, optionally filtered by currency and start time."""
        self.load_markets()
        currency = None
        request = {}
        if code is not None:
            currency = self.currency(code)
            request['asset'] = currency['id']
        if since is not None:
            request['startTime'] = since
        response = self.wapiGetWithdrawHistory(self.extend(request, params))
        #
        #     {withdrawList: [{      amount:  14,
        #                             address: "0x0123456789abcdef...",
        #                         successTime:  1514489710000,
        #                          addressTag: "",
        #                                txId: "0x0123456789abcdef...",
        #                                  id: "0123456789abcdef...",
        #                               asset: "ETH",
        #                           applyTime:  1514488724000,
        #                              status:  6                       },
        #                     {      amount:  7600,
        #                             address: "0x0123456789abcdef...",
        #                         successTime:  1515323226000,
        #                          addressTag: "",
        #                                txId: "0x0123456789abcdef...",
        #                                  id: "0123456789abcdef...",
        #                               asset: "ICN",
        #                           applyTime:  1515322539000,
        #                              status:  6                       }  ],
        #            success:    True                                         }
        #
        return self.parseTransactions(response['withdrawList'], currency, since, limit)
def parse_transaction_status_by_type(self, status, type=None):
if type is None:
return status
statuses = {
'deposit': {
'0': 'pending',
'1': 'ok',
},
'withdrawal': {
'0': 'pending', # Email Sent
'1': 'canceled', # Cancelled(different from 1 = ok in deposits)
'2': 'pending', # Awaiting Approval
'3': 'failed', # Rejected
'4': 'pending', # Processing
'5': 'failed', # Failure
'6': 'ok', # Completed
},
}
return statuses[type][status] if (status in list(statuses[type].keys())) else status
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits
# {insertTime: 1517425007000,
# amount: 0.3,
# address: "0x0123456789abcdef",
# addressTag: "",
# txId: "0x0123456789abcdef",
# asset: "ETH",
# status: 1 }
#
# fetchWithdrawals
#
# { amount: 14,
# address: "0x0123456789abcdef...",
# successTime: 1514489710000,
# addressTag: "",
# txId: "0x0123456789abcdef...",
# id: "0123456789abcdef...",
# asset: "ETH",
# applyTime: 1514488724000,
# status: 6 }
#
id = self.safe_string(transaction, 'id')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'addressTag') # set but unused
if len(tag) < 1:
tag = None
txid = self.safe_value(transaction, 'txId')
code = None
currencyId = self.safe_string(transaction, 'asset')
if currencyId in self.currencies_by_id:
currency = self.currencies_by_id[currencyId]
else:
code = self.common_currency_code(currencyId)
if currency is not None:
code = currency['code']
timestamp = None
insertTime = self.safe_integer(transaction, 'insertTime')
applyTime = self.safe_integer(transaction, 'applyTime')
type = self.safe_string(transaction, 'type')
if type is None:
if (insertTime is not None) and(applyTime is None):
type = 'deposit'
timestamp = insertTime
elif (insertTime is None) and(applyTime is not None):
type = 'withdrawal'
timestamp = applyTime
status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type)
amount = self.safe_float(transaction, 'amount')
feeCost = None
fee = {
'cost': feeCost,
'currency': code,
}
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'tag': tag,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': None,
'fee': fee,
}
    def fetch_deposit_address(self, code, params={}):
        """Fetch the deposit address for a currency via wapi GET /depositAddress.

        NOTE(review): when the response has no 'success' field or success is
        falsy, this method implicitly returns None instead of raising —
        callers must handle that; confirm whether raising would be preferable.
        """
        self.load_markets()
        currency = self.currency(code)
        response = self.wapiGetDepositAddress(self.extend({
            'asset': currency['id'],
        }, params))
        if 'success' in response:
            if response['success']:
                address = self.safe_string(response, 'address')
                tag = self.safe_string(response, 'addressTag')
                return {
                    'currency': code,
                    'address': self.check_address(address),
                    'tag': tag,
                    'info': response,
                }
    def fetch_funding_fees(self, codes=None, params={}):
        """Fetch per-currency withdrawal fees via wapi GET /assetDetail.

        The 'codes' filter is accepted for interface compatibility but not
        applied — all currencies returned by the endpoint are included.
        """
        response = self.wapiGetAssetDetail()
        #
        #     {
        #         "success": True,
        #         "assetDetail": {
        #             "CTR": {
        #                 "minWithdrawAmount": "70.00000000", #min withdraw amount
        #                 "depositStatus": False,//deposit status
        #                 "withdrawFee": 35, # withdraw fee
        #                 "withdrawStatus": True, #withdraw status
        #                 "depositTip": "Delisted, Deposit Suspended" #reason
        #             },
        #             "SKY": {
        #                 "minWithdrawAmount": "0.02000000",
        #                 "depositStatus": True,
        #                 "withdrawFee": 0.01,
        #                 "withdrawStatus": True
        #             }
        #         }
        #     }
        #
        detail = self.safe_value(response, 'assetDetail')
        ids = list(detail.keys())
        withdrawFees = {}
        for i in range(0, len(ids)):
            id = ids[i]
            code = self.common_currency_code(id)
            withdrawFees[code] = self.safe_float(detail[id], 'withdrawFee')
        return {
            'withdraw': withdrawFees,
            'deposit': {},
            'info': response,
        }
    def withdraw(self, code, amount, address, tag=None, params={}):
        """Request a withdrawal via wapi POST /withdraw.

        :param tag: optional memo/tag appended as 'addressTag' (XRP/XLM-style)
        """
        self.check_address(address)
        self.load_markets()
        currency = self.currency(code)
        # presumably a human-readable label required by the wapi endpoint;
        # defaults to a truncated copy of the address — TODO confirm
        name = address[0:20]
        request = {
            'asset': currency['id'],
            'address': address,
            'amount': float(amount),
            'name': name,
        }
        if tag:
            request['addressTag'] = tag
        response = self.wapiPostWithdraw(self.extend(request, params))
        return {
            'info': response,
            'id': self.safe_string(response, 'id'),
        }
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the signed(or plain) request for a given endpoint.

        - wapi endpoints get an '.html' suffix
        - 'userDataStream' only needs the API key header, no signature
        - private/wapi requests are HMAC-SHA256 signed over the urlencoded
          query including timestamp and recvWindow
        """
        url = self.urls['api'][api]
        url += '/' + path
        if api == 'wapi':
            url += '.html'
        # v1 special case for userDataStream
        if path == 'userDataStream':
            body = self.urlencode(params)
            headers = {
                'X-MBX-APIKEY': self.apiKey,
                'Content-Type': 'application/x-www-form-urlencoded',
            }
        elif (api == 'private') or (api == 'wapi'):
            self.check_required_credentials()
            query = self.urlencode(self.extend({
                'timestamp': self.nonce(),
                'recvWindow': self.options['recvWindow'],
            }, params))
            signature = self.hmac(self.encode(query), self.encode(self.secret))
            query += '&' + 'signature=' + signature
            headers = {
                'X-MBX-APIKEY': self.apiKey,
            }
            # wapi always sends the query in the url, even for POST
            if (method == 'GET') or (method == 'DELETE') or (api == 'wapi'):
                url += '?' + query
            else:
                body = query
                headers['Content-Type'] = 'application/x-www-form-urlencoded'
        else:
            if params:
                url += '?' + self.urlencode(params)
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
    def handle_errors(self, code, reason, url, method, headers, body):
        """Map HTTP/exchange error responses to unified ccxt exceptions.

        Order of checks: HTTP 418/429(bans) -> legacy message-pattern checks
        on 4xx bodies -> JSON body with 'success': False(wapi) -> numeric
        'code' looked up in self.exceptions, with a few message-specific
        overrides for the generic -2010 code.
        """
        if (code == 418) or (code == 429):
            raise DDoSProtection(self.id + ' ' + str(code) + ' ' + reason + ' ' + body)
        # error response in a form: {"code": -1013, "msg": "Invalid quantity."}
        # following block cointains legacy checks against message patterns in "msg" property
        # will switch "code" checks eventually, when we know all of them
        if code >= 400:
            if body.find('Price * QTY is zero or less') >= 0:
                raise InvalidOrder(self.id + ' order cost = amount * price is zero or less ' + body)
            if body.find('LOT_SIZE') >= 0:
                raise InvalidOrder(self.id + ' order amount should be evenly divisible by lot size ' + body)
            if body.find('PRICE_FILTER') >= 0:
                raise InvalidOrder(self.id + ' order price is invalid, i.e. exceeds allowed price precision, exceeds min price or max price limits or is invalid float value in general, use self.price_to_precision(symbol, amount) ' + body)
        if len(body) > 0:
            if body[0] == '{':
                response = json.loads(body)
                # check success value for wapi endpoints
                # response in format {'msg': 'The coin does not exist.', 'success': True/false}
                success = self.safe_value(response, 'success', True)
                if not success:
                    # wapi sometimes nests a JSON document inside 'msg'
                    message = self.safe_string(response, 'msg')
                    parsedMessage = None
                    if message is not None:
                        try:
                            parsedMessage = json.loads(message)
                        except Exception as e:
                            # do nothing
                            parsedMessage = None
                        if parsedMessage is not None:
                            response = parsedMessage
                # checks against error codes
                error = self.safe_string(response, 'code')
                if error is not None:
                    exceptions = self.exceptions
                    if error in exceptions:
                        # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."}
                        # despite that their message is very confusing, it is raised by Binance
                        # on a temporary ban(the API key is valid, but disabled for a while)
                        if (error == '-2015') and self.options['hasAlreadyAuthenticatedSuccessfully']:
                            raise DDoSProtection(self.id + ' temporary banned: ' + body)
                        message = self.safe_string(response, 'msg')
                        if message == 'Order would trigger immediately.':
                            raise InvalidOrder(self.id + ' ' + body)
                        elif message == 'Account has insufficient balance for requested action.':
                            raise InsufficientFunds(self.id + ' ' + body)
                        elif message == 'Rest API trading is not enabled.':
                            raise ExchangeNotAvailable(self.id + ' ' + body)
                        raise exceptions[error](self.id + ' ' + body)
                    else:
                        raise ExchangeError(self.id + ' ' + body)
                if not success:
                    raise ExchangeError(self.id + ' ' + body)
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
# a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."}
if (api == 'private') or (api == 'wapi'):
self.options['hasAlreadyAuthenticatedSuccessfully'] = True
return response
| 42.374663 | 355 | 0.468588 |
ge import Exchange
import math
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import InvalidNonce
class binance (Exchange):
def describe(self):
return self.deep_extend(super(binance, self).describe(), {
'id': 'binance',
'name': 'Binance',
'countries': ['JP'],
'rateLimit': 500,
'certified': True,
'has': {
'fetchDepositAddress': True,
'CORS': False,
'fetchBidsAsks': True,
'fetchTickers': True,
'fetchOHLCV': True,
'fetchMyTrades': True,
'fetchOrder': True,
'fetchOrders': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'withdraw': True,
'fetchFundingFees': True,
'fetchDeposits': True,
'fetchWithdrawals': True,
'fetchTransactions': False,
},
'timeframes': {
'1m': '1m',
'3m': '3m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'2h': '2h',
'4h': '4h',
'6h': '6h',
'8h': '8h',
'12h': '12h',
'1d': '1d',
'3d': '3d',
'1w': '1w',
'1M': '1M',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/29604020-d5483cdc-87ee-11e7-94c7-d1a8d9169293.jpg',
'api': {
'web': 'https://www.binance.com',
'wapi': 'https://api.binance.com/wapi/v3',
'public': 'https://api.binance.com/api/v1',
'private': 'https://api.binance.com/api/v3',
'v3': 'https://api.binance.com/api/v3',
'v1': 'https://api.binance.com/api/v1',
},
'www': 'https://www.binance.com',
'referral': 'https://www.binance.com/?ref=10205187',
'doc': 'https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md',
'fees': 'https://www.binance.com/en/fee/schedule',
},
'api': {
'web': {
'get': [
'exchange/public/product',
'assetWithdraw/getAllAsset.html',
],
},
'wapi': {
'post': [
'withdraw',
],
'get': [
'depositHistory',
'withdrawHistory',
'depositAddress',
'accountStatus',
'systemStatus',
'userAssetDribbletLog',
'tradeFee',
'assetDetail',
],
},
'v3': {
'get': [
'ticker/price',
'ticker/bookTicker',
],
},
'public': {
'get': [
'exchangeInfo',
'ping',
'time',
'depth',
'aggTrades',
'klines',
'ticker/24hr',
'ticker/allPrices',
'ticker/allBookTickers',
'ticker/price',
'ticker/bookTicker',
'exchangeInfo',
],
'put': ['userDataStream'],
'post': ['userDataStream'],
'delete': ['userDataStream'],
},
'private': {
'get': [
'order',
'openOrders',
'allOrders',
'account',
'myTrades',
],
'post': [
'order',
'order/test',
],
'delete': [
'order',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'taker': 0.001,
'maker': 0.001,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {
'ADA': 1.0,
'ADX': 4.7,
'AION': 1.9,
'AMB': 11.4,
'APPC': 6.5,
'ARK': 0.1,
'ARN': 3.1,
'AST': 10.0,
'BAT': 18.0,
'BCD': 1.0,
'BCH': 0.001,
'BCPT': 10.2,
'BCX': 1.0,
'BNB': 0.7,
'BNT': 1.5,
'BQX': 1.6,
'BRD': 6.4,
'BTC': 0.001,
'BTG': 0.001,
'BTM': 5.0,
'BTS': 1.0,
'CDT': 67.0,
'CMT': 37.0,
'CND': 47.0,
'CTR': 5.4,
'DASH': 0.002,
'DGD': 0.06,
'DLT': 11.7,
'DNT': 51.0,
'EDO': 2.5,
'ELF': 6.5,
'ENG': 2.1,
'ENJ': 42.0,
'EOS': 1.0,
'ETC': 0.01,
'ETF': 1.0,
'ETH': 0.01,
'EVX': 2.5,
'FUEL': 45.0,
'FUN': 85.0,
'GAS': 0,
'GTO': 20.0,
'GVT': 0.53,
'GXS': 0.3,
'HCC': 0.0005,
'HSR': 0.0001,
'ICN': 3.5,
'ICX': 1.3,
'INS': 1.5,
'IOTA': 0.5,
'KMD': 0.002,
'KNC': 2.6,
'LEND': 54.0,
'LINK': 12.8,
'LLT': 54.0,
'LRC': 9.1,
'LSK': 0.1,
'LTC': 0.01,
'LUN': 0.29,
'MANA': 74.0,
'MCO': 0.86,
'MDA': 4.7,
'MOD': 2.0,
'MTH': 34.0,
'MTL': 1.9,
'NAV': 0.2,
'NEBL': 0.01,
'NEO': 0.0,
'NULS': 2.1,
'OAX': 8.3,
'OMG': 0.57,
'OST': 17.0,
'POE': 88.0,
'POWR': 8.6,
'PPT': 0.25,
'QSP': 21.0,
'QTUM': 0.01,
'RCN': 35.0,
'RDN': 2.2,
'REQ': 18.1,
'RLC': 4.1,
'SALT': 1.3,
'SBTC': 1.0,
'SNGLS': 42,
'SNM': 29.0,
'SNT': 32.0,
'STORJ': 5.9,
'STRAT': 0.1,
'SUB': 7.4,
'TNB': 82.0,
'TNT': 47.0,
'TRIG': 6.7,
'TRX': 129.0,
'USDT': 23.0,
'VEN': 1.8,
'VIB': 28.0,
'VIBE': 7.2,
'WABI': 3.5,
'WAVES': 0.002,
'WINGS': 9.3,
'WTC': 0.5,
'XLM': 0.01,
'XMR': 0.04,
'XRP': 0.25,
'XVG': 0.1,
'XZC': 0.02,
'YOYOW': 39.0,
'ZEC': 0.005,
'ZRX': 5.7,
},
'deposit': {},
},
},
'commonCurrencies': {
'YOYO': 'YOYOW',
'BCC': 'BCH',
},
'options': {
'defaultTimeInForce': 'GTC',
'defaultLimitOrderType': 'limit',
'hasAlreadyAuthenticatedSuccessfully': False,
'warnOnFetchOpenOrdersWithoutSymbol': True,
'recvWindow': 5 * 1000,
'timeDifference': 0,
'adjustForTimeDifference': False,
'parseOrderToPrecision': False,
'newOrderRespType': 'RESULT',
},
'exceptions': {
'-1000': ExchangeNotAvailable,
'-1013': InvalidOrder,
'-1021': InvalidNonce,
'-1022': AuthenticationError,
'-1100': InvalidOrder,
'-1104': ExchangeError, # Not all sent parameters were read, read 8 parameters but was sent 9
'-1128': ExchangeError, # {"code":-1128,"msg":"Combination of optional parameters invalid."}
'-2010': ExchangeError, # generic error code for createOrder -> 'Account has insufficient balance for requested action.', {"code":-2010,"msg":"Rest API trading is not enabled."}, etc...
'-2011': OrderNotFound, # cancelOrder(1, 'BTC/USDT') -> 'UNKNOWN_ORDER'
'-2013': OrderNotFound, # fetchOrder(1, 'BTC/USDT') -> 'Order does not exist'
'-2014': AuthenticationError, # {"code":-2014, "msg": "API-key format invalid."}
'-2015': AuthenticationError, # "Invalid API-key, IP, or permissions for action."
},
})
def nonce(self):
return self.milliseconds() - self.options['timeDifference']
def load_time_difference(self):
response = self.publicGetTime()
after = self.milliseconds()
self.options['timeDifference'] = int(after - response['serverTime'])
return self.options['timeDifference']
def fetch_markets(self):
response = self.publicGetExchangeInfo()
if self.options['adjustForTimeDifference']:
self.load_time_difference()
markets = response['symbols']
result = []
for i in range(0, len(markets)):
market = markets[i]
id = market['symbol']
# "123456" is a "test symbol/market"
if id == '123456':
continue
baseId = market['baseAsset']
quoteId = market['quoteAsset']
base = self.common_currency_code(baseId)
quote = self.common_currency_code(quoteId)
symbol = base + '/' + quote
filters = self.index_by(market['filters'], 'filterType')
precision = {
'base': market['baseAssetPrecision'],
'quote': market['quotePrecision'],
'amount': market['baseAssetPrecision'],
'price': market['quotePrecision'],
}
active = (market['status'] == 'TRADING')
entry = {
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'info': market,
'active': active,
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision['amount']),
'max': None,
},
'price': {
'min': math.pow(10, -precision['price']),
'max': None,
},
'cost': {
'min': -1 * math.log10(precision['amount']),
'max': None,
},
},
}
if 'PRICE_FILTER' in filters:
filter = filters['PRICE_FILTER']
entry['precision']['price'] = self.precision_from_string(filter['tickSize'])
entry['limits']['price'] = {
'min': self.safe_float(filter, 'minPrice'),
'max': self.safe_float(filter, 'maxPrice'),
}
if 'LOT_SIZE' in filters:
filter = filters['LOT_SIZE']
entry['precision']['amount'] = self.precision_from_string(filter['stepSize'])
entry['limits']['amount'] = {
'min': self.safe_float(filter, 'minQty'),
'max': self.safe_float(filter, 'maxQty'),
}
if 'MIN_NOTIONAL' in filters:
entry['limits']['cost']['min'] = float(filters['MIN_NOTIONAL']['minNotional'])
result.append(entry)
return result
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
key = 'quote'
rate = market[takerOrMaker]
cost = float(self.cost_to_precision(symbol, amount * rate))
if side == 'sell':
cost *= price
else:
key = 'base'
return {
'type': takerOrMaker,
'currency': market[key],
'rate': rate,
'cost': float(self.fee_to_precision(symbol, cost)),
}
def fetch_balance(self, params={}):
self.load_markets()
response = self.privateGetAccount(params)
result = {'info': response}
balances = response['balances']
for i in range(0, len(balances)):
balance = balances[i]
currency = balance['asset']
if currency in self.currencies_by_id:
currency = self.currencies_by_id[currency]['code']
account = {
'free': float(balance['free']),
'used': float(balance['locked']),
'total': 0.0,
}
account['total'] = self.sum(account['free'], account['used'])
result[currency] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # default = maximum = 100
response = self.publicGetDepth(self.extend(request, params))
orderbook = self.parse_order_book(response)
orderbook['nonce'] = self.safe_integer(response, 'lastUpdateId')
return orderbook
def parse_ticker(self, ticker, market=None):
timestamp = self.safe_integer(ticker, 'closeTime')
iso8601 = None if (timestamp is None) else self.iso8601(timestamp)
symbol = self.find_symbol(self.safe_string(ticker, 'symbol'), market)
last = self.safe_float(ticker, 'lastPrice')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': iso8601,
'high': self.safe_float(ticker, 'highPrice'),
'low': self.safe_float(ticker, 'lowPrice'),
'bid': self.safe_float(ticker, 'bidPrice'),
'bidVolume': self.safe_float(ticker, 'bidQty'),
'ask': self.safe_float(ticker, 'askPrice'),
'askVolume': self.safe_float(ticker, 'askQty'),
'vwap': self.safe_float(ticker, 'weightedAvgPrice'),
'open': self.safe_float(ticker, 'openPrice'),
'close': last,
'last': last,
'previousClose': self.safe_float(ticker, 'prevClosePrice'), # previous day close
'change': self.safe_float(ticker, 'priceChange'),
'percentage': self.safe_float(ticker, 'priceChangePercent'),
'average': None,
'baseVolume': self.safe_float(ticker, 'volume'),
'quoteVolume': self.safe_float(ticker, 'quoteVolume'),
'info': ticker,
}
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicGetTicker24hr(self.extend({
'symbol': market['id'],
}, params))
return self.parse_ticker(response, market)
def parse_tickers(self, rawTickers, symbols=None):
tickers = []
for i in range(0, len(rawTickers)):
tickers.append(self.parse_ticker(rawTickers[i]))
return self.filter_by_array(tickers, 'symbol', symbols)
def fetch_bids_asks(self, symbols=None, params={}):
self.load_markets()
rawTickers = self.publicGetTickerBookTicker(params)
return self.parse_tickers(rawTickers, symbols)
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
rawTickers = self.publicGetTicker24hr(params)
return self.parse_tickers(rawTickers, symbols)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return [
ohlcv[0],
float(ohlcv[1]),
float(ohlcv[2]),
float(ohlcv[3]),
float(ohlcv[4]),
float(ohlcv[5]),
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'interval': self.timeframes[timeframe],
}
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit # default == max == 500
response = self.publicGetKlines(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_trade(self, trade, market=None):
timestampField = 'T' if ('T' in list(trade.keys())) else 'time'
timestamp = self.safe_integer(trade, timestampField)
priceField = 'p' if ('p' in list(trade.keys())) else 'price'
price = self.safe_float(trade, priceField)
amountField = 'q' if ('q' in list(trade.keys())) else 'qty'
amount = self.safe_float(trade, amountField)
idField = 'a' if ('a' in list(trade.keys())) else 'id'
id = self.safe_string(trade, idField)
side = None
order = None
if 'orderId' in trade:
order = self.safe_string(trade, 'orderId')
if 'm' in trade:
side = 'sell' if trade['m'] else 'buy' # self is reversed intentionally
else:
if 'isBuyer' in trade:
side = 'buy' if (trade['isBuyer']) else 'sell' # self is a True side
fee = None
if 'commission' in trade:
fee = {
'cost': self.safe_float(trade, 'commission'),
'currency': self.common_currency_code(trade['commissionAsset']),
}
takerOrMaker = None
if 'isMaker' in trade:
takerOrMaker = 'maker' if trade['isMaker'] else 'taker'
symbol = None
if market is None:
marketId = self.safe_string(trade, 'symbol')
market = self.safe_value(self.markets_by_id, marketId)
if market is not None:
symbol = market['symbol']
return {
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': id,
'order': order,
'type': None,
'takerOrMaker': takerOrMaker,
'side': side,
'price': price,
'cost': price * amount,
'amount': amount,
'fee': fee,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if since is not None:
request['startTime'] = since
request['endTime'] = self.sum(since, 3600000)
if limit is not None:
request['limit'] = limit
# 'fromId': 123, # ID to get aggregate trades from INCLUSIVE.
# 'startTime': 456, # Timestamp in ms to get aggregate trades from INCLUSIVE.
# 'endTime': 789, # Timestamp in ms to get aggregate trades until INCLUSIVE.
# 'limit': 500, # default = 500, maximum = 1000
#
# Caveats:
# - default limit(500) applies only if no other parameters set, trades up
# to the maximum limit may be returned to satisfy other parameters
# - if both limit and time window is set and time window contains more
# trades than the limit then the last trades from the window are returned
# - 'tradeId' accepted and returned by self method is "aggregate" trade id
# which is different from actual trade id
# - setting both fromId and time window results in error
response = self.publicGetAggTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def parse_order_status(self, status):
statuses = {
'NEW': 'open',
'PARTIALLY_FILLED': 'open',
'FILLED': 'closed',
'CANCELED': 'canceled',
}
return statuses[status] if (status in list(statuses.keys())) else status
def parse_order(self, order, market=None):
status = self.parse_order_status(self.safe_string(order, 'status'))
symbol = self.find_symbol(self.safe_string(order, 'symbol'), market)
timestamp = None
if 'time' in order:
timestamp = order['time']
elif 'transactTime' in order:
timestamp = order['transactTime']
price = self.safe_float(order, 'price')
amount = self.safe_float(order, 'origQty')
filled = self.safe_float(order, 'executedQty')
remaining = None
cost = self.safe_float(order, 'cummulativeQuoteQty')
if filled is not None:
if amount is not None:
remaining = amount - filled
if self.options['parseOrderToPrecision']:
remaining = float(self.amount_to_precision(symbol, remaining))
remaining = max(remaining, 0.0)
if price is not None:
if cost is None:
cost = price * filled
id = self.safe_string(order, 'orderId')
type = self.safe_string(order, 'type')
if type is not None:
type = type.lower()
if type == 'market':
if price == 0.0:
if (cost is not None) and(filled is not None):
if (cost > 0) and(filled > 0):
price = cost / filled
side = self.safe_string(order, 'side')
if side is not None:
side = side.lower()
fee = None
trades = None
fills = self.safe_value(order, 'fills')
if fills is not None:
trades = self.parse_trades(fills, market)
numTrades = len(trades)
if numTrades > 0:
cost = trades[0]['cost']
fee = {
'cost': trades[0]['fee']['cost'],
'currency': trades[0]['fee']['currency'],
}
for i in range(1, len(trades)):
cost = self.sum(cost, trades[i]['cost'])
fee['cost'] = self.sum(fee['cost'], trades[i]['fee']['cost'])
average = None
if cost is not None:
if filled:
average = cost / filled
if self.options['parseOrderToPrecision']:
cost = float(self.cost_to_precision(symbol, cost))
result = {
'info': order,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'average': average,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': fee,
'trades': trades,
}
return result
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
# the next 5 lines are added to support for testing orders
method = 'privatePostOrder'
test = self.safe_value(params, 'test', False)
if test:
method += 'Test'
params = self.omit(params, 'test')
uppercaseType = type.upper()
order = {
'symbol': market['id'],
'quantity': self.amount_to_precision(symbol, amount),
'type': uppercaseType,
'side': side.upper(),
'newOrderRespType': self.options['newOrderRespType'], # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills
}
timeInForceIsRequired = False
priceIsRequired = False
stopPriceIsRequired = False
if uppercaseType == 'LIMIT':
priceIsRequired = True
timeInForceIsRequired = True
elif (uppercaseType == 'STOP_LOSS') or (uppercaseType == 'TAKE_PROFIT'):
stopPriceIsRequired = True
elif (uppercaseType == 'STOP_LOSS_LIMIT') or (uppercaseType == 'TAKE_PROFIT_LIMIT'):
stopPriceIsRequired = True
priceIsRequired = True
timeInForceIsRequired = True
elif uppercaseType == 'LIMIT_MAKER':
priceIsRequired = True
if priceIsRequired:
if price is None:
raise InvalidOrder(self.id + ' createOrder method requires a price argument for a ' + type + ' order')
order['price'] = self.price_to_precision(symbol, price)
if timeInForceIsRequired:
order['timeInForce'] = self.options['defaultTimeInForce'] # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel
if stopPriceIsRequired:
stopPrice = self.safe_float(params, 'stopPrice')
if stopPrice is None:
raise InvalidOrder(self.id + ' createOrder method requires a stopPrice extra param for a ' + type + ' order')
else:
order['stopPrice'] = self.price_to_precision(symbol, stopPrice)
response = getattr(self, method)(self.extend(order, params))
return self.parse_order(response, market)
def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
origClientOrderId = self.safe_value(params, 'origClientOrderId')
request = {
'symbol': market['id'],
}
if origClientOrderId is not None:
request['origClientOrderId'] = origClientOrderId
else:
request['orderId'] = int(id)
response = self.privateGetOrder(self.extend(request, params))
return self.parse_order(response, market)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit
response = self.privateGetAllOrders(self.extend(request, params))
#
# [
# {
# "symbol": "LTCBTC",
# "orderId": 1,
# "clientOrderId": "myOrder1",
# "price": "0.1",
# "origQty": "1.0",
# "executedQty": "0.0",
# "cummulativeQuoteQty": "0.0",
# "status": "NEW",
# "timeInForce": "GTC",
# "type": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "icebergQty": "0.0",
# "time": 1499827319559,
# "updateTime": 1499827319559,
# "isWorking": True
# }
# ]
#
return self.parse_orders(response, market, since, limit)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
request = {}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
elif self.options['warnOnFetchOpenOrdersWithoutSymbol']:
symbols = self.symbols
numSymbols = len(symbols)
fetchOpenOrdersRateLimit = int(numSymbols / 2)
raise ExchangeError(self.id + ' fetchOpenOrders WARNING: fetching open orders without specifying a symbol is rate-limited to one call per ' + str(fetchOpenOrdersRateLimit) + ' seconds. Do not call self method frequently to avoid ban. Set ' + self.id + '.options["warnOnFetchOpenOrdersWithoutSymbol"] = False to suppress self warning message.')
response = self.privateGetOpenOrders(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
orders = self.fetch_orders(symbol, since, limit, params)
return self.filter_by(orders, 'status', 'closed')
def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
response = self.privateDeleteOrder(self.extend({
'symbol': market['id'],
'orderId': int(id),
# 'origClientOrderId': id,
}, params))
return self.parse_order(response)
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit
response = self.privateGetMyTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def fetch_deposits(self, code=None, since=None, limit=None, params={}):
self.load_markets()
currency = None
request = {}
if code is not None:
currency = self.currency(code)
request['asset'] = currency['id']
if since is not None:
request['startTime'] = since
response = self.wapiGetDepositHistory(self.extend(request, params))
#
# { success: True,
# depositList: [{insertTime: 1517425007000,
# amount: 0.3,
# address: "0x0123456789abcdef",
# addressTag: "",
# txId: "0x0123456789abcdef",
# asset: "ETH",
# status: 1 }]}
#
return self.parseTransactions(response['depositList'], currency, since, limit)
def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
self.load_markets()
currency = None
request = {}
if code is not None:
currency = self.currency(code)
request['asset'] = currency['id']
if since is not None:
request['startTime'] = since
response = self.wapiGetWithdrawHistory(self.extend(request, params))
#
# {withdrawList: [{ amount: 14,
# address: "0x0123456789abcdef...",
# successTime: 1514489710000,
# addressTag: "",
# txId: "0x0123456789abcdef...",
# id: "0123456789abcdef...",
# asset: "ETH",
# applyTime: 1514488724000,
# status: 6 },
# { amount: 7600,
# address: "0x0123456789abcdef...",
# successTime: 1515323226000,
# addressTag: "",
# txId: "0x0123456789abcdef...",
# id: "0123456789abcdef...",
# asset: "ICN",
# applyTime: 1515322539000,
# status: 6 } ],
# success: True }
#
return self.parseTransactions(response['withdrawList'], currency, since, limit)
def parse_transaction_status_by_type(self, status, type=None):
if type is None:
return status
statuses = {
'deposit': {
'0': 'pending',
'1': 'ok',
},
'withdrawal': {
'0': 'pending', # Email Sent
'1': 'canceled', # Cancelled(different from 1 = ok in deposits)
'2': 'pending', # Awaiting Approval
'3': 'failed', # Rejected
'4': 'pending', # Processing
'5': 'failed', # Failure
'6': 'ok', # Completed
},
}
return statuses[type][status] if (status in list(statuses[type].keys())) else status
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits
# {insertTime: 1517425007000,
# amount: 0.3,
# address: "0x0123456789abcdef",
# addressTag: "",
# txId: "0x0123456789abcdef",
# asset: "ETH",
# status: 1 }
#
# fetchWithdrawals
#
# { amount: 14,
# address: "0x0123456789abcdef...",
# successTime: 1514489710000,
# addressTag: "",
# txId: "0x0123456789abcdef...",
# id: "0123456789abcdef...",
# asset: "ETH",
# applyTime: 1514488724000,
# status: 6 }
#
id = self.safe_string(transaction, 'id')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'addressTag') # set but unused
if len(tag) < 1:
tag = None
txid = self.safe_value(transaction, 'txId')
code = None
currencyId = self.safe_string(transaction, 'asset')
if currencyId in self.currencies_by_id:
currency = self.currencies_by_id[currencyId]
else:
code = self.common_currency_code(currencyId)
if currency is not None:
code = currency['code']
timestamp = None
insertTime = self.safe_integer(transaction, 'insertTime')
applyTime = self.safe_integer(transaction, 'applyTime')
type = self.safe_string(transaction, 'type')
if type is None:
if (insertTime is not None) and(applyTime is None):
type = 'deposit'
timestamp = insertTime
elif (insertTime is None) and(applyTime is not None):
type = 'withdrawal'
timestamp = applyTime
status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type)
amount = self.safe_float(transaction, 'amount')
feeCost = None
fee = {
'cost': feeCost,
'currency': code,
}
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'tag': tag,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': None,
'fee': fee,
}
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
response = self.wapiGetDepositAddress(self.extend({
'asset': currency['id'],
}, params))
if 'success' in response:
if response['success']:
address = self.safe_string(response, 'address')
tag = self.safe_string(response, 'addressTag')
return {
'currency': code,
'address': self.check_address(address),
'tag': tag,
'info': response,
}
def fetch_funding_fees(self, codes=None, params={}):
response = self.wapiGetAssetDetail()
#
# {
# "success": True,
# "assetDetail": {
# "CTR": {
# "minWithdrawAmount": "70.00000000", #min withdraw amount
# "depositStatus": False,//deposit status
# "withdrawFee": 35, # withdraw fee
# "withdrawStatus": True, #withdraw status
# "depositTip": "Delisted, Deposit Suspended" #reason
# },
# "SKY": {
# "minWithdrawAmount": "0.02000000",
# "depositStatus": True,
# "withdrawFee": 0.01,
# "withdrawStatus": True
# }
# }
# }
#
detail = self.safe_value(response, 'assetDetail')
ids = list(detail.keys())
withdrawFees = {}
for i in range(0, len(ids)):
id = ids[i]
code = self.common_currency_code(id)
withdrawFees[code] = self.safe_float(detail[id], 'withdrawFee')
return {
'withdraw': withdrawFees,
'deposit': {},
'info': response,
}
def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
self.load_markets()
currency = self.currency(code)
name = address[0:20]
request = {
'asset': currency['id'],
'address': address,
'amount': float(amount),
'name': name,
}
if tag:
request['addressTag'] = tag
response = self.wapiPostWithdraw(self.extend(request, params))
return {
'info': response,
'id': self.safe_string(response, 'id'),
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api]
url += '/' + path
if api == 'wapi':
url += '.html'
# v1 special case for userDataStream
if path == 'userDataStream':
body = self.urlencode(params)
headers = {
'X-MBX-APIKEY': self.apiKey,
'Content-Type': 'application/x-www-form-urlencoded',
}
elif (api == 'private') or (api == 'wapi'):
self.check_required_credentials()
query = self.urlencode(self.extend({
'timestamp': self.nonce(),
'recvWindow': self.options['recvWindow'],
}, params))
signature = self.hmac(self.encode(query), self.encode(self.secret))
query += '&' + 'signature=' + signature
headers = {
'X-MBX-APIKEY': self.apiKey,
}
if (method == 'GET') or (method == 'DELETE') or (api == 'wapi'):
url += '?' + query
else:
body = query
headers['Content-Type'] = 'application/x-www-form-urlencoded'
else:
if params:
url += '?' + self.urlencode(params)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body):
if (code == 418) or (code == 429):
raise DDoSProtection(self.id + ' ' + str(code) + ' ' + reason + ' ' + body)
# error response in a form: {"code": -1013, "msg": "Invalid quantity."}
# following block cointains legacy checks against message patterns in "msg" property
# will switch "code" checks eventually, when we know all of them
if code >= 400:
if body.find('Price * QTY is zero or less') >= 0:
raise InvalidOrder(self.id + ' order cost = amount * price is zero or less ' + body)
if body.find('LOT_SIZE') >= 0:
raise InvalidOrder(self.id + ' order amount should be evenly divisible by lot size ' + body)
if body.find('PRICE_FILTER') >= 0:
raise InvalidOrder(self.id + ' order price is invalid, i.e. exceeds allowed price precision, exceeds min price or max price limits or is invalid float value in general, use self.price_to_precision(symbol, amount) ' + body)
if len(body) > 0:
if body[0] == '{':
response = json.loads(body)
# check success value for wapi endpoints
# response in format {'msg': 'The coin does not exist.', 'success': True/false}
success = self.safe_value(response, 'success', True)
if not success:
message = self.safe_string(response, 'msg')
parsedMessage = None
if message is not None:
try:
parsedMessage = json.loads(message)
except Exception as e:
# do nothing
parsedMessage = None
if parsedMessage is not None:
response = parsedMessage
# checks against error codes
error = self.safe_string(response, 'code')
if error is not None:
exceptions = self.exceptions
if error in exceptions:
# a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."}
# despite that their message is very confusing, it is raised by Binance
# on a temporary ban(the API key is valid, but disabled for a while)
if (error == '-2015') and self.options['hasAlreadyAuthenticatedSuccessfully']:
raise DDoSProtection(self.id + ' temporary banned: ' + body)
message = self.safe_string(response, 'msg')
if message == 'Order would trigger immediately.':
raise InvalidOrder(self.id + ' ' + body)
elif message == 'Account has insufficient balance for requested action.':
raise InsufficientFunds(self.id + ' ' + body)
elif message == 'Rest API trading is not enabled.':
raise ExchangeNotAvailable(self.id + ' ' + body)
raise exceptions[error](self.id + ' ' + body)
else:
raise ExchangeError(self.id + ' ' + body)
if not success:
raise ExchangeError(self.id + ' ' + body)
    def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Perform an API request and record successful authentication.

        Any signed ('private'/'wapi') call that returns without raising marks
        the session as having authenticated at least once, so that a later
        -2015 error can be classified in handle_errors as a temporary ban
        (DDoSProtection) rather than bad credentials.
        """
        response = self.fetch2(path, api, method, params, headers, body)
        # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."}
        if (api == 'private') or (api == 'wapi'):
            self.options['hasAlreadyAuthenticatedSuccessfully'] = True
        return response
| true | true |
f71ced92e40f1740937111306959303ed4663fa3 | 9,781 | py | Python | test.py | erprashu/Metal_erning | 79d1a6a457be37258df50a9194946caeb86845a2 | [
"MIT"
] | null | null | null | test.py | erprashu/Metal_erning | 79d1a6a457be37258df50a9194946caeb86845a2 | [
"MIT"
] | null | null | null | test.py | erprashu/Metal_erning | 79d1a6a457be37258df50a9194946caeb86845a2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import argparse
import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torch.autograd import Variable
from tqdm import tqdm
from models.protonet_embedding import ProtoNetEmbedding
from models.R2D2_embedding import R2D2Embedding
from models.ResNet12_embedding import resnet12
from models.classification_heads import ClassificationHead, R2D2Head
from utils import pprint, set_gpu, Timer, count_accuracy, log
import random
import numpy as np
import os
import pdb
def get_model(options):
    """Build the embedding network and classification head specified in *options*.

    Parameters
    ----------
    options : argparse.Namespace
        Must provide ``network`` ('ProtoNet' | 'R2D2' | 'ResNet'),
        ``head`` ('ProtoNet' | 'Ridge' | 'R2D2' | 'SVM') and ``dataset``.

    Returns
    -------
    (network, cls_head) : tuple of nn.Module
        Both modules are moved to CUDA; the ResNet backbone is wrapped in
        DataParallel.
    """
    # Choose the embedding network
    if options.network == 'ProtoNet':
        network = ProtoNetEmbedding().cuda()
    elif options.network == 'R2D2':
        network = R2D2Embedding().cuda()
    elif options.network == 'ResNet':
        # ImageNet-derived datasets use 84x84 inputs -> larger dropblock size
        if options.dataset == 'miniImageNet' or options.dataset == 'tieredImageNet':
            network = resnet12(avg_pool=False, drop_rate=0.1, dropblock_size=5).cuda()
        else:
            network = resnet12(avg_pool=False, drop_rate=0.1, dropblock_size=2).cuda()
        network = torch.nn.DataParallel(network)
    else:
        # BUGFIX: raise instead of print + assert(False); asserts vanish under -O
        raise ValueError('Cannot recognize the network type: {}'.format(options.network))
    # Choose the classification head.
    # BUGFIX: this previously read the global `opt` instead of the
    # `options` argument, silently coupling the function to the CLI parser.
    if options.head == 'ProtoNet':
        cls_head = ClassificationHead(base_learner='ProtoNet').cuda()
    elif options.head == 'Ridge':
        cls_head = ClassificationHead(base_learner='Ridge').cuda()
    elif options.head == 'R2D2':
        cls_head = R2D2Head().cuda()
    elif options.head == 'SVM':
        cls_head = ClassificationHead(base_learner='SVM-CS').cuda()
    else:
        raise ValueError('Cannot recognize the classification head type: {}'.format(options.head))
    return (network, cls_head)
def get_dataset(options):
    """Build the test-split dataset and matching few-shot dataloader class.

    Imports are deferred so that only the selected dataset's dependencies
    are loaded.

    Returns
    -------
    (dataset_test, data_loader) : tuple
        The instantiated test-phase dataset and the FewShotDataloader class.
    """
    if options.dataset == 'miniImageNet':
        from data.mini_imagenet import MiniImageNet, FewShotDataloader
        dataset_test = MiniImageNet(phase='test')
        data_loader = FewShotDataloader
    elif options.dataset == 'tieredImageNet':
        from data.tiered_imagenet import tieredImageNet, FewShotDataloader
        dataset_test = tieredImageNet(phase='test')
        data_loader = FewShotDataloader
    elif options.dataset == 'CIFAR_FS':
        from data.CIFAR_FS import CIFAR_FS, FewShotDataloader
        dataset_test = CIFAR_FS(phase='test')
        data_loader = FewShotDataloader
    elif options.dataset == 'FC100':
        from data.FC100 import FC100, FewShotDataloader
        dataset_test = FC100(phase='test')
        data_loader = FewShotDataloader
    else:
        # BUGFIX: raise instead of print + assert(False); asserts vanish under -O
        raise ValueError('Cannot recognize the dataset type: {}'.format(options.dataset))
    return (dataset_test, data_loader)
def self_mix(data):
    """In-place augmentation: overwrite a random box with a copy of another.

    A half-width/half-height box anchored at a random center is overwritten
    with a same-sized box taken from a different random location in the same
    image; when the box is square it is additionally rotated by a random
    multiple of 90 degrees.

    data: 4-D tensor (batch, channels, H, W) — modified in place and returned.

    NOTE(review): the statements below are kept byte-for-byte because the
    result depends on the exact order of the np.random / random calls.
    """
    size = data.size()
    W = size[-1]
    H = size[-2]
    # uniform: random center of the box that will be overwritten
    cx = np.random.randint(W)
    cy = np.random.randint(H)
    # box size is half the image in each dimension
    cut_w = W//2
    cut_h = H//2
    # clip the box to the image; bbx2/bby2 are exclusive upper bounds
    bbx1 = np.clip(cx - cut_w // 2, 0, W)
    bby1 = np.clip(cy - cut_h // 2, 0, H)
    bbx2 = np.clip(cx + cut_w // 2, 0, W)
    bby2 = np.clip(cy + cut_h // 2, 0, H)
    # resample until the source corner differs from the destination corner
    while True:
        bbxn = np.random.randint(0, W-(bbx2-bbx1))
        bbyn = np.random.randint(0, H-(bby2-bby1))
        if bbxn != bbx1 or bbyn != bby1:
            break
    # torch.rot90 by 90/270 degrees only preserves shape for square boxes
    if (bbx2 - bbx1) == (bby2 - bby1):
        k = random.sample([0, 1, 2, 3], 1)[0]
    else:
        k = 0
    # NOTE(review): bbx* are derived from W (last dim) but index dim 2, and
    # bby* from H index dim 3 — harmless for the square inputs used here,
    # but verify the axis convention before using non-square images.
    data[:, :, bbx1:bbx2, bby1:bby2] = torch.rot90(data[:, :, bbxn:bbxn + (bbx2-bbx1), bbyn:bbyn + (bby2-bby1)], k, [2,3])
    #data[:, :, bbx1:bbx2, bby1:bby2] = data[:, :, bbxn:bbxn + (bbx2-bbx1), bbyn:bbyn + (bby2-bby1)]
    return data
def flip(x, dim):
    """Return a view of *x* with the element order reversed along *dim*."""
    # Descending index tensor [n-1, n-2, ..., 0] on the same device as x.
    reversed_idx = torch.arange(x.size(dim) - 1, -1, -1,
                                dtype=torch.long, device=x.device)
    selector = [slice(None)] * x.dim()
    selector[dim] = reversed_idx
    return x[tuple(selector)]
def build_grid(source_size,target_size):
    """Build a centered sampling grid for F.grid_sample.

    Returns a CUDA tensor of shape (1, target_size, target_size, 2) whose
    (x, y) coordinates span [-k, k] with k = target_size / source_size,
    i.e. a central crop of `target_size` pixels in normalized coordinates.
    """
    k = float(target_size)/float(source_size)
    # one axis of evenly spaced coords, repeated into a 2-D grid
    direct = torch.linspace(-k,k,target_size).unsqueeze(0).repeat(target_size,1).unsqueeze(-1)
    # stack the x grid with its transpose (the y grid) into the last dim
    full = torch.cat([direct,direct.transpose(1,0)],dim=2).unsqueeze(0)
    return full.cuda()
def random_crop_grid(x,grid):
    """Shift the centered crop grid by a random per-sample (x, y) offset.

    x: input batch (N, C, H, W); grid: output of build_grid (1, t, t, 2).
    Returns a (N, t, t, 2) grid where each sample's crop window is moved by
    an integer pixel offset drawn uniformly from [0, H - t).
    """
    delta = x.size(2)-grid.size(1)
    grid = grid.repeat(x.size(0),1,1,1).cuda()
    #Add random shifts by x
    grid[:,:,:,0] = grid[:,:,:,0]+ torch.FloatTensor(x.size(0)).cuda().random_(0, delta).unsqueeze(-1).unsqueeze(-1).expand(-1, grid.size(1), grid.size(2)) /x.size(2)
    #Add random shifts by y
    # NOTE(review): the y shift is also normalized by x.size(2) (height);
    # for non-square inputs this presumably should be x.size(3) — confirm.
    grid[:,:,:,1] = grid[:,:,:,1]+ torch.FloatTensor(x.size(0)).cuda().random_(0, delta).unsqueeze(-1).unsqueeze(-1).expand(-1, grid.size(1), grid.size(2)) /x.size(2)
    return grid
def random_cropping(batch, t):
    """Randomly crop every image in *batch* to t x t pixels via grid sampling."""
    # Centered t-pixel sampling grid sized for this batch's spatial extent.
    base_grid = build_grid(batch.size(-1), t)
    # Give each sample its own random integer shift of the crop window.
    shifted_grid = random_crop_grid(batch, base_grid)
    # Resample at the shifted grid locations (nearest neighbour, no blending).
    return F.grid_sample(batch, shifted_grid, mode='nearest')
def shot_aug(data_support, labels_support, n_support, method, opt):
    """Grow the support set with one shot-level augmentation method.

    "fliplr" appends a horizontally mirrored copy of every support image;
    "random_crop" appends a padded-then-randomly-cropped copy. In both cases
    the labels are duplicated to match and the support count is scaled by
    ``opt.s_du``. Unknown methods leave everything unchanged.
    """
    orig_shape = data_support.shape
    if method == "fliplr":
        # Mirror every support image along the width axis.
        augmented = flip(data_support, -1)
        data_support = torch.cat((data_support, augmented), dim=1)
        labels_support = torch.cat((labels_support, labels_support), dim=1)
        n_support = opt.s_du * n_support
    elif method == "random_crop":
        # Pad 4 px on every side, then take a random 32x32 crop per image.
        flat = data_support.view([-1] + list(data_support.shape[-3:]))
        augmented = random_cropping(F.pad(flat, (4, 4, 4, 4)), 32)
        augmented = augmented.view([orig_shape[0], -1] + list(data_support.shape[-3:]))
        data_support = torch.cat((data_support, augmented), dim=1)
        labels_support = torch.cat((labels_support, labels_support), dim=1)
        n_support = opt.s_du * n_support
    return data_support, labels_support, n_support
if __name__ == '__main__':
    # ---- CLI ----------------------------------------------------------
    parser = argparse.ArgumentParser()
    parser.add_argument('--gpu', default='0')
    parser.add_argument('--load', default='./experiments/exp_1/best_model.pth',
                            help='path of the checkpoint file')
    parser.add_argument('--episode', type=int, default=1000,
                            help='number of episodes to test')
    parser.add_argument('--way', type=int, default=5,
                            help='number of classes in one test episode')
    parser.add_argument('--shot', type=int, default=1,
                            help='number of support examples per training class')
    parser.add_argument('--shot_aug', '-shotaug', default=[], nargs='+', type=str,
                            help='If use shot level data augmentation.')
    parser.add_argument('--s_du', type=int, default=1,
                            help='number of support examples augmented by shot')
    parser.add_argument('--query', type=int, default=15,
                            help='number of query examples per training class')
    parser.add_argument('--network', type=str, default='ProtoNet',
                            help='choose which embedding network to use. ProtoNet, R2D2, ResNet')
    # NOTE(review): the help strings of --head and --dataset look swapped
    # ("embedding network" vs "classification head"); behavior is unaffected.
    parser.add_argument('--head', type=str, default='ProtoNet',
                            help='choose which embedding network to use. ProtoNet, Ridge, R2D2, SVM')
    parser.add_argument('--dataset', type=str, default='miniImageNet',
                            help='choose which classification head to use. miniImageNet, tieredImageNet, CIFAR_FS, FC100')
    opt = parser.parse_args()
    # ---- Data ---------------------------------------------------------
    (dataset_test, data_loader) = get_dataset(opt)
    dloader_test = data_loader(
        dataset=dataset_test,
        nKnovel=opt.way,
        nKbase=0,
        nExemplars=opt.shot, # num training examples per novel category
        nTestNovel=opt.query * opt.way, # num test examples for all the novel categories
        nTestBase=0, # num test examples for all the base categories
        batch_size=1,
        num_workers=1,
        epoch_size=opt.episode, # num of batches per epoch
    )
    set_gpu(opt.gpu)
    # Define the models
    (embedding_net, cls_head) = get_model(opt)
    # Load saved model checkpoints and switch to inference mode
    saved_models = torch.load(opt.load)
    embedding_net.load_state_dict(saved_models['embedding'])
    embedding_net.eval()
    cls_head.load_state_dict(saved_models['head'])
    cls_head.eval()
    # ---- Evaluate on test set ------------------------------------------
    test_accuracies = []
    for i, batch in enumerate(tqdm(dloader_test()), 1):
        data_support, labels_support, data_query, labels_query, _, _ = [x.cuda() for x in batch]
        n_support = opt.way * opt.shot
        n_query = opt.way * opt.query
        # Optional shot-level augmentation grows the support set in place.
        for method in opt.shot_aug:
            data_support, labels_support, n_support = shot_aug(data_support, labels_support, n_support, method, opt)
        # Embed support and query images without tracking gradients.
        with torch.no_grad():
            emb_support = embedding_net(data_support.reshape([-1] + list(data_support.shape[-3:])))
            emb_support = emb_support.reshape(1, n_support, -1)
            emb_query = embedding_net(data_query.reshape([-1] + list(data_query.shape[-3:])))
            emb_query = emb_query.reshape(1, n_query, -1)
        # SVM head takes an extra solver-iteration cap.
        if opt.head == 'SVM':
            logits = cls_head(emb_query, emb_support, labels_support, opt.way, opt.shot, maxIter=3)
        else:
            logits = cls_head(emb_query, emb_support, labels_support, opt.way, opt.shot)
        acc = count_accuracy(logits.reshape(-1, opt.way), labels_query.reshape(-1))
        test_accuracies.append(acc.item())
        # Running mean and spread over all episodes so far.
        avg = np.mean(np.array(test_accuracies))
        std = np.std(np.array(test_accuracies))
        # NOTE(review): `i` already counts episodes (enumerate starts at 1),
        # so sqrt(i + 1) over-counts by one, and there is no 1.96 factor —
        # this is std-error-like, not a 95% confidence interval. Verify
        # against how results are reported.
        ci = std / np.sqrt(i + 1)
        if i % 50 == 0:
            print('Episode [{}/{}]:\t\t\tAccuracy: {:.2f} ± {:.2f} % ({:.2f} %)'\
                .format(i, opt.episode, avg, ci, acc))
| 39.922449 | 166 | 0.630815 |
import argparse
import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torch.autograd import Variable
from tqdm import tqdm
from models.protonet_embedding import ProtoNetEmbedding
from models.R2D2_embedding import R2D2Embedding
from models.ResNet12_embedding import resnet12
from models.classification_heads import ClassificationHead, R2D2Head
from utils import pprint, set_gpu, Timer, count_accuracy, log
import random
import numpy as np
import os
import pdb
def get_model(options):
if options.network == 'ProtoNet':
network = ProtoNetEmbedding().cuda()
elif options.network == 'R2D2':
network = R2D2Embedding().cuda()
elif options.network == 'ResNet':
if options.dataset == 'miniImageNet' or options.dataset == 'tieredImageNet':
network = resnet12(avg_pool=False, drop_rate=0.1, dropblock_size=5).cuda()
network = torch.nn.DataParallel(network)
else:
network = resnet12(avg_pool=False, drop_rate=0.1, dropblock_size=2).cuda()
network = torch.nn.DataParallel(network)
else:
print ("Cannot recognize the network type")
assert(False)
if opt.head == 'ProtoNet':
cls_head = ClassificationHead(base_learner='ProtoNet').cuda()
elif opt.head == 'Ridge':
cls_head = ClassificationHead(base_learner='Ridge').cuda()
elif opt.head == 'R2D2':
cls_head = R2D2Head().cuda()
elif opt.head == 'SVM':
cls_head = ClassificationHead(base_learner='SVM-CS').cuda()
else:
print ("Cannot recognize the classification head type")
assert(False)
return (network, cls_head)
def get_dataset(options):
if options.dataset == 'miniImageNet':
from data.mini_imagenet import MiniImageNet, FewShotDataloader
dataset_test = MiniImageNet(phase='test')
data_loader = FewShotDataloader
elif options.dataset == 'tieredImageNet':
from data.tiered_imagenet import tieredImageNet, FewShotDataloader
dataset_test = tieredImageNet(phase='test')
data_loader = FewShotDataloader
elif options.dataset == 'CIFAR_FS':
from data.CIFAR_FS import CIFAR_FS, FewShotDataloader
dataset_test = CIFAR_FS(phase='test')
data_loader = FewShotDataloader
elif options.dataset == 'FC100':
from data.FC100 import FC100, FewShotDataloader
dataset_test = FC100(phase='test')
data_loader = FewShotDataloader
else:
print ("Cannot recognize the dataset type")
assert(False)
return (dataset_test, data_loader)
def self_mix(data):
size = data.size()
W = size[-1]
H = size[-2]
cx = np.random.randint(W)
cy = np.random.randint(H)
cut_w = W//2
cut_h = H//2
bbx1 = np.clip(cx - cut_w // 2, 0, W)
bby1 = np.clip(cy - cut_h // 2, 0, H)
bbx2 = np.clip(cx + cut_w // 2, 0, W)
bby2 = np.clip(cy + cut_h // 2, 0, H)
while True:
bbxn = np.random.randint(0, W-(bbx2-bbx1))
bbyn = np.random.randint(0, H-(bby2-bby1))
if bbxn != bbx1 or bbyn != bby1:
break
if (bbx2 - bbx1) == (bby2 - bby1):
k = random.sample([0, 1, 2, 3], 1)[0]
else:
k = 0
data[:, :, bbx1:bbx2, bby1:bby2] = torch.rot90(data[:, :, bbxn:bbxn + (bbx2-bbx1), bbyn:bbyn + (bby2-bby1)], k, [2,3])
return data
def flip(x, dim):
indices = [slice(None)] * x.dim()
indices[dim] = torch.arange(x.size(dim) - 1, -1, -1,
dtype=torch.long, device=x.device)
return x[tuple(indices)]
def build_grid(source_size,target_size):
k = float(target_size)/float(source_size)
direct = torch.linspace(-k,k,target_size).unsqueeze(0).repeat(target_size,1).unsqueeze(-1)
full = torch.cat([direct,direct.transpose(1,0)],dim=2).unsqueeze(0)
return full.cuda()
def random_crop_grid(x,grid):
delta = x.size(2)-grid.size(1)
grid = grid.repeat(x.size(0),1,1,1).cuda()
grid[:,:,:,0] = grid[:,:,:,0]+ torch.FloatTensor(x.size(0)).cuda().random_(0, delta).unsqueeze(-1).unsqueeze(-1).expand(-1, grid.size(1), grid.size(2)) /x.size(2)
grid[:,:,:,1] = grid[:,:,:,1]+ torch.FloatTensor(x.size(0)).cuda().random_(0, delta).unsqueeze(-1).unsqueeze(-1).expand(-1, grid.size(1), grid.size(2)) /x.size(2)
return grid
def random_cropping(batch, t):
grid_source = build_grid(batch.size(-1),t)
grid_shifted = random_crop_grid(batch,grid_source)
sampled_batch = F.grid_sample(batch, grid_shifted, mode='nearest')
return sampled_batch
def shot_aug(data_support, labels_support, n_support, method, opt):
size = data_support.shape
if method == "fliplr":
n_support = opt.s_du * n_support
data_shot = flip(data_support, -1)
data_support = torch.cat((data_support, data_shot), dim = 1)
labels_support = torch.cat((labels_support, labels_support), dim = 1)
elif method == "random_crop":
n_support = opt.s_du * n_support
data_shot = F.pad(data_support.view([-1] + list(data_support.shape[-3:])), (4,4,4,4))
data_shot = random_cropping(data_shot, 32)
data_support = torch.cat((data_support, data_shot.view([size[0], -1] + list(data_support.shape[-3:]))), dim = 1)
labels_support = torch.cat((labels_support, labels_support), dim = 1)
return data_support, labels_support, n_support
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', default='0')
parser.add_argument('--load', default='./experiments/exp_1/best_model.pth',
help='path of the checkpoint file')
parser.add_argument('--episode', type=int, default=1000,
help='number of episodes to test')
parser.add_argument('--way', type=int, default=5,
help='number of classes in one test episode')
parser.add_argument('--shot', type=int, default=1,
help='number of support examples per training class')
parser.add_argument('--shot_aug', '-shotaug', default=[], nargs='+', type=str,
help='If use shot level data augmentation.')
parser.add_argument('--s_du', type=int, default=1,
help='number of support examples augmented by shot')
parser.add_argument('--query', type=int, default=15,
help='number of query examples per training class')
parser.add_argument('--network', type=str, default='ProtoNet',
help='choose which embedding network to use. ProtoNet, R2D2, ResNet')
parser.add_argument('--head', type=str, default='ProtoNet',
help='choose which embedding network to use. ProtoNet, Ridge, R2D2, SVM')
parser.add_argument('--dataset', type=str, default='miniImageNet',
help='choose which classification head to use. miniImageNet, tieredImageNet, CIFAR_FS, FC100')
opt = parser.parse_args()
(dataset_test, data_loader) = get_dataset(opt)
dloader_test = data_loader(
dataset=dataset_test,
nKnovel=opt.way,
nKbase=0,
nExemplars=opt.shot,
nTestNovel=opt.query * opt.way,
nTestBase=0,
batch_size=1,
num_workers=1,
epoch_size=opt.episode,
)
set_gpu(opt.gpu)
(embedding_net, cls_head) = get_model(opt)
saved_models = torch.load(opt.load)
embedding_net.load_state_dict(saved_models['embedding'])
embedding_net.eval()
cls_head.load_state_dict(saved_models['head'])
cls_head.eval()
test_accuracies = []
for i, batch in enumerate(tqdm(dloader_test()), 1):
data_support, labels_support, data_query, labels_query, _, _ = [x.cuda() for x in batch]
n_support = opt.way * opt.shot
n_query = opt.way * opt.query
for method in opt.shot_aug:
data_support, labels_support, n_support = shot_aug(data_support, labels_support, n_support, method, opt)
with torch.no_grad():
emb_support = embedding_net(data_support.reshape([-1] + list(data_support.shape[-3:])))
emb_support = emb_support.reshape(1, n_support, -1)
emb_query = embedding_net(data_query.reshape([-1] + list(data_query.shape[-3:])))
emb_query = emb_query.reshape(1, n_query, -1)
if opt.head == 'SVM':
logits = cls_head(emb_query, emb_support, labels_support, opt.way, opt.shot, maxIter=3)
else:
logits = cls_head(emb_query, emb_support, labels_support, opt.way, opt.shot)
acc = count_accuracy(logits.reshape(-1, opt.way), labels_query.reshape(-1))
test_accuracies.append(acc.item())
avg = np.mean(np.array(test_accuracies))
std = np.std(np.array(test_accuracies))
ci = std / np.sqrt(i + 1)
if i % 50 == 0:
print('Episode [{}/{}]:\t\t\tAccuracy: {:.2f} ± {:.2f} % ({:.2f} %)'\
.format(i, opt.episode, avg, ci, acc))
| true | true |
f71cee5ae51aff0ff873d978caa5e27b2de22765 | 2,707 | py | Python | ganggu/asynctask.py | xuecan/ganggu | f3d3727fc8228b899d2e2c7ebe99b9e4a9926a09 | [
"MIT"
] | null | null | null | ganggu/asynctask.py | xuecan/ganggu | f3d3727fc8228b899d2e2c7ebe99b9e4a9926a09 | [
"MIT"
] | null | null | null | ganggu/asynctask.py | xuecan/ganggu | f3d3727fc8228b899d2e2c7ebe99b9e4a9926a09 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2012-2016 Xue Can <xuecan@gmail.com> and contributors.
# Licensed under the MIT license: http://opensource.org/licenses/mit-license
"""
Celery 应用程序生成器
Celery 应用程序的配置众多,这里提供一个快速的生成器,避免经常需要查阅手册。
本模块根据 Celery 4.0.0rc4 重新编写。配置详情请参考:
* http://docs.celeryproject.org/en/master/userguide/configuration.html
"""
import celery
if '4.0.0' > celery.__version__:
raise RuntimeError('Require celery 4.0.0rc4 or up')
from celery import Celery
from kombu.exceptions import OperationalError
from .datastructures import Object
def make_worker(name, set_as_current=True):
    """Return a Celery app preconfigured with project defaults.

    The returned worker still needs a broker (with_amqp_broker /
    with_redis_broker) and usually a result backend (with_backend) before use.
    `name` doubles as the default queue / exchange / routing key.
    """
    name = str(name)
    worker = Celery(name, set_as_current=set_as_current)
    worker.conf.update(
        # names
        task_default_queue=name,
        task_default_exchange=name,
        task_default_routing_key=name,
        # genenals
        accept_content=['json'],
        enable_utc=True,
        timezone='Asia/Shanghai',
        # tasks
        task_serializer='json',
        task_compression=None,
        task_protocol=2,
        task_track_started=True,
        task_publish_retry=False,         # no retry
        # results
        result_serializer='json',
        result_compression=None,
        result_expires=3600,              # 1 hour
        # workers
        worker_prefetch_multiplier=1,     # no prefetch
        worker_disable_rate_limits=True,  # no rate limit
        worker_max_tasks_per_child=1000,  # prevent memory leak
        worker_hijack_root_logger=False   # we have logkit
    )
    return worker
def with_retries(worker, max_=3, start=0, interval=0.2):
    """Enable task-publish retries on *worker* with a flat backoff.

    Retries up to *max_* times, waiting *start* seconds before the first
    retry and *interval* seconds between subsequent ones.
    """
    retry_policy = {
        'max_retries': max_,
        'interval_start': start,
        'interval_step': interval,
        'interval_max': interval,
    }
    worker.conf.update(
        task_publish_retry=True,
        task_publish_retry_policy=retry_policy,
    )
def _with_broker(worker, broker, read_broker=None):
    """Point *worker* at its broker URL(s).

    With a truthy *read_broker*, writes and reads are split across two URLs;
    otherwise a single URL serves both directions.
    """
    if not read_broker:
        worker.conf.broker_url = broker
    else:
        worker.conf.broker_write_url = broker
        worker.conf.broker_read_url = read_broker
def with_amqp_broker(worker, broker, read_broker=None):
    """Configure *worker* with an AMQP (RabbitMQ) broker.

    Queues are mirrored on all nodes ('all' HA policy) before the broker
    URL(s) are applied.
    """
    worker.conf.task_queue_ha_policy = 'all'
    _with_broker(worker, broker, read_broker)
def with_redis_broker(worker, broker, read_broker=None):
    """Configure *worker* with a Redis broker and Redis transport options."""
    worker.conf.broker_transport_options = {
        # seconds before an unacknowledged task is redelivered
        'visibility_timeout': 3600,
        'fanout_prefix': True,
        'fanout_patterns': True,
    }
    _with_broker(worker, broker, read_broker)
def with_backend(worker, backend):
    """Configure the result backend URL where task results are stored."""
    worker.conf.result_backend = backend
# patch: force celery's terminal helper to report that inline images are not
# supported, so startup banners are always rendered as plain text.
import celery.utils.term
celery.utils.term.supports_images = lambda: False
| 27.907216 | 76 | 0.674917 |
import celery
if '4.0.0' > celery.__version__:
raise RuntimeError('Require celery 4.0.0rc4 or up')
from celery import Celery
from kombu.exceptions import OperationalError
from .datastructures import Object
def make_worker(name, set_as_current=True):
name = str(name)
worker = Celery(name, set_as_current=set_as_current)
worker.conf.update(
task_default_queue=name,
task_default_exchange=name,
task_default_routing_key=name,
accept_content=['json'],
enable_utc=True,
timezone='Asia/Shanghai',
task_serializer='json',
task_compression=None,
task_protocol=2,
task_track_started=True,
task_publish_retry=False,
result_serializer='json',
result_compression=None,
result_expires=3600,
worker_prefetch_multiplier=1,
worker_disable_rate_limits=True,
worker_max_tasks_per_child=1000,
worker_hijack_root_logger=False
)
return worker
def with_retries(worker, max_=3, start=0, interval=0.2):
worker.conf.update(
task_publish_retry=True,
task_publish_retry_policy={
'max_retries': max_,
'interval_start': start,
'interval_step': interval,
'interval_max': interval,
}
)
def _with_broker(worker, broker, read_broker=None):
if read_broker:
worker.conf.broker_write_url = broker
worker.conf.broker_read_url = read_broker
else:
worker.conf.broker_url = broker
def with_amqp_broker(worker, broker, read_broker=None):
worker.conf.task_queue_ha_policy = 'all'
_with_broker(worker, broker, read_broker)
def with_redis_broker(worker, broker, read_broker=None):
worker.conf.broker_transport_options = {
'visibility_timeout': 3600,
'fanout_prefix': True,
'fanout_patterns': True,
}
_with_broker(worker, broker, read_broker)
def with_backend(worker, backend):
worker.conf.result_backend = backend
import celery.utils.term
celery.utils.term.supports_images = lambda: False
| true | true |
f71ceecd29d1f9ce5d1bf239127cbd90242a62b2 | 7,255 | py | Python | src/layers/xfmr.py | uw-bionlp/ards | e9fc27f7034cc6b54f0ccdba4a58377948cf0258 | [
"BSD-3-Clause"
] | null | null | null | src/layers/xfmr.py | uw-bionlp/ards | e9fc27f7034cc6b54f0ccdba4a58377948cf0258 | [
"BSD-3-Clause"
] | null | null | null | src/layers/xfmr.py | uw-bionlp/ards | e9fc27f7034cc6b54f0ccdba4a58377948cf0258 | [
"BSD-3-Clause"
] | null | null | null |
import torch
from tqdm import tqdm
from transformers import AutoTokenizer, AutoModel
import logging
from torch.nn import ConstantPad3d, ConstantPad2d
from layers.utils import set_model_device, set_tensor_device
'''
tutorial4 tokenization
https://mccormickml.com/2019/07/22/BERT-fine-tuning/
how to use clinical bert
https://huggingface.co/emilyalsentzer/Bio_ClinicalBERT
align ng character offsets with bert tokenization
https://github.com/LightTag/sequence-labeling-with-transformers/blob/master/sequence_aligner/dataset.py
'''
INPUT_IDS = 'input_ids'
ATTENTION_MASK = 'attention_mask'
OFFSET_MAPPING = 'offset_mapping'
PRETRAINED = "emilyalsentzer/Bio_ClinicalBERT"
def tokenize_documents(documents, \
                    pretrained=PRETRAINED,
                    add_special_tokens=True,
                    max_length=50,
                    return_attention_mask=True,
                    return_tensors='pt',
                    return_offsets_mapping=True,
                    is_split_into_words=False
                    ):
    """Tokenize documents (lists of sentences) with a pretrained tokenizer.

    Each document is batch-encoded as one call, so every sentence becomes a
    fixed-length (padded/truncated to `max_length`) row.

    Returns:
        (input_ids, mask, offsets) — parallel lists with one entry per
        document: token-id tensors, attention-mask tensors, and per-sentence
        lists of (char_start, char_end) tuples for each word piece.

    NOTE(review): batch_encode_plus is deprecated in newer transformers in
    favor of calling the tokenizer directly — confirm the pinned version.
    """
    logging.info("Tokenization using AutoTokenizer")
    # Instantiate tokenizer
    tokenizer = AutoTokenizer.from_pretrained(pretrained)
    # Tokenize all of the sentences and map the tokens to their word IDs.
    input_ids = []
    mask = []
    offsets = []
    pbar = tqdm(total=len(documents))
    for i, text in enumerate(documents):
        # `encode_plus` will:
        #   (1) Tokenize the sentence.
        #   (2) Prepend the `[CLS]` token to the start.
        #   (3) Append the `[SEP]` token to the end.
        #   (4) Map tokens to their IDs.
        #   (5) Pad or truncate the sentence to `max_length`
        #   (6) Create attention masks for [PAD] tokens.
        encoded_dict = tokenizer.batch_encode_plus(
                            text,                      # Sentences to encode.
                            add_special_tokens = add_special_tokens, # Add '[CLS]' and '[SEP]'
                            max_length = max_length,   # Pad & truncate all sentences.
                            padding = 'max_length',
                            truncation = True,
                            return_attention_mask = return_attention_mask,   # Construct attn. masks.
                            return_tensors = return_tensors,     # Return pytorch tensors.
                            return_offsets_mapping = return_offsets_mapping,
                            is_split_into_words = is_split_into_words)
        input_ids.append(encoded_dict[INPUT_IDS])
        mask.append(encoded_dict[ATTENTION_MASK])
        # Convert the offsets tensor to nested lists of (start, end) tuples.
        offsets_ = encoded_dict[OFFSET_MAPPING].tolist()
        offsets_ = [[tuple(token) for token in sentence] for sentence in offsets_]
        offsets.append(offsets_)
        # Log a sample of the encoding for the first document only.
        if i == 0:
            logging.info("-"*80)
            logging.info("")
            logging.info("Returned params:\n{}".format(encoded_dict.keys()))
            logging.info("")
            logging.info('Input:\n{}'.format(text))
            logging.info("")
            #logging.info('IDs: {}\n{}'.format(input_ids[0].shape, input_ids[0]))
            logging.info('IDs: {}'.format(input_ids[0].shape))
            logging.info("")
            #logging.info('Attn: {}\n{}'.format(mask[0].shape, mask[0]))
            logging.info('Attn: {}'.format(mask[0].shape))
            wps = [tokenizer.convert_ids_to_tokens(ids_) for ids_ in input_ids[0].squeeze()]
            logging.info("")
            logging.info('Tok:\n')
            for wps_ in wps[:10]:
                logging.info(f'{wps_[:10]} ....')
            #logging.info("")
            #logging.info('Idx:\n{}'.format(offsets[0]))
            #logging.info("")
            #logging.info("-"*80)
        pbar.update()
    pbar.close()
    logging.info("")
    logging.info('Document count: {}'.format(len(input_ids)))
    logging.info("")
    return (input_ids, mask, offsets)
def encode_documents(input_ids, mask, \
                    pretrained=PRETRAINED,
                    device=None,
                    train=False):
    """Embed tokenized documents with a pretrained transformer.

    Args:
        input_ids: list of token-id tensors, one per document.
        mask: list of attention-mask tensors, parallel to `input_ids`.
        pretrained: model name/path passed to AutoModel.from_pretrained.
        device: optional device the model and tensors are moved to.
        train: put the model in train mode instead of eval mode.

    Returns:
        List of last-hidden-state tensors (on CPU), one per document.

    NOTE(review): outputs are always `.detach()`-ed, so even with
    train=True no gradients flow back to the encoder — confirm intent.
    """
    logging.info("Embedding using AutoModel")
    model = AutoModel.from_pretrained(pretrained)
    if train:
        model.train()
    else:
        model.eval()
    set_model_device(model, device)
    X = []
    # (removed an unused `masks = []` accumulator that was never filled)
    pbar = tqdm(total=len(input_ids))
    assert len(input_ids) == len(mask)
    for i, (ids, msk) in enumerate(zip(input_ids, mask)):
        ids = set_tensor_device(ids, device)
        msk = set_tensor_device(msk, device)
        # [0] selects the last hidden state from the model outputs.
        x = model( \
                ids,
                token_type_ids=None,
                attention_mask=msk)[0]
        # Move off the GPU and cut the autograd graph before accumulating.
        x = x.cpu().detach()
        X.append(x)
        # Log shapes once, for the second document.
        if i == 1:
            logging.info("Encode documents")
            logging.info('IDs: {}'.format(ids.shape))
            logging.info('Mask: {}'.format(msk.shape))
            logging.info('X: {}'.format(x.shape))
            logging.info('')
        pbar.update()
    pbar.close()
    logging.info("")
    logging.info('Document count: {}'.format(len(X)))
    logging.info("")
    return X
def char2wordpiece(start, end, offsets):
    """Map a character span onto word-piece indices.

    Parameters
    ----------
    start, end: character span (start inclusive, end exclusive)
    offsets: (char_start, char_end) pairs returned by the tokenizer,
        one per word piece, with char_end exclusive

    Returns
    -------
    (piece_start, piece_end): word-piece span, end exclusive
    """
    piece_start = -1
    piece_end = -1
    for idx, (char_start, char_end) in enumerate(offsets):
        # First piece whose character range contains the span start.
        if piece_start == -1 and char_start <= start < char_end:
            piece_start = idx
        # First piece whose character range contains the span end;
        # +1 makes the returned index exclusive.
        if piece_end == -1 and char_start < end <= char_end:
            piece_end = idx + 1
    assert piece_start != -1
    assert piece_end != -1
    return (piece_start, piece_end)
def wordpiece2char(start, end, offsets):
    """Map a word-piece span back onto character indices.

    Parameters
    ----------
    start, end: word-piece span (end exclusive)
    offsets: (char_start, char_end) pairs returned by the tokenizer

    Returns
    -------
    (char_start, char_end): character span covered by those pieces
    """
    span = offsets[start:end]
    # Start of the first piece, end of the last piece.
    return (span[0][0], span[-1][-1])
def demo():
    """Smoke-test tokenization + encoding on two tiny documents."""
    documents = [['patient is reporting fever and cough.', 'chest x re indicates bilateral infile traits'],
                 ['diffuse lung disease', 'reporting position is addr']]
    # BUG FIX: tokenize_documents returns (input_ids, mask, offsets); the
    # whole tuple was previously passed as `input_ids` with `mask` missing,
    # so encode_documents raised a TypeError before doing any work.
    input_ids, mask, offsets = tokenize_documents(documents, max_length=19)
    embedding = encode_documents(input_ids, mask)
    return embedding
| 26.478102 | 107 | 0.596554 |
import torch
from tqdm import tqdm
from transformers import AutoTokenizer, AutoModel
import logging
from torch.nn import ConstantPad3d, ConstantPad2d
from layers.utils import set_model_device, set_tensor_device
INPUT_IDS = 'input_ids'
ATTENTION_MASK = 'attention_mask'
OFFSET_MAPPING = 'offset_mapping'
PRETRAINED = "emilyalsentzer/Bio_ClinicalBERT"
def tokenize_documents(documents, \
pretrained=PRETRAINED,
add_special_tokens=True,
max_length=50,
return_attention_mask=True,
return_tensors='pt',
return_offsets_mapping=True,
is_split_into_words=False
):
logging.info("Tokenization using AutoTokenizer")
tokenizer = AutoTokenizer.from_pretrained(pretrained)
input_ids = []
mask = []
offsets = []
pbar = tqdm(total=len(documents))
for i, text in enumerate(documents):
encoded_dict = tokenizer.batch_encode_plus(
text,
add_special_tokens = add_special_tokens,
max_length = max_length,
padding = 'max_length',
truncation = True,
return_attention_mask = return_attention_mask,
return_tensors = return_tensors,
return_offsets_mapping = return_offsets_mapping,
is_split_into_words = is_split_into_words)
input_ids.append(encoded_dict[INPUT_IDS])
mask.append(encoded_dict[ATTENTION_MASK])
offsets_ = encoded_dict[OFFSET_MAPPING].tolist()
offsets_ = [[tuple(token) for token in sentence] for sentence in offsets_]
offsets.append(offsets_)
if i == 0:
logging.info("-"*80)
logging.info("")
logging.info("Returned params:\n{}".format(encoded_dict.keys()))
logging.info("")
logging.info('Input:\n{}'.format(text))
logging.info("")
logging.info('IDs: {}'.format(input_ids[0].shape))
logging.info("")
logging.info('Attn: {}'.format(mask[0].shape))
wps = [tokenizer.convert_ids_to_tokens(ids_) for ids_ in input_ids[0].squeeze()]
logging.info("")
logging.info('Tok:\n')
for wps_ in wps[:10]:
logging.info(f'{wps_[:10]} ....')
pbar.update()
pbar.close()
logging.info("")
logging.info('Document count: {}'.format(len(input_ids)))
logging.info("")
return (input_ids, mask, offsets)
def encode_documents(input_ids, mask, \
pretrained=PRETRAINED,
device=None,
train=False):
logging.info("Embedding using AutoModel")
model = AutoModel.from_pretrained(pretrained)
if train:
model.train()
else:
model.eval()
set_model_device(model, device)
X = []
masks = []
pbar = tqdm(total=len(input_ids))
assert len(input_ids) == len(mask)
for i, (ids, msk) in enumerate(zip(input_ids, mask)):
ids = set_tensor_device(ids, device)
msk = set_tensor_device(msk, device)
x = model( \
ids,
token_type_ids=None,
attention_mask=msk)[0]
x = x.cpu().detach()
X.append(x)
if i == 1:
logging.info("Encode documents")
logging.info('IDs: {}'.format(ids.shape))
logging.info('Mask: {}'.format(msk.shape))
logging.info('X: {}'.format(x.shape))
logging.info('')
pbar.update()
pbar.close()
logging.info("")
logging.info('Document count: {}'.format(len(X)))
logging.info("")
return X
def char2wordpiece(start, end, offsets):
start_new = -1
end_new = -1
for index, (start_word, end_word) in enumerate(offsets):
if (start_new == -1) and \
(start >= start_word) and \
(start < end_word):
start_new = index
if (end_new == -1) and \
(end > start_word) and \
(end <= end_word):
end_new = index + 1
assert start_new != -1
assert end_new != -1
return (start_new, end_new)
def wordpiece2char(start, end, offsets):
indices = offsets[start:end]
start_new = indices[0][0]
end_new = indices[-1][-1]
return (start_new, end_new)
def demo():
documents = [['patient is reporting fever and cough.', 'chest x re indicates bilateral infile traits'],
['diffuse lung disease', 'reporting position is addr']]
tokens = tokenize_documents(documents, max_length=19)
embedding = encode_documents(tokens)
| true | true |
f71ceed56868a5e9294f76930727f0329ca98560 | 113 | py | Python | docs/docs_app/not_found.py | glsdown/dash-loading-spinners | 5fdfe9fc439b6c7aa624c23fb72123b785c9de8e | [
"MIT"
] | 14 | 2021-06-21T16:34:20.000Z | 2022-02-25T21:42:30.000Z | docs/docs_app/not_found.py | glsdown/dash-loading-spinners | 5fdfe9fc439b6c7aa624c23fb72123b785c9de8e | [
"MIT"
] | null | null | null | docs/docs_app/not_found.py | glsdown/dash-loading-spinners | 5fdfe9fc439b6c7aa624c23fb72123b785c9de8e | [
"MIT"
] | null | null | null | import dash_bootstrap_components as dbc
layout = dbc.Jumbotron(["404 - Not Found"], className="h4 text-danger")
| 28.25 | 71 | 0.761062 | import dash_bootstrap_components as dbc
layout = dbc.Jumbotron(["404 - Not Found"], className="h4 text-danger")
| true | true |
f71ceed8923c4307e3f9f61c40b8ca31ec7f9d14 | 3,034 | py | Python | MxShop/extra_apps/xadmin/views/website.py | youshuad/django-vue-shop | dbede2301b10cb95ef30d0bbbbd594b240071fc1 | [
"MIT"
] | null | null | null | MxShop/extra_apps/xadmin/views/website.py | youshuad/django-vue-shop | dbede2301b10cb95ef30d0bbbbd594b240071fc1 | [
"MIT"
] | null | null | null | MxShop/extra_apps/xadmin/views/website.py | youshuad/django-vue-shop | dbede2301b10cb95ef30d0bbbbd594b240071fc1 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from django.utils.translation import ugettext as _
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.views.decorators.cache import never_cache
# from django.contrib.auth import login,logout,authenticate
from django.contrib.auth.views import LoginView as login
from django.contrib.auth.views import LogoutView as logout
from django.contrib.auth.forms import UserCreationForm
from django.http import HttpResponse
from .base import BaseAdminView, filter_hook
from .dashboard import Dashboard
from xadmin.forms import AdminAuthenticationForm
from xadmin.models import UserSettings
from xadmin.layout import FormHelper
class IndexView(Dashboard):
    """The admin site's landing page: the main dashboard."""

    title = _("Main Dashboard")
    icon = "fa fa-dashboard"

    def get_page_id(self):
        """Return the fixed identifier of this dashboard page ('home')."""
        return 'home'
class UserSettingView(BaseAdminView):
    """Persist a single key/value setting for the current user via POST."""

    @never_cache
    def post(self, request):
        # Missing 'key'/'value' raises KeyError (server error) by design:
        # this endpoint is only called by the admin frontend.
        key = request.POST['key']
        val = request.POST['value']
        # One UserSettings row per (user, key); created on first write.
        us, created = UserSettings.objects.get_or_create(
            user=self.user, key=key)
        us.value = val
        us.save()
        # Empty 200 response; the client only checks for success.
        return HttpResponse('')
class LoginView(BaseAdminView):
    """Render and process the admin login form via Django's auth LoginView."""

    title = _("Please Login")
    login_form = None      # optional custom authentication form class
    login_template = None  # optional custom template path

    @filter_hook
    def update_params(self, defaults):
        """Extension hook: plugins may mutate *defaults* before rendering."""
        pass

    @never_cache
    def get(self, request, *args, **kwargs):
        context = self.get_context()
        # Form layout helper; the template renders the <form> tag itself.
        helper = FormHelper()
        helper.form_tag = False
        helper.include_media = False
        context.update({
            'title': self.title,
            'helper': helper,
            'app_path': request.get_full_path(),
            # Redirect back to the originally requested URL after login.
            REDIRECT_FIELD_NAME: request.get_full_path(),
        })
        defaults = {
            'extra_context': context,
            # 'current_app': self.admin_site.name,
            'authentication_form': self.login_form or AdminAuthenticationForm,
            'template_name': self.login_template or 'xadmin/views/login.html',
        }
        self.update_params(defaults)
        # Delegate rendering/authentication to Django's class-based view.
        # return login(request, **defaults)
        return login.as_view(**defaults)(request)

    @never_cache
    def post(self, request, *args, **kwargs):
        # Form submission goes through the same code path as GET.
        return self.get(request)
class LogoutView(BaseAdminView):
logout_template = None
need_site_permission = False
@filter_hook
def update_params(self, defaults):
pass
@never_cache
def get(self, request, *args, **kwargs):
context = self.get_context()
defaults = {
'extra_context': context,
# 'current_app': self.admin_site.name,
'template_name': self.logout_template or 'xadmin/views/logged_out.html',
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
self.update_params(defaults)
# return logout(request, **defaults)
return logout.as_view(**defaults)(request)
@never_cache
def post(self, request, *args, **kwargs):
return self.get(request)
| 29.456311 | 84 | 0.66381 | from __future__ import absolute_import
from django.utils.translation import ugettext as _
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.views.decorators.cache import never_cache
from django.contrib.auth.views import LoginView as login
from django.contrib.auth.views import LogoutView as logout
from django.contrib.auth.forms import UserCreationForm
from django.http import HttpResponse
from .base import BaseAdminView, filter_hook
from .dashboard import Dashboard
from xadmin.forms import AdminAuthenticationForm
from xadmin.models import UserSettings
from xadmin.layout import FormHelper
class IndexView(Dashboard):
title = _("Main Dashboard")
icon = "fa fa-dashboard"
def get_page_id(self):
return 'home'
class UserSettingView(BaseAdminView):
@never_cache
def post(self, request):
key = request.POST['key']
val = request.POST['value']
us, created = UserSettings.objects.get_or_create(
user=self.user, key=key)
us.value = val
us.save()
return HttpResponse('')
class LoginView(BaseAdminView):
title = _("Please Login")
login_form = None
login_template = None
@filter_hook
def update_params(self, defaults):
pass
@never_cache
def get(self, request, *args, **kwargs):
context = self.get_context()
helper = FormHelper()
helper.form_tag = False
helper.include_media = False
context.update({
'title': self.title,
'helper': helper,
'app_path': request.get_full_path(),
REDIRECT_FIELD_NAME: request.get_full_path(),
})
defaults = {
'extra_context': context,
'authentication_form': self.login_form or AdminAuthenticationForm,
'template_name': self.login_template or 'xadmin/views/login.html',
}
self.update_params(defaults)
return login.as_view(**defaults)(request)
@never_cache
def post(self, request, *args, **kwargs):
return self.get(request)
class LogoutView(BaseAdminView):
logout_template = None
need_site_permission = False
@filter_hook
def update_params(self, defaults):
pass
@never_cache
def get(self, request, *args, **kwargs):
context = self.get_context()
defaults = {
'extra_context': context,
'template_name': self.logout_template or 'xadmin/views/logged_out.html',
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
self.update_params(defaults)
return logout.as_view(**defaults)(request)
@never_cache
def post(self, request, *args, **kwargs):
return self.get(request)
| true | true |
f71ceefdeb70227f2eeec78efabe0c05cba65a6e | 5,151 | py | Python | docs-src/source/conf.py | daxnet/apworks-core | 29347f7fe93fa547c3cdfdbf5eec31fc1a4dcb32 | [
"Apache-2.0"
] | 212 | 2017-02-25T07:56:35.000Z | 2022-03-29T01:58:48.000Z | docs-src/source/conf.py | dahaoniuniu/apworks-core | 580492f0d03633a4a1099b44dd967a06d8fb8308 | [
"Apache-2.0"
] | 7 | 2017-04-08T14:06:49.000Z | 2021-03-30T12:58:06.000Z | docs-src/source/conf.py | dahaoniuniu/apworks-core | 580492f0d03633a4a1099b44dd967a06d8fb8308 | [
"Apache-2.0"
] | 59 | 2017-04-03T06:47:10.000Z | 2021-08-18T05:40:47.000Z | # -*- coding: utf-8 -*-
#
# Apworks documentation build configuration file, created by
# sphinx-quickstart on Sat Mar 25 15:41:49 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Apworks'
copyright = u'2017, Sunny Chen'
author = u'Sunny Chen'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en,zh_CN'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Apworksdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Apworks.tex', u'Apworks Documentation',
u'Sunny Chen', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'apworks', u'Apworks Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Apworks', u'Apworks Documentation',
author, 'Apworks', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| 28.938202 | 79 | 0.677732 |
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Apworks'
copyright = u'2017, Sunny Chen'
author = u'Sunny Chen'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en,zh_CN'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Apworksdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Apworks.tex', u'Apworks Documentation',
u'Sunny Chen', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'apworks', u'Apworks Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Apworks', u'Apworks Documentation',
author, 'Apworks', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| true | true |
f71cf026b7d351eb49d776725321c404f4254c2b | 1,032 | py | Python | proxy_client.py | AcidCannon/CMPUT404.W2021.LAB2 | cb680f98bc625415a4fb25bc091e802ba05df238 | [
"Apache-2.0"
] | null | null | null | proxy_client.py | AcidCannon/CMPUT404.W2021.LAB2 | cb680f98bc625415a4fb25bc091e802ba05df238 | [
"Apache-2.0"
] | null | null | null | proxy_client.py | AcidCannon/CMPUT404.W2021.LAB2 | cb680f98bc625415a4fb25bc091e802ba05df238 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import socket
# CONSTANTS
OUTBOUND_HOST = "127.0.0.1"
OUTBOUND_PORT = 8001
OUTBOUND_BUFFER_SIZE = 1024
PAYLOAD_URL = "www.google.com"
PAYLOAD = f"GET / HTTP/1.0\r\nHost: {PAYLOAD_URL}\r\n\r\n"
def main():
# Create a socket object
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
# Connect to the proxy server
s.connect((OUTBOUND_HOST, OUTBOUND_PORT))
# Send the payload to the proxy server
s.sendall(PAYLOAD.encode())
# Get IP and port of peer
peer_addr = s.getpeername()
# No longer write/send
s.shutdown(socket.SHUT_WR)
# Reading data until no more left
data = b""
while True:
fetched_data = s.recv(OUTBOUND_BUFFER_SIZE)
if not fetched_data:
break
data += fetched_data
print("Received From:", str(peer_addr[0]) + ":" + str(peer_addr[1]), "Content:", data)
if __name__ == "__main__":
main() | 27.157895 | 95 | 0.591085 |
import socket
OUTBOUND_HOST = "127.0.0.1"
OUTBOUND_PORT = 8001
OUTBOUND_BUFFER_SIZE = 1024
PAYLOAD_URL = "www.google.com"
PAYLOAD = f"GET / HTTP/1.0\r\nHost: {PAYLOAD_URL}\r\n\r\n"
def main():
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((OUTBOUND_HOST, OUTBOUND_PORT))
s.sendall(PAYLOAD.encode())
peer_addr = s.getpeername()
s.shutdown(socket.SHUT_WR)
data = b""
while True:
fetched_data = s.recv(OUTBOUND_BUFFER_SIZE)
if not fetched_data:
break
data += fetched_data
print("Received From:", str(peer_addr[0]) + ":" + str(peer_addr[1]), "Content:", data)
if __name__ == "__main__":
main() | true | true |
f71cf03658518edaba3cbd0aee36d8141f9b1311 | 11,097 | py | Python | src/olympia/users/forms.py | Rob--W/addons-server | cc104705e17ddeeb57254403ed292acb904a9a41 | [
"BSD-3-Clause"
] | 1 | 2020-04-07T07:21:25.000Z | 2020-04-07T07:21:25.000Z | src/olympia/users/forms.py | Rob--W/addons-server | cc104705e17ddeeb57254403ed292acb904a9a41 | [
"BSD-3-Clause"
] | null | null | null | src/olympia/users/forms.py | Rob--W/addons-server | cc104705e17ddeeb57254403ed292acb904a9a41 | [
"BSD-3-Clause"
] | 2 | 2018-03-04T00:11:22.000Z | 2019-12-14T09:45:55.000Z | import os
import re
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
import commonware.log
from olympia import amo
from olympia.accounts.views import fxa_error_message
from olympia.amo.fields import HttpHttpsOnlyURLField
from olympia.users import notifications
from olympia.amo.utils import clean_nl, has_links, slug_validator
from olympia.lib import happyforms
from olympia.translations import LOCALES
from . import tasks
from .models import (
UserProfile, UserNotification, BlacklistedName)
from .widgets import (
NotificationsSelectMultiple, RequiredCheckboxInput, RequiredEmailInput,
RequiredTextarea)
log = commonware.log.getLogger('z.users')
admin_re = re.compile('(?=.*\d)(?=.*[a-zA-Z])')
class UserDeleteForm(forms.Form):
email = forms.CharField(max_length=255, required=True,
widget=RequiredEmailInput)
confirm = forms.BooleanField(required=True, widget=RequiredCheckboxInput)
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
super(UserDeleteForm, self).__init__(*args, **kwargs)
self.fields['email'].widget.attrs['placeholder'] = (
self.request.user.email)
def clean_email(self):
user_email = self.request.user.email
if not user_email == self.cleaned_data['email']:
raise forms.ValidationError(_('Email must be {email}.').format(
email=user_email))
def clean(self):
amouser = self.request.user
if amouser.is_developer:
# This is tampering because the form isn't shown on the page if the
# user is a developer
log.warning(u'[Tampering] Attempt to delete developer account (%s)'
% self.request.user)
raise forms.ValidationError("")
class UserEditForm(happyforms.ModelForm):
username = forms.CharField(max_length=50, required=False)
display_name = forms.CharField(label=_lazy(u'Display Name'), max_length=50,
required=False)
location = forms.CharField(label=_lazy(u'Location'), max_length=100,
required=False)
occupation = forms.CharField(label=_lazy(u'Occupation'), max_length=100,
required=False)
homepage = HttpHttpsOnlyURLField(label=_lazy(u'Homepage'), required=False)
email = forms.EmailField(
required=False,
help_text=fxa_error_message(
_(u'Firefox Accounts users cannot currently change their email '
u'address.')),
widget=forms.EmailInput(attrs={'readonly': 'readonly'}))
photo = forms.FileField(label=_lazy(u'Profile Photo'), required=False)
notifications = forms.MultipleChoiceField(
choices=[],
widget=NotificationsSelectMultiple,
initial=notifications.NOTIFICATIONS_DEFAULT,
required=False)
lang = forms.TypedChoiceField(label=_lazy(u'Default locale'),
choices=LOCALES)
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
instance = kwargs.get('instance')
if instance and instance.has_anonymous_username():
kwargs.setdefault('initial', {})
kwargs['initial']['username'] = ''
super(UserEditForm, self).__init__(*args, **kwargs)
errors = {'invalid': _('This URL has an invalid format. '
'Valid URLs look like '
'http://example.com/my_page.')}
self.fields['homepage'].error_messages = errors
if not self.instance.lang and self.request:
self.initial['lang'] = self.request.LANG
if self.instance:
default = dict((i, n.default_checked) for i, n
in notifications.NOTIFICATIONS_BY_ID.items())
user = dict((n.notification_id, n.enabled) for n
in self.instance.notifications.all())
default.update(user)
# Add choices to Notification.
choices = notifications.NOTIFICATIONS_CHOICES
if not self.instance.is_developer:
choices = notifications.NOTIFICATIONS_CHOICES_NOT_DEV
# Append a "NEW" message to new notification options.
saved = self.instance.notifications.values_list('notification_id',
flat=True)
self.choices_status = {}
for idx, label in choices:
self.choices_status[idx] = idx not in saved
self.fields['notifications'].choices = choices
self.fields['notifications'].initial = [i for i, v
in default.items() if v]
self.fields['notifications'].widget.form_instance = self
class Meta:
model = UserProfile
fields = (
'username', 'email', 'display_name', 'location', 'occupation',
'homepage', 'photo', 'lang', 'bio', 'display_collections',
'display_collections_fav', 'notifications',
)
def clean_username(self):
name = self.cleaned_data['username']
if not name:
if self.instance.has_anonymous_username():
name = self.instance.username
else:
name = self.instance.anonymize_username()
# All-digits usernames are disallowed since they can be
# confused for user IDs in URLs. (See bug 862121.)
if name.isdigit():
raise forms.ValidationError(
_('Usernames cannot contain only digits.'))
slug_validator(
name, lower=False,
message=_('Enter a valid username consisting of letters, numbers, '
'underscores or hyphens.'))
if BlacklistedName.blocked(name):
raise forms.ValidationError(_('This username cannot be used.'))
# FIXME: Bug 858452. Remove this check when collation of the username
# column is changed to case insensitive.
if (UserProfile.objects.exclude(id=self.instance.id)
.filter(username__iexact=name).exists()):
raise forms.ValidationError(_('This username is already in use.'))
return name
def clean_display_name(self):
name = self.cleaned_data['display_name']
if BlacklistedName.blocked(name):
raise forms.ValidationError(_('This display name cannot be used.'))
return name
def clean_email(self):
# TODO(django 1.9): Change the field to disabled=True and remove this.
return self.instance.email
def clean_photo(self):
photo = self.cleaned_data['photo']
if not photo:
return
if photo.content_type not in ('image/png', 'image/jpeg'):
raise forms.ValidationError(
_('Images must be either PNG or JPG.'))
if photo.size > settings.MAX_PHOTO_UPLOAD_SIZE:
raise forms.ValidationError(
_('Please use images smaller than %dMB.' %
(settings.MAX_PHOTO_UPLOAD_SIZE / 1024 / 1024 - 1)))
return photo
def clean_bio(self):
bio = self.cleaned_data['bio']
normalized = clean_nl(unicode(bio))
if has_links(normalized):
# There's some links, we don't want them.
raise forms.ValidationError(_('No links are allowed.'))
return bio
def save(self, log_for_developer=True):
u = super(UserEditForm, self).save(commit=False)
data = self.cleaned_data
photo = data['photo']
if photo:
u.picture_type = 'image/png'
tmp_destination = u.picture_path + '__unconverted'
with storage.open(tmp_destination, 'wb') as fh:
for chunk in photo.chunks():
fh.write(chunk)
tasks.resize_photo.delay(tmp_destination, u.picture_path,
set_modified_on=[u])
for (i, n) in notifications.NOTIFICATIONS_BY_ID.items():
enabled = n.mandatory or (str(i) in data['notifications'])
UserNotification.update_or_create(
user=u, notification_id=i, update={'enabled': enabled})
log.debug(u'User (%s) updated their profile' % u)
u.save()
return u
class AdminUserEditForm(UserEditForm):
"""This is the form used by admins to edit users' info."""
email = forms.EmailField(widget=RequiredEmailInput)
admin_log = forms.CharField(required=True, label='Reason for change',
widget=RequiredTextarea(attrs={'rows': 4}))
notes = forms.CharField(required=False, label='Notes',
widget=forms.Textarea(attrs={'rows': 4}))
anonymize = forms.BooleanField(required=False)
def changed_fields(self):
"""Returns changed_data ignoring these fields."""
return (set(self.changed_data) -
set(['admin_log', 'notifications', 'photo']))
def changes(self):
"""A dictionary of changed fields, old, new."""
details = dict([(k, (self.initial[k], self.cleaned_data[k]))
for k in self.changed_fields()])
return details
def clean_anonymize(self):
if (self.cleaned_data['anonymize'] and
self.changed_fields() != set(['anonymize'])):
raise forms.ValidationError(_('To anonymize, enter a reason for'
' the change but do not change any'
' other field.'))
return self.cleaned_data['anonymize']
def clean_email(self):
return self.cleaned_data['email']
def save(self, *args, **kw):
profile = super(AdminUserEditForm, self).save(log_for_developer=False)
if self.cleaned_data['anonymize']:
amo.log(amo.LOG.ADMIN_USER_ANONYMIZED, self.instance,
self.cleaned_data['admin_log'])
profile.anonymize() # This also logs
else:
amo.log(amo.LOG.ADMIN_USER_EDITED, self.instance,
self.cleaned_data['admin_log'], details=self.changes())
log.info('Admin edit user: %s changed fields: %s' %
(self.instance, self.changed_fields()))
return profile
class BlacklistedNameAddForm(forms.Form):
"""Form for adding blacklisted names in bulk fashion."""
names = forms.CharField(widget=forms.Textarea(
attrs={'cols': 40, 'rows': 16}))
def clean_names(self):
names = self.cleaned_data['names'].strip()
if not names:
raise forms.ValidationError(
_('Please enter at least one name to blacklist.'))
names = os.linesep.join(
[s.strip() for s in names.splitlines() if s.strip()])
return names
| 39.212014 | 79 | 0.605119 | import os
import re
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
import commonware.log
from olympia import amo
from olympia.accounts.views import fxa_error_message
from olympia.amo.fields import HttpHttpsOnlyURLField
from olympia.users import notifications
from olympia.amo.utils import clean_nl, has_links, slug_validator
from olympia.lib import happyforms
from olympia.translations import LOCALES
from . import tasks
from .models import (
UserProfile, UserNotification, BlacklistedName)
from .widgets import (
NotificationsSelectMultiple, RequiredCheckboxInput, RequiredEmailInput,
RequiredTextarea)
log = commonware.log.getLogger('z.users')
admin_re = re.compile('(?=.*\d)(?=.*[a-zA-Z])')
class UserDeleteForm(forms.Form):
email = forms.CharField(max_length=255, required=True,
widget=RequiredEmailInput)
confirm = forms.BooleanField(required=True, widget=RequiredCheckboxInput)
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
super(UserDeleteForm, self).__init__(*args, **kwargs)
self.fields['email'].widget.attrs['placeholder'] = (
self.request.user.email)
def clean_email(self):
user_email = self.request.user.email
if not user_email == self.cleaned_data['email']:
raise forms.ValidationError(_('Email must be {email}.').format(
email=user_email))
def clean(self):
amouser = self.request.user
if amouser.is_developer:
# user is a developer
log.warning(u'[Tampering] Attempt to delete developer account (%s)'
% self.request.user)
raise forms.ValidationError("")
class UserEditForm(happyforms.ModelForm):
username = forms.CharField(max_length=50, required=False)
display_name = forms.CharField(label=_lazy(u'Display Name'), max_length=50,
required=False)
location = forms.CharField(label=_lazy(u'Location'), max_length=100,
required=False)
occupation = forms.CharField(label=_lazy(u'Occupation'), max_length=100,
required=False)
homepage = HttpHttpsOnlyURLField(label=_lazy(u'Homepage'), required=False)
email = forms.EmailField(
required=False,
help_text=fxa_error_message(
_(u'Firefox Accounts users cannot currently change their email '
u'address.')),
widget=forms.EmailInput(attrs={'readonly': 'readonly'}))
photo = forms.FileField(label=_lazy(u'Profile Photo'), required=False)
notifications = forms.MultipleChoiceField(
choices=[],
widget=NotificationsSelectMultiple,
initial=notifications.NOTIFICATIONS_DEFAULT,
required=False)
lang = forms.TypedChoiceField(label=_lazy(u'Default locale'),
choices=LOCALES)
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
instance = kwargs.get('instance')
if instance and instance.has_anonymous_username():
kwargs.setdefault('initial', {})
kwargs['initial']['username'] = ''
super(UserEditForm, self).__init__(*args, **kwargs)
errors = {'invalid': _('This URL has an invalid format. '
'Valid URLs look like '
'http://example.com/my_page.')}
self.fields['homepage'].error_messages = errors
if not self.instance.lang and self.request:
self.initial['lang'] = self.request.LANG
if self.instance:
default = dict((i, n.default_checked) for i, n
in notifications.NOTIFICATIONS_BY_ID.items())
user = dict((n.notification_id, n.enabled) for n
in self.instance.notifications.all())
default.update(user)
# Add choices to Notification.
choices = notifications.NOTIFICATIONS_CHOICES
if not self.instance.is_developer:
choices = notifications.NOTIFICATIONS_CHOICES_NOT_DEV
# Append a "NEW" message to new notification options.
saved = self.instance.notifications.values_list('notification_id',
flat=True)
self.choices_status = {}
for idx, label in choices:
self.choices_status[idx] = idx not in saved
self.fields['notifications'].choices = choices
self.fields['notifications'].initial = [i for i, v
in default.items() if v]
self.fields['notifications'].widget.form_instance = self
class Meta:
model = UserProfile
fields = (
'username', 'email', 'display_name', 'location', 'occupation',
'homepage', 'photo', 'lang', 'bio', 'display_collections',
'display_collections_fav', 'notifications',
)
def clean_username(self):
name = self.cleaned_data['username']
if not name:
if self.instance.has_anonymous_username():
name = self.instance.username
else:
name = self.instance.anonymize_username()
# All-digits usernames are disallowed since they can be
# confused for user IDs in URLs. (See bug 862121.)
if name.isdigit():
raise forms.ValidationError(
_('Usernames cannot contain only digits.'))
slug_validator(
name, lower=False,
message=_('Enter a valid username consisting of letters, numbers, '
'underscores or hyphens.'))
if BlacklistedName.blocked(name):
raise forms.ValidationError(_('This username cannot be used.'))
# FIXME: Bug 858452. Remove this check when collation of the username
# column is changed to case insensitive.
if (UserProfile.objects.exclude(id=self.instance.id)
.filter(username__iexact=name).exists()):
raise forms.ValidationError(_('This username is already in use.'))
return name
def clean_display_name(self):
name = self.cleaned_data['display_name']
if BlacklistedName.blocked(name):
raise forms.ValidationError(_('This display name cannot be used.'))
return name
def clean_email(self):
# TODO(django 1.9): Change the field to disabled=True and remove this.
return self.instance.email
def clean_photo(self):
photo = self.cleaned_data['photo']
if not photo:
return
if photo.content_type not in ('image/png', 'image/jpeg'):
raise forms.ValidationError(
_('Images must be either PNG or JPG.'))
if photo.size > settings.MAX_PHOTO_UPLOAD_SIZE:
raise forms.ValidationError(
_('Please use images smaller than %dMB.' %
(settings.MAX_PHOTO_UPLOAD_SIZE / 1024 / 1024 - 1)))
return photo
def clean_bio(self):
bio = self.cleaned_data['bio']
normalized = clean_nl(unicode(bio))
if has_links(normalized):
# There's some links, we don't want them.
raise forms.ValidationError(_('No links are allowed.'))
return bio
def save(self, log_for_developer=True):
u = super(UserEditForm, self).save(commit=False)
data = self.cleaned_data
photo = data['photo']
if photo:
u.picture_type = 'image/png'
tmp_destination = u.picture_path + '__unconverted'
with storage.open(tmp_destination, 'wb') as fh:
for chunk in photo.chunks():
fh.write(chunk)
tasks.resize_photo.delay(tmp_destination, u.picture_path,
set_modified_on=[u])
for (i, n) in notifications.NOTIFICATIONS_BY_ID.items():
enabled = n.mandatory or (str(i) in data['notifications'])
UserNotification.update_or_create(
user=u, notification_id=i, update={'enabled': enabled})
log.debug(u'User (%s) updated their profile' % u)
u.save()
return u
class AdminUserEditForm(UserEditForm):
email = forms.EmailField(widget=RequiredEmailInput)
admin_log = forms.CharField(required=True, label='Reason for change',
widget=RequiredTextarea(attrs={'rows': 4}))
notes = forms.CharField(required=False, label='Notes',
widget=forms.Textarea(attrs={'rows': 4}))
anonymize = forms.BooleanField(required=False)
def changed_fields(self):
return (set(self.changed_data) -
set(['admin_log', 'notifications', 'photo']))
def changes(self):
details = dict([(k, (self.initial[k], self.cleaned_data[k]))
for k in self.changed_fields()])
return details
def clean_anonymize(self):
if (self.cleaned_data['anonymize'] and
self.changed_fields() != set(['anonymize'])):
raise forms.ValidationError(_('To anonymize, enter a reason for'
' the change but do not change any'
' other field.'))
return self.cleaned_data['anonymize']
def clean_email(self):
return self.cleaned_data['email']
def save(self, *args, **kw):
profile = super(AdminUserEditForm, self).save(log_for_developer=False)
if self.cleaned_data['anonymize']:
amo.log(amo.LOG.ADMIN_USER_ANONYMIZED, self.instance,
self.cleaned_data['admin_log'])
profile.anonymize() # This also logs
else:
amo.log(amo.LOG.ADMIN_USER_EDITED, self.instance,
self.cleaned_data['admin_log'], details=self.changes())
log.info('Admin edit user: %s changed fields: %s' %
(self.instance, self.changed_fields()))
return profile
class BlacklistedNameAddForm(forms.Form):
names = forms.CharField(widget=forms.Textarea(
attrs={'cols': 40, 'rows': 16}))
def clean_names(self):
names = self.cleaned_data['names'].strip()
if not names:
raise forms.ValidationError(
_('Please enter at least one name to blacklist.'))
names = os.linesep.join(
[s.strip() for s in names.splitlines() if s.strip()])
return names
| true | true |
f71cf3759c1466e7c090211971d0be2e4fd58451 | 6,133 | py | Python | ros/src/tl_detector/tl_detector.py | sidharth2189/CarND-Capstone | 1d478aa0ca28f4d87a204ffd7032fccd12513c34 | [
"MIT"
] | 2 | 2019-04-29T10:25:31.000Z | 2019-04-29T18:14:08.000Z | ros/src/tl_detector/tl_detector.py | sidharth2189/CarND-Capstone | 1d478aa0ca28f4d87a204ffd7032fccd12513c34 | [
"MIT"
] | null | null | null | ros/src/tl_detector/tl_detector.py | sidharth2189/CarND-Capstone | 1d478aa0ca28f4d87a204ffd7032fccd12513c34 | [
"MIT"
] | 1 | 2019-04-30T01:03:03.000Z | 2019-04-30T01:03:03.000Z | #!/usr/bin/env python
import rospy
from scipy.spatial import KDTree
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, Pose
from styx_msgs.msg import TrafficLightArray, TrafficLight
from styx_msgs.msg import Lane
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
from light_classification.tl_classifier import TLClassifier
import tf
import cv2
import yaml
STATE_COUNT_THRESHOLD = 3
class TLDetector(object):
    """ROS node that finds the next traffic light ahead of the vehicle and
    publishes the waypoint index of its stop line on /traffic_waypoint
    (-1 is published when no upcoming red light is known).
    """

    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None          # latest vehicle pose (PoseStamped)
        self.waypoints = None     # static base waypoints (Lane)
        self.camera_image = None  # most recent camera frame (Image)
        self.lights = []          # traffic lights reported by the simulator

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)

        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray, self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)

        config_string = rospy.get_param("/traffic_light_config")
        # Use safe_load: yaml.load() without an explicit Loader is
        # deprecated and can construct arbitrary Python objects.
        self.config = yaml.safe_load(config_string)

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint', Int32, queue_size=1)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier()
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN       # most recent raw detection
        self.last_state = TrafficLight.UNKNOWN  # last debounced (stable) state
        self.last_wp = -1                       # last published stop-line index
        self.state_count = 0                    # frames the raw state has persisted
        self.waypoints_2D = None                # [[x, y], ...] backing the KDTree
        self.waypoint_tree = None               # KDTree for nearest-waypoint queries

        rospy.spin()

    def pose_cb(self, msg):
        """Cache the latest vehicle pose."""
        self.pose = msg

    def waypoints_cb(self, waypoints):
        """Cache the base waypoints and build a KDTree for fast
        nearest-neighbour lookups (built once; the track is static)."""
        self.waypoints = waypoints
        if not self.waypoints_2D:
            self.waypoints_2D = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y]
                                 for waypoint in waypoints.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2D)

    def traffic_cb(self, msg):
        """Cache the ground-truth traffic-light states from the simulator."""
        self.lights = msg.lights

    def image_cb(self, msg):
        """Identifies red lights in the incoming camera image and publishes the index
        of the waypoint closest to the red light's stop line to /traffic_waypoint

        Args:
            msg (Image): image from car-mounted camera
        """
        self.has_image = True
        self.camera_image = msg
        light_wp, state = self.process_traffic_lights()

        '''
        Publish upcoming red lights at camera frequency.
        Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
        of times till we start using it. Otherwise the previous stable state is
        used.
        '''
        if self.state != state:
            # Detection changed: restart the debounce counter.
            self.state_count = 0
            self.state = state
        elif self.state_count >= STATE_COUNT_THRESHOLD:
            # Detection is stable: publish it. Only red lights carry a
            # stop-line index; everything else publishes -1.
            self.last_state = self.state
            light_wp = light_wp if state == TrafficLight.RED else -1
            self.last_wp = light_wp
            self.upcoming_red_light_pub.publish(Int32(light_wp))
        else:
            # Not stable yet: keep republishing the previous stable value.
            self.upcoming_red_light_pub.publish(Int32(self.last_wp))
        self.state_count += 1

    def get_closest_waypoint(self, x, y):
        """Identifies the closest path waypoint to the given position
        https://en.wikipedia.org/wiki/Closest_pair_of_points_problem

        Args:
            x, y: world coordinates to match a waypoint to

        Returns:
            int: index of the closest waypoint in self.waypoints
        """
        closest_idx = self.waypoint_tree.query([x, y], 1)[1]
        return closest_idx

    def get_light_state(self, light):
        """Determines the current color of the traffic light

        Args:
            light (TrafficLight): light to classify

        Returns:
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)
        """
        # For testing against the simulator, just return the ground-truth
        # state carried on the message.
        return light.state
        # To classify from the camera image instead, re-enable:
        #if(not self.has_image):
        #    self.prev_light_loc = None
        #    return False
        #cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, "bgr8")
        ##Get classification
        #return self.light_classifier.get_classification(cv_image)

    def process_traffic_lights(self):
        """Finds closest visible traffic light, if one exists, and determines its
        location and color

        Returns:
            int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
            int: ID of traffic light color (specified in styx_msgs/TrafficLight)
        """
        closest_light = None  # closest traffic light ahead of the car
        line_wp_idx = None    # waypoint index of that light's stop line

        # List of positions that correspond to the line to stop in front of
        # for a given intersection.
        stop_line_positions = self.config['stop_line_positions']
        if(self.pose):
            car_wp_idx = self.get_closest_waypoint(self.pose.pose.position.x, self.pose.pose.position.y)

            # Find the nearest stop line that is still ahead of the car.
            diff = len(self.waypoints.waypoints)
            for i, light in enumerate(self.lights):
                # Get stop line waypoint index
                line = stop_line_positions[i]
                temp_wp_idx = self.get_closest_waypoint(line[0], line[1])
                # Keep the smallest positive waypoint gap.
                d = temp_wp_idx - car_wp_idx
                if d > 0 and d < diff:
                    diff = d
                    closest_light = light
                    line_wp_idx = temp_wp_idx

        if closest_light:
            state = self.get_light_state(closest_light)
            return line_wp_idx, state

        return -1, TrafficLight.UNKNOWN
if __name__ == '__main__':
    try:
        # Constructing the node blocks in rospy.spin() until shutdown.
        TLDetector()
    except rospy.ROSInterruptException:
        # Raised when ROS is shut down while the node is starting/spinning.
        rospy.logerr('Could not start traffic node.')
| 34.846591 | 125 | 0.661992 |
import rospy
from scipy.spatial import KDTree
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, Pose
from styx_msgs.msg import TrafficLightArray, TrafficLight
from styx_msgs.msg import Lane
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
from light_classification.tl_classifier import TLClassifier
import tf
import cv2
import yaml
STATE_COUNT_THRESHOLD = 3
class TLDetector(object):
def __init__(self):
rospy.init_node('tl_detector')
self.pose = None
self.waypoints = None
self.camera_image = None
self.lights = []
sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
'''
/vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
helps you acquire an accurate ground truth data source for the traffic light
classifier by sending the current color state of all traffic lights in the
simulator. When testing on the vehicle, the color state will not be available. You'll need to
rely on the position of the light and the camera image to predict it.
'''
sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray, self.traffic_cb)
sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)
config_string = rospy.get_param("/traffic_light_config")
self.config = yaml.load(config_string)
self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint', Int32, queue_size=1)
self.bridge = CvBridge()
self.light_classifier = TLClassifier()
self.listener = tf.TransformListener()
self.state = TrafficLight.UNKNOWN
self.last_state = TrafficLight.UNKNOWN
self.last_wp = -1
self.state_count = 0
self.waypoints_2D = None
self.waypoint_tree = None
rospy.spin()
def pose_cb(self, msg):
self.pose = msg
def waypoints_cb(self, waypoints):
self.waypoints = waypoints
if not self.waypoints_2D:
self.waypoints_2D = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoints.waypoints]
self.waypoint_tree = KDTree(self.waypoints_2D)
def traffic_cb(self, msg):
self.lights = msg.lights
def image_cb(self, msg):
"""Identifies red lights in the incoming camera image and publishes the index
of the waypoint closest to the red light's stop line to /traffic_waypoint
Args:
msg (Image): image from car-mounted camera
"""
self.has_image = True
self.camera_image = msg
light_wp, state = self.process_traffic_lights()
'''
Publish upcoming red lights at camera frequency.
Each predicted state has to occur `STATE_COUNT_THRESHOLD` number
of times till we start using it. Otherwise the previous stable state is
used.
'''
if self.state != state:
self.state_count = 0
self.state = state
elif self.state_count >= STATE_COUNT_THRESHOLD:
self.last_state = self.state
light_wp = light_wp if state == TrafficLight.RED else -1
self.last_wp = light_wp
self.upcoming_red_light_pub.publish(Int32(light_wp))
else:
self.upcoming_red_light_pub.publish(Int32(self.last_wp))
self.state_count += 1
def get_closest_waypoint(self, x, y):
"""Identifies the closest path waypoint to the given position
https://en.wikipedia.org/wiki/Closest_pair_of_points_problem
Args:
pose (Pose): position to match a waypoint to
Returns:
int: index of the closest waypoint in self.waypoints
"""
closest_idx = self.waypoint_tree.query([x, y], 1)[1]
return closest_idx
def get_light_state(self, light):
"""Determines the current color of the traffic light
Args:
light (TrafficLight): light to classify
Returns:
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
return light.state
process_traffic_lights(self):
"""Finds closest visible traffic light, if one exists, and determines its
location and color
Returns:
int: index of waypoint closes to the upcoming stop line for a traffic light (-1 if none exists)
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
closest_light = None
line_wp_idx = None
stop_line_positions = self.config['stop_line_positions']
if(self.pose):
car_wp_idx = self.get_closest_waypoint(self.pose.pose.position.x, self.pose.pose.position.y)
diff = len(self.waypoints.waypoints)
for i, light in enumerate(self.lights):
line = stop_line_positions[i]
temp_wp_idx = self.get_closest_waypoint(line[0], line[1])
d = temp_wp_idx - car_wp_idx
if d > 0 and d < diff:
diff = d
closest_light = light
line_wp_idx = temp_wp_idx
if closest_light:
state = self.get_light_state(closest_light)
return line_wp_idx, state
return -1, TrafficLight.UNKNOWN
if __name__ == '__main__':
try:
TLDetector()
except rospy.ROSInterruptException:
rospy.logerr('Could not start traffic node.')
| false | true |
f71cf4a9fd2cce5b7e524e3da45d2e4d49508fe0 | 2,256 | py | Python | Data/MarvinData.py | PatrickKutch/FUDD | faf36e24b7da99b75764f411586a823a172e4d01 | [
"Apache-2.0"
] | null | null | null | Data/MarvinData.py | PatrickKutch/FUDD | faf36e24b7da99b75764f411586a823a172e4d01 | [
"Apache-2.0"
] | null | null | null | Data/MarvinData.py | PatrickKutch/FUDD | faf36e24b7da99b75764f411586a823a172e4d01 | [
"Apache-2.0"
] | null | null | null | ##############################################################################
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
# File Abstract:
# Wrapper class for a piece of data, could be from file or from network
#
##############################################################################
from Util import Time
class MarvinData(object):
    """Wrapper around a single data item, either live (from the network)
    or replayed from a recorded file.

    Attributes:
        FormatVersion: version tag of the wire/file format.
        Value: the payload string.
        ArrivalTime: receive timestamp (ms) for live data, or the recorded
            elapsed time when replaying from a file.
        Namespace, ID: identify the data stream this item belongs to.
        Live: True when the item arrived over the network.
    """

    def __init__(self, Namespace, ID, Value, ElapsedTime, FormatVersion, isLive=True):
        self.FormatVersion = FormatVersion
        self.Value = Value
        # Live data is stamped on arrival; replayed data keeps its recorded
        # elapsed time. Any truthy isLive counts as live.
        if isLive:
            self.ArrivalTime = Time.GetCurrMS()
        else:
            self.ArrivalTime = ElapsedTime
        self.Namespace = Namespace
        self.ID = ID
        self.Live = isLive

    def ToXML(self, destIsFile=False):
        """Serialize as an Oscar XML fragment.

        With destIsFile=False (network form) the output carries an XML
        declaration and the value is CDATA-wrapped with a LiveData
        attribute; with destIsFile=True (file form) only the bare
        <Value> element is emitted.
        """
        parts = []
        if not destIsFile:
            parts.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>")
        parts.append("<Oscar Type=\"Data\">")
        parts.append("<Version>1</Version>")
        parts.append("<Namespace>" + self.Namespace + "</Namespace>")
        parts.append("<ID>" + self.ID + "</ID>")
        if not destIsFile:
            parts.append("<Value LiveData=\"" + str(self.Live) + "\">"
                         "<![CDATA[" + self.Value + "]]></Value>")
        else:
            parts.append("<Value>" + self.Value + "</Value>")
        parts.append("</Oscar>")
        # join() avoids the repeated-concatenation pattern of the original.
        return "".join(parts)
| 36.983607 | 113 | 0.557624 | true | true | |
f71cf7333084978808cab1d46f6e030e28cd6846 | 19,083 | py | Python | Lib/test/test_site.py | inging44/python3 | fcd8d9d2ee54b46b757ecf34f284b4e60a43097a | [
"bzip2-1.0.6"
] | 1,872 | 2015-01-02T18:56:47.000Z | 2022-03-31T07:34:39.000Z | Lib/test/test_site.py | inging44/python3 | fcd8d9d2ee54b46b757ecf34f284b4e60a43097a | [
"bzip2-1.0.6"
] | 675 | 2015-02-27T09:01:01.000Z | 2022-03-31T14:03:25.000Z | Lib/test/test_site.py | inging44/python3 | fcd8d9d2ee54b46b757ecf34f284b4e60a43097a | [
"bzip2-1.0.6"
] | 278 | 2015-01-02T03:48:20.000Z | 2022-03-29T20:40:44.000Z | """Tests for 'site'.
Tests assume the initial paths in sys.path once the interpreter has begun
executing have not been removed.
"""
import unittest
import test.support
from test.support import captured_stderr, TESTFN, EnvironmentVarGuard
import builtins
import os
import sys
import re
import encodings
import urllib.request
import urllib.error
import subprocess
import sysconfig
from copy import copy
# These tests are not particularly useful if Python was invoked with -S.
# If you add tests that are useful under -S, this skip should be moved
# to the class level.
# -S disables site processing entirely, so none of these tests would apply.
if sys.flags.no_site:
    raise unittest.SkipTest("Python was invoked with -S")
# Imported only after the -S check so the skip fires before site's import
# side effects run.
import site
if site.ENABLE_USER_SITE and not os.path.isdir(site.USER_SITE):
    # need to add user site directory for tests
    os.makedirs(site.USER_SITE)
    site.addsitedir(site.USER_SITE)
class HelperFunctionsTests(unittest.TestCase):
"""Tests for helper functions.
"""
def setUp(self):
"""Save a copy of sys.path"""
self.sys_path = sys.path[:]
self.old_base = site.USER_BASE
self.old_site = site.USER_SITE
self.old_prefixes = site.PREFIXES
self.original_vars = sysconfig._CONFIG_VARS
self.old_vars = copy(sysconfig._CONFIG_VARS)
def tearDown(self):
"""Restore sys.path"""
sys.path[:] = self.sys_path
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
site.PREFIXES = self.old_prefixes
sysconfig._CONFIG_VARS = self.original_vars
sysconfig._CONFIG_VARS.clear()
sysconfig._CONFIG_VARS.update(self.old_vars)
def test_makepath(self):
# Test makepath() have an absolute path for its first return value
# and a case-normalized version of the absolute path for its
# second value.
path_parts = ("Beginning", "End")
original_dir = os.path.join(*path_parts)
abs_dir, norm_dir = site.makepath(*path_parts)
self.assertEqual(os.path.abspath(original_dir), abs_dir)
if original_dir == os.path.normcase(original_dir):
self.assertEqual(abs_dir, norm_dir)
else:
self.assertEqual(os.path.normcase(abs_dir), norm_dir)
def test_init_pathinfo(self):
dir_set = site._init_pathinfo()
for entry in [site.makepath(path)[1] for path in sys.path
if path and os.path.isdir(path)]:
self.assertIn(entry, dir_set,
"%s from sys.path not found in set returned "
"by _init_pathinfo(): %s" % (entry, dir_set))
def pth_file_tests(self, pth_file):
"""Contain common code for testing results of reading a .pth file"""
self.assertIn(pth_file.imported, sys.modules,
"%s not in sys.modules" % pth_file.imported)
self.assertIn(site.makepath(pth_file.good_dir_path)[0], sys.path)
self.assertFalse(os.path.exists(pth_file.bad_dir_path))
def test_addpackage(self):
# Make sure addpackage() imports if the line starts with 'import',
# adds directories to sys.path for any line in the file that is not a
# comment or import that is a valid directory name for where the .pth
# file resides; invalid directories are not added
pth_file = PthFile()
pth_file.cleanup(prep=True) # to make sure that nothing is
# pre-existing that shouldn't be
try:
pth_file.create()
site.addpackage(pth_file.base_dir, pth_file.filename, set())
self.pth_file_tests(pth_file)
finally:
pth_file.cleanup()
def make_pth(self, contents, pth_dir='.', pth_name=TESTFN):
# Create a .pth file and return its (abspath, basename).
pth_dir = os.path.abspath(pth_dir)
pth_basename = pth_name + '.pth'
pth_fn = os.path.join(pth_dir, pth_basename)
pth_file = open(pth_fn, 'w', encoding='utf-8')
self.addCleanup(lambda: os.remove(pth_fn))
pth_file.write(contents)
pth_file.close()
return pth_dir, pth_basename
def test_addpackage_import_bad_syntax(self):
# Issue 10642
pth_dir, pth_fn = self.make_pth("import bad)syntax\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 1")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: the previous two should be independent checks so that the
# order doesn't matter. The next three could be a single check
# but my regex foo isn't good enough to write it.
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), r'import bad\)syntax')
self.assertRegex(err_out.getvalue(), 'SyntaxError')
def test_addpackage_import_bad_exec(self):
# Issue 10642
pth_dir, pth_fn = self.make_pth("randompath\nimport nosuchmodule\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 2")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: ditto previous XXX comment.
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), 'ImportError')
@unittest.skipIf(sys.platform == "win32", "Windows does not raise an "
"error for file paths containing null characters")
def test_addpackage_import_bad_pth_file(self):
# Issue 5258
pth_dir, pth_fn = self.make_pth("abc\x00def\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 1")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: ditto previous XXX comment.
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), 'TypeError')
def test_addsitedir(self):
# Same tests for test_addpackage since addsitedir() essentially just
# calls addpackage() for every .pth file in the directory
pth_file = PthFile()
pth_file.cleanup(prep=True) # Make sure that nothing is pre-existing
# that is tested for
try:
pth_file.create()
site.addsitedir(pth_file.base_dir, set())
self.pth_file_tests(pth_file)
finally:
pth_file.cleanup()
@unittest.skipUnless(site.ENABLE_USER_SITE, "requires access to PEP 370 "
"user-site (site.ENABLE_USER_SITE)")
def test_s_option(self):
usersite = site.USER_SITE
self.assertIn(usersite, sys.path)
env = os.environ.copy()
rc = subprocess.call([sys.executable, '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
self.assertEqual(rc, 1)
env = os.environ.copy()
rc = subprocess.call([sys.executable, '-s', '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
if usersite == site.getsitepackages()[0]:
self.assertEqual(rc, 1)
else:
self.assertEqual(rc, 0)
env = os.environ.copy()
env["PYTHONNOUSERSITE"] = "1"
rc = subprocess.call([sys.executable, '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
if usersite == site.getsitepackages()[0]:
self.assertEqual(rc, 1)
else:
self.assertEqual(rc, 0)
env = os.environ.copy()
env["PYTHONUSERBASE"] = "/tmp"
rc = subprocess.call([sys.executable, '-c',
'import sys, site; sys.exit(site.USER_BASE.startswith("/tmp"))'],
env=env)
self.assertEqual(rc, 1)
def test_getuserbase(self):
site.USER_BASE = None
user_base = site.getuserbase()
# the call sets site.USER_BASE
self.assertEqual(site.USER_BASE, user_base)
# let's set PYTHONUSERBASE and see if it uses it
site.USER_BASE = None
import sysconfig
sysconfig._CONFIG_VARS = None
with EnvironmentVarGuard() as environ:
environ['PYTHONUSERBASE'] = 'xoxo'
self.assertTrue(site.getuserbase().startswith('xoxo'),
site.getuserbase())
def test_getusersitepackages(self):
site.USER_SITE = None
site.USER_BASE = None
user_site = site.getusersitepackages()
# the call sets USER_BASE *and* USER_SITE
self.assertEqual(site.USER_SITE, user_site)
self.assertTrue(user_site.startswith(site.USER_BASE), user_site)
def test_getsitepackages(self):
site.PREFIXES = ['xoxo']
dirs = site.getsitepackages()
if (sys.platform == "darwin" and
sysconfig.get_config_var("PYTHONFRAMEWORK")):
# OS X framework builds
site.PREFIXES = ['Python.framework']
dirs = site.getsitepackages()
self.assertEqual(len(dirs), 3)
wanted = os.path.join('/Library',
sysconfig.get_config_var("PYTHONFRAMEWORK"),
sys.version[:3],
'site-packages')
self.assertEqual(dirs[2], wanted)
elif os.sep == '/':
# OS X non-framwework builds, Linux, FreeBSD, etc
self.assertEqual(len(dirs), 2)
wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
'site-packages')
self.assertEqual(dirs[0], wanted)
wanted = os.path.join('xoxo', 'lib', 'site-python')
self.assertEqual(dirs[1], wanted)
else:
# other platforms
self.assertEqual(len(dirs), 2)
self.assertEqual(dirs[0], 'xoxo')
wanted = os.path.join('xoxo', 'lib', 'site-packages')
self.assertEqual(dirs[1], wanted)
class PthFile(object):
    """Helper class for handling testing of .pth files"""

    def __init__(self, filename_base=TESTFN, imported="time",
                 good_dirname="__testdir__", bad_dirname="__bad"):
        """Initialize instance variables"""
        join = os.path.join
        self.filename = filename_base + ".pth"
        self.base_dir = os.path.abspath('')
        self.file_path = join(self.base_dir, self.filename)
        self.imported = imported
        self.good_dirname = good_dirname
        self.bad_dirname = bad_dirname
        self.good_dir_path = join(self.base_dir, self.good_dirname)
        self.bad_dir_path = join(self.base_dir, self.bad_dirname)

    def create(self):
        """Create a .pth file with a comment, blank lines, an ``import
        <self.imported>``, a line with self.good_dirname, and a line with
        self.bad_dirname.

        Creation of the directory for self.good_dir_path (based off of
        self.good_dirname) is also performed.

        Make sure to call self.cleanup() to undo anything done by this method.
        """
        lines = ["#import @bad module name",
                 "\n",
                 "import %s" % self.imported,
                 self.good_dirname,
                 self.bad_dirname]
        with open(self.file_path, 'w') as FILE:
            FILE.write("\n".join(lines) + "\n")
        os.mkdir(self.good_dir_path)

    def cleanup(self, prep=False):
        """Make sure that the .pth file is deleted, self.imported is not in
        sys.modules, and that both self.good_dirname and self.bad_dirname are
        not existing directories."""
        if os.path.exists(self.file_path):
            os.remove(self.file_path)
        if prep:
            # Remember (and remove) any pre-existing module so the test
            # can verify the .pth import, then restore it afterwards.
            self.imported_module = sys.modules.get(self.imported)
            if self.imported_module:
                del sys.modules[self.imported]
        elif self.imported_module:
            sys.modules[self.imported] = self.imported_module
        for dir_path in (self.good_dir_path, self.bad_dir_path):
            if os.path.exists(dir_path):
                os.rmdir(dir_path)
class ImportSideEffectTests(unittest.TestCase):
"""Test side-effects from importing 'site'."""
def setUp(self):
"""Make a copy of sys.path"""
self.sys_path = sys.path[:]
def tearDown(self):
"""Restore sys.path"""
sys.path[:] = self.sys_path
def test_abs_paths(self):
# Make sure all imported modules have their __file__ and __cached__
# attributes as absolute paths. Arranging to put the Lib directory on
# PYTHONPATH would cause the os module to have a relative path for
# __file__ if abs_paths() does not get run. sys and builtins (the
# only other modules imported before site.py runs) do not have
# __file__ or __cached__ because they are built-in.
parent = os.path.relpath(os.path.dirname(os.__file__))
env = os.environ.copy()
env['PYTHONPATH'] = parent
code = ('import os, sys',
# use ASCII to avoid locale issues with non-ASCII directories
'os_file = os.__file__.encode("ascii", "backslashreplace")',
r'sys.stdout.buffer.write(os_file + b"\n")',
'os_cached = os.__cached__.encode("ascii", "backslashreplace")',
r'sys.stdout.buffer.write(os_cached + b"\n")')
command = '\n'.join(code)
# First, prove that with -S (no 'import site'), the paths are
# relative.
proc = subprocess.Popen([sys.executable, '-S', '-c', command],
env=env,
stdout=subprocess.PIPE)
stdout, stderr = proc.communicate()
self.assertEqual(proc.returncode, 0)
os__file__, os__cached__ = stdout.splitlines()[:2]
self.assertFalse(os.path.isabs(os__file__))
self.assertFalse(os.path.isabs(os__cached__))
# Now, with 'import site', it works.
proc = subprocess.Popen([sys.executable, '-c', command],
env=env,
stdout=subprocess.PIPE)
stdout, stderr = proc.communicate()
self.assertEqual(proc.returncode, 0)
os__file__, os__cached__ = stdout.splitlines()[:2]
self.assertTrue(os.path.isabs(os__file__))
self.assertTrue(os.path.isabs(os__cached__))
def test_no_duplicate_paths(self):
# No duplicate paths should exist in sys.path
# Handled by removeduppaths()
site.removeduppaths()
seen_paths = set()
for path in sys.path:
self.assertNotIn(path, seen_paths)
seen_paths.add(path)
@unittest.skip('test not implemented')
def test_add_build_dir(self):
# Test that the build directory's Modules directory is used when it
# should be.
# XXX: implement
pass
def test_setting_quit(self):
# 'quit' and 'exit' should be injected into builtins
self.assertTrue(hasattr(builtins, "quit"))
self.assertTrue(hasattr(builtins, "exit"))
def test_setting_copyright(self):
# 'copyright', 'credits', and 'license' should be in builtins
self.assertTrue(hasattr(builtins, "copyright"))
self.assertTrue(hasattr(builtins, "credits"))
self.assertTrue(hasattr(builtins, "license"))
def test_setting_help(self):
# 'help' should be set in builtins
self.assertTrue(hasattr(builtins, "help"))
def test_aliasing_mbcs(self):
if sys.platform == "win32":
import locale
if locale.getdefaultlocale()[1].startswith('cp'):
for value in encodings.aliases.aliases.values():
if value == "mbcs":
break
else:
self.fail("did not alias mbcs")
def test_sitecustomize_executed(self):
# If sitecustomize is available, it should have been imported.
if "sitecustomize" not in sys.modules:
try:
import sitecustomize
except ImportError:
pass
else:
self.fail("sitecustomize not imported automatically")
@test.support.requires_resource('network')
@test.support.system_must_validate_cert
@unittest.skipUnless(sys.version_info[3] == 'final',
'only for released versions')
@unittest.skipUnless(hasattr(urllib.request, "HTTPSHandler"),
'need SSL support to download license')
def test_license_exists_at_url(self):
# This test is a bit fragile since it depends on the format of the
# string displayed by license in the absence of a LICENSE file.
url = license._Printer__data.split()[1]
req = urllib.request.Request(url, method='HEAD')
try:
with test.support.transient_internet(url):
with urllib.request.urlopen(req) as data:
code = data.getcode()
except urllib.error.HTTPError as e:
code = e.code
self.assertEqual(code, 200, msg="Can't find " + url)
class StartupImportTests(unittest.TestCase):
    """Checks which modules interpreter startup drags in via site.py."""

    def test_startup_imports(self):
        # This test checks which modules are loaded by Python when it
        # initially starts upon startup.
        popen = subprocess.Popen([sys.executable, '-I', '-v', '-c',
                                  'import sys; print(set(sys.modules))'],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        stdout, stderr = popen.communicate()
        stdout = stdout.decode('utf-8')
        stderr = stderr.decode('utf-8')
        modules = eval(stdout)
        self.assertIn('site', modules)

        # http://bugs.python.org/issue19205
        re_mods = {'re', '_sre', 'sre_compile', 'sre_constants', 'sre_parse'}
        # _osx_support uses the re module in many places
        if sys.platform != 'darwin':
            self.assertFalse(modules.intersection(re_mods), stderr)
        # http://bugs.python.org/issue9548
        self.assertNotIn('locale', modules, stderr)
        if sys.platform != 'darwin':
            # http://bugs.python.org/issue19209
            self.assertNotIn('copyreg', modules, stderr)
        # http://bugs.python.org/issue19218>
        collection_mods = {'_collections', 'collections', 'functools',
                           'heapq', 'itertools', 'keyword', 'operator',
                           'reprlib', 'types', 'weakref'
                          }.difference(sys.builtin_module_names)
        self.assertFalse(modules.intersection(collection_mods), stderr)
if __name__ == "__main__":
    # Run every TestCase defined in this module.
    unittest.main()
| 40.602128 | 78 | 0.606613 | import unittest
import test.support
from test.support import captured_stderr, TESTFN, EnvironmentVarGuard
import builtins
import os
import sys
import re
import encodings
import urllib.request
import urllib.error
import subprocess
import sysconfig
from copy import copy
if sys.flags.no_site:
raise unittest.SkipTest("Python was invoked with -S")
import site
if site.ENABLE_USER_SITE and not os.path.isdir(site.USER_SITE):
os.makedirs(site.USER_SITE)
site.addsitedir(site.USER_SITE)
class HelperFunctionsTests(unittest.TestCase):
def setUp(self):
self.sys_path = sys.path[:]
self.old_base = site.USER_BASE
self.old_site = site.USER_SITE
self.old_prefixes = site.PREFIXES
self.original_vars = sysconfig._CONFIG_VARS
self.old_vars = copy(sysconfig._CONFIG_VARS)
def tearDown(self):
sys.path[:] = self.sys_path
site.USER_BASE = self.old_base
site.USER_SITE = self.old_site
site.PREFIXES = self.old_prefixes
sysconfig._CONFIG_VARS = self.original_vars
sysconfig._CONFIG_VARS.clear()
sysconfig._CONFIG_VARS.update(self.old_vars)
def test_makepath(self):
path_parts = ("Beginning", "End")
original_dir = os.path.join(*path_parts)
abs_dir, norm_dir = site.makepath(*path_parts)
self.assertEqual(os.path.abspath(original_dir), abs_dir)
if original_dir == os.path.normcase(original_dir):
self.assertEqual(abs_dir, norm_dir)
else:
self.assertEqual(os.path.normcase(abs_dir), norm_dir)
def test_init_pathinfo(self):
dir_set = site._init_pathinfo()
for entry in [site.makepath(path)[1] for path in sys.path
if path and os.path.isdir(path)]:
self.assertIn(entry, dir_set,
"%s from sys.path not found in set returned "
"by _init_pathinfo(): %s" % (entry, dir_set))
def pth_file_tests(self, pth_file):
self.assertIn(pth_file.imported, sys.modules,
"%s not in sys.modules" % pth_file.imported)
self.assertIn(site.makepath(pth_file.good_dir_path)[0], sys.path)
self.assertFalse(os.path.exists(pth_file.bad_dir_path))
def test_addpackage(self):
pth_file = PthFile()
pth_file.cleanup(prep=True)
try:
pth_file.create()
site.addpackage(pth_file.base_dir, pth_file.filename, set())
self.pth_file_tests(pth_file)
finally:
pth_file.cleanup()
def make_pth(self, contents, pth_dir='.', pth_name=TESTFN):
# Create a .pth file and return its (abspath, basename).
pth_dir = os.path.abspath(pth_dir)
pth_basename = pth_name + '.pth'
pth_fn = os.path.join(pth_dir, pth_basename)
pth_file = open(pth_fn, 'w', encoding='utf-8')
self.addCleanup(lambda: os.remove(pth_fn))
pth_file.write(contents)
pth_file.close()
return pth_dir, pth_basename
def test_addpackage_import_bad_syntax(self):
# Issue 10642
pth_dir, pth_fn = self.make_pth("import bad)syntax\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 1")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: the previous two should be independent checks so that the
# order doesn't matter. The next three could be a single check
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), r'import bad\)syntax')
self.assertRegex(err_out.getvalue(), 'SyntaxError')
def test_addpackage_import_bad_exec(self):
# Issue 10642
pth_dir, pth_fn = self.make_pth("randompath\nimport nosuchmodule\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 2")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: ditto previous XXX comment.
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), 'ImportError')
@unittest.skipIf(sys.platform == "win32", "Windows does not raise an "
"error for file paths containing null characters")
def test_addpackage_import_bad_pth_file(self):
# Issue 5258
pth_dir, pth_fn = self.make_pth("abc\x00def\n")
with captured_stderr() as err_out:
site.addpackage(pth_dir, pth_fn, set())
self.assertRegex(err_out.getvalue(), "line 1")
self.assertRegex(err_out.getvalue(),
re.escape(os.path.join(pth_dir, pth_fn)))
# XXX: ditto previous XXX comment.
self.assertRegex(err_out.getvalue(), 'Traceback')
self.assertRegex(err_out.getvalue(), 'TypeError')
def test_addsitedir(self):
# Same tests for test_addpackage since addsitedir() essentially just
# calls addpackage() for every .pth file in the directory
pth_file = PthFile()
pth_file.cleanup(prep=True) # Make sure that nothing is pre-existing
# that is tested for
try:
pth_file.create()
site.addsitedir(pth_file.base_dir, set())
self.pth_file_tests(pth_file)
finally:
pth_file.cleanup()
@unittest.skipUnless(site.ENABLE_USER_SITE, "requires access to PEP 370 "
"user-site (site.ENABLE_USER_SITE)")
def test_s_option(self):
usersite = site.USER_SITE
self.assertIn(usersite, sys.path)
env = os.environ.copy()
rc = subprocess.call([sys.executable, '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
self.assertEqual(rc, 1)
env = os.environ.copy()
rc = subprocess.call([sys.executable, '-s', '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
if usersite == site.getsitepackages()[0]:
self.assertEqual(rc, 1)
else:
self.assertEqual(rc, 0)
env = os.environ.copy()
env["PYTHONNOUSERSITE"] = "1"
rc = subprocess.call([sys.executable, '-c',
'import sys; sys.exit(%r in sys.path)' % usersite],
env=env)
if usersite == site.getsitepackages()[0]:
self.assertEqual(rc, 1)
else:
self.assertEqual(rc, 0)
env = os.environ.copy()
env["PYTHONUSERBASE"] = "/tmp"
rc = subprocess.call([sys.executable, '-c',
'import sys, site; sys.exit(site.USER_BASE.startswith("/tmp"))'],
env=env)
self.assertEqual(rc, 1)
    def test_getuserbase(self):
        """getuserbase() must compute USER_BASE and cache it on the module."""
        site.USER_BASE = None
        user_base = site.getuserbase()
        # the call sets site.USER_BASE
        self.assertEqual(site.USER_BASE, user_base)
        # Set PYTHONUSERBASE and check that getuserbase() honours it.
        # sysconfig caches the config vars, so drop its cache first.
        site.USER_BASE = None
        import sysconfig
        sysconfig._CONFIG_VARS = None
        with EnvironmentVarGuard() as environ:
            environ['PYTHONUSERBASE'] = 'xoxo'
            self.assertTrue(site.getuserbase().startswith('xoxo'),
                            site.getuserbase())
def test_getusersitepackages(self):
site.USER_SITE = None
site.USER_BASE = None
user_site = site.getusersitepackages()
self.assertEqual(site.USER_SITE, user_site)
self.assertTrue(user_site.startswith(site.USER_BASE), user_site)
    def test_getsitepackages(self):
        """getsitepackages() must derive its directories from site.PREFIXES."""
        site.PREFIXES = ['xoxo']
        dirs = site.getsitepackages()
        if (sys.platform == "darwin" and
            sysconfig.get_config_var("PYTHONFRAMEWORK")):
            # OS X framework builds additionally report a directory under
            # /Library/<framework>/<X.Y>/site-packages.
            site.PREFIXES = ['Python.framework']
            dirs = site.getsitepackages()
            self.assertEqual(len(dirs), 3)
            wanted = os.path.join('/Library',
                                  sysconfig.get_config_var("PYTHONFRAMEWORK"),
                                  sys.version[:3],
                                  'site-packages')
            self.assertEqual(dirs[2], wanted)
        elif os.sep == '/':
            # POSIX layout: lib/pythonX.Y/site-packages and lib/site-python.
            self.assertEqual(len(dirs), 2)
            wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
                                  'site-packages')
            self.assertEqual(dirs[0], wanted)
            wanted = os.path.join('xoxo', 'lib', 'site-python')
            self.assertEqual(dirs[1], wanted)
        else:
            # Windows layout: the prefix itself plus lib/site-packages.
            self.assertEqual(len(dirs), 2)
            self.assertEqual(dirs[0], 'xoxo')
            wanted = os.path.join('xoxo', 'lib', 'site-packages')
            self.assertEqual(dirs[1], wanted)
class PthFile(object):
    """Helper that creates and removes a .pth file used by the tests.

    The generated file exercises every kind of line addpackage() must
    handle: a comment, a blank line, an ``import`` statement, the name of
    an existing directory and the name of a missing directory.
    """

    def __init__(self, filename_base=TESTFN, imported="time",
                 good_dirname="__testdir__", bad_dirname="__bad"):
        # All paths are anchored at the current working directory so the
        # tests can add it as a site dir.
        self.filename = filename_base + ".pth"
        self.base_dir = os.path.abspath('')
        self.file_path = os.path.join(self.base_dir, self.filename)
        self.imported = imported
        self.good_dirname = good_dirname
        self.bad_dirname = bad_dirname
        self.good_dir_path = os.path.join(self.base_dir, self.good_dirname)
        self.bad_dir_path = os.path.join(self.base_dir, self.bad_dirname)

    def create(self):
        """Write the .pth file and create the existing directory."""
        # 'with' closes the handle even if one of the writes fails
        # (replaces the previous open/try/finally/close dance).
        with open(self.file_path, 'w') as FILE:
            print("#import @bad module name", file=FILE)
            print("\n", file=FILE)
            print("import %s" % self.imported, file=FILE)
            print(self.good_dirname, file=FILE)
            print(self.bad_dirname, file=FILE)
        os.mkdir(self.good_dir_path)

    def cleanup(self, prep=False):
        """Remove the .pth file and the test directories.

        With ``prep=True``, also snapshot and unload the module named by
        ``self.imported`` so create() can prove it gets re-imported; with
        ``prep=False``, restore that module and remove the directories.
        """
        if os.path.exists(self.file_path):
            os.remove(self.file_path)
        if prep:
            self.imported_module = sys.modules.get(self.imported)
            if self.imported_module:
                del sys.modules[self.imported]
        else:
            # getattr guard: cleanup(prep=False) used to raise
            # AttributeError when the prep pass had never run, since
            # self.imported_module is only assigned under prep=True.
            if getattr(self, 'imported_module', None):
                sys.modules[self.imported] = self.imported_module
            if os.path.exists(self.good_dir_path):
                os.rmdir(self.good_dir_path)
            if os.path.exists(self.bad_dir_path):
                os.rmdir(self.bad_dir_path)
class ImportSideEffectTests(unittest.TestCase):
    """Check the side effects performed when the site module is imported."""
    def setUp(self):
        # Save a copy of sys.path so each test starts from a pristine state.
        self.sys_path = sys.path[:]
    def tearDown(self):
        # Undo any sys.path mutation the test performed.
        sys.path[:] = self.sys_path
    def test_abs_paths(self):
        # Importing 'site' absolutizes __file__/__cached__ of modules that
        # were imported before it; with -S (no site import) they stay
        # relative when the module is found via a relative PYTHONPATH.
        parent = os.path.relpath(os.path.dirname(os.__file__))
        env = os.environ.copy()
        env['PYTHONPATH'] = parent
        # Encode to ASCII so the child's output survives any locale;
        # the raw strings keep the b"\n" escapes intact for the child.
        code = ('import os, sys',
                'os_file = os.__file__.encode("ascii", "backslashreplace")',
                r'sys.stdout.buffer.write(os_file + b"\n")',
                'os_cached = os.__cached__.encode("ascii", "backslashreplace")',
                r'sys.stdout.buffer.write(os_cached + b"\n")')
        command = '\n'.join(code)
        # With -S the paths should remain relative.
        proc = subprocess.Popen([sys.executable, '-S', '-c', command],
                                env=env,
                                stdout=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        self.assertEqual(proc.returncode, 0)
        os__file__, os__cached__ = stdout.splitlines()[:2]
        self.assertFalse(os.path.isabs(os__file__))
        self.assertFalse(os.path.isabs(os__cached__))
        # Without -S (site imported) they must be absolute.
        proc = subprocess.Popen([sys.executable, '-c', command],
                                env=env,
                                stdout=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        self.assertEqual(proc.returncode, 0)
        os__file__, os__cached__ = stdout.splitlines()[:2]
        self.assertTrue(os.path.isabs(os__file__))
        self.assertTrue(os.path.isabs(os__cached__))
    def test_no_duplicate_paths(self):
        # 'site' should not add duplicate entries to sys.path.
        site.removeduppaths()
        seen_paths = set()
        for path in sys.path:
            self.assertNotIn(path, seen_paths)
            seen_paths.add(path)
    @unittest.skip('test not implemented')
    def test_add_build_dir(self):
        # Test that the build directory is handled by 'site' as it
        # should be.
        # XXX: implement
        pass
    def test_setting_quit(self):
        # 'quit' and 'exit' should be injected into builtins
        self.assertTrue(hasattr(builtins, "quit"))
        self.assertTrue(hasattr(builtins, "exit"))
    def test_setting_copyright(self):
        # 'copyright', 'credits', and 'license' should be in builtins
        self.assertTrue(hasattr(builtins, "copyright"))
        self.assertTrue(hasattr(builtins, "credits"))
        self.assertTrue(hasattr(builtins, "license"))
    def test_setting_help(self):
        # 'help' should be set in builtins
        self.assertTrue(hasattr(builtins, "help"))
    def test_aliasing_mbcs(self):
        # On Windows with a 'cp*' default codec, at least one encoding
        # alias must map to "mbcs"; the for/else fires when none does.
        if sys.platform == "win32":
            import locale
            if locale.getdefaultlocale()[1].startswith('cp'):
                for value in encodings.aliases.aliases.values():
                    if value == "mbcs":
                        break
                else:
                    self.fail("did not alias mbcs")
    def test_sitecustomize_executed(self):
        # If sitecustomize is available, it should have been imported.
        if "sitecustomize" not in sys.modules:
            try:
                import sitecustomize
            except ImportError:
                pass
            else:
                self.fail("sitecustomize not imported automatically")
    @test.support.requires_resource('network')
    @test.support.system_must_validate_cert
    @unittest.skipUnless(sys.version_info[3] == 'final',
                         'only for released versions')
    @unittest.skipUnless(hasattr(urllib.request, "HTTPSHandler"),
                         'need SSL support to download license')
    def test_license_exists_at_url(self):
        # This test is a bit fragile since it depends on the format of the
        # string displayed by license in the absence of a LICENSE file.
        url = license._Printer__data.split()[1]
        req = urllib.request.Request(url, method='HEAD')
        try:
            with test.support.transient_internet(url):
                with urllib.request.urlopen(req) as data:
                    code = data.getcode()
        except urllib.error.HTTPError as e:
            code = e.code
        self.assertEqual(code, 200, msg="Can't find " + url)
class StartupImportTests(unittest.TestCase):
    """Verify that an isolated interpreter keeps its startup imports lean."""
    def test_startup_imports(self):
        # Ask a fresh isolated interpreter (-I) which modules it loaded;
        # -v puts the import trace on stderr for failure diagnostics.
        proc = subprocess.Popen([sys.executable, '-I', '-v', '-c',
                                 'import sys; print(set(sys.modules))'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate()
        out = out.decode('utf-8')
        err = err.decode('utf-8')
        # The child printed a set literal of its module names; evaluating
        # output produced by our own interpreter is acceptable here.
        modules = eval(out)
        self.assertIn('site', modules)
        re_mods = {'re', '_sre', 'sre_compile', 'sre_constants', 'sre_parse'}
        if sys.platform != 'darwin':
            # Outside OS X, neither the regex machinery nor locale nor
            # copyreg should be pulled in at startup.
            self.assertFalse(modules.intersection(re_mods), err)
            self.assertNotIn('locale', modules, err)
            self.assertNotIn('copyreg', modules, err)
        expected_absent = {'_collections', 'collections', 'functools',
                           'heapq', 'itertools', 'keyword', 'operator',
                           'reprlib', 'types', 'weakref'}
        # Built-in modules are always present, so don't count those.
        expected_absent -= set(sys.builtin_module_names)
        self.assertFalse(modules.intersection(expected_absent), err)
if __name__ == "__main__":
    # Run the test suite when this module is executed as a script.
    unittest.main()
| true | true |
f71cf74301c83443bdf85dae4b32b0b7c4ddf129 | 518 | py | Python | jax_cfd/ml/optimizer_modules.py | ngam/jax-cfd | 8eff9c47bdc7fb19b6453db94ca65f6be64d91f6 | [
"Apache-2.0"
] | 244 | 2021-05-18T18:49:14.000Z | 2022-03-30T18:27:21.000Z | jax_cfd/ml/optimizer_modules.py | ngam/jax-cfd | 8eff9c47bdc7fb19b6453db94ca65f6be64d91f6 | [
"Apache-2.0"
] | 14 | 2021-06-24T22:15:44.000Z | 2022-03-30T06:22:52.000Z | jax_cfd/ml/optimizer_modules.py | ngam/jax-cfd | 8eff9c47bdc7fb19b6453db94ca65f6be64d91f6 | [
"Apache-2.0"
] | 36 | 2021-05-29T09:30:44.000Z | 2022-03-28T12:33:40.000Z | """Configurable optimizers from JAX."""
import gin
from jax.example_libraries import optimizers
@gin.configurable
def optimizer(value):
    """Gin-configurable passthrough that returns *value* unchanged.

    Because the function is @gin.configurable, a gin config file can bind
    ``optimizer.value`` — presumably to select one of the optimizers
    registered below (verify against callers).
    """
    return value
# Register the JAX example-library optimizers and step-size schedules with
# gin so config files can reference them by name.
gin.external_configurable(optimizers.adam)
gin.external_configurable(optimizers.momentum)
gin.external_configurable(optimizers.nesterov)
gin.external_configurable(optimizers.exponential_decay)
gin.external_configurable(optimizers.inverse_time_decay)
gin.external_configurable(optimizers.polynomial_decay)
gin.external_configurable(optimizers.piecewise_constant)
from jax.example_libraries import optimizers
@gin.configurable
def optimizer(value):
return value
gin.external_configurable(optimizers.adam)
gin.external_configurable(optimizers.momentum)
gin.external_configurable(optimizers.nesterov)
gin.external_configurable(optimizers.exponential_decay)
gin.external_configurable(optimizers.inverse_time_decay)
gin.external_configurable(optimizers.polynomial_decay)
gin.external_configurable(optimizers.piecewise_constant)
| true | true |
f71cf7ceb04c02f72c61db2c923addcb66daac1d | 96,917 | py | Python | anima/env/mayaEnv/toolbox.py | Khosiyat/anima | f631c08400547f49ac5f1feeb730f22c255eb771 | [
"MIT"
] | 1 | 2021-07-03T19:03:41.000Z | 2021-07-03T19:03:41.000Z | anima/env/mayaEnv/toolbox.py | Khosiyat/anima | f631c08400547f49ac5f1feeb730f22c255eb771 | [
"MIT"
] | null | null | null | anima/env/mayaEnv/toolbox.py | Khosiyat/anima | f631c08400547f49ac5f1feeb730f22c255eb771 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import functools
import os
from anima.env.mayaEnv.animation import Animation
from anima.env.mayaEnv.general import General
from anima.env.mayaEnv.modeling import Modeling
from anima.env.mayaEnv.previs import Previs
from anima.env.mayaEnv.reference import Reference
from anima.env.mayaEnv.render import Render
from anima.env.mayaEnv.rigging import Rigging
import pymel.core as pm
import maya.mel as mel
from anima.env.mayaEnv import auxiliary, camera_tools
# Call history for the repeat-last machinery; each entry is a
# [callable, args, kwargs] list appended by repeat_last().
__last_commands__ = []
# Name of the optionVar remembering the last active toolbox tab.
__last_tab__ = 'ANIMA_TOOLBOX_LAST_TAB_INDEX'
# Flat list of toolbox UI controls, filled by UI() and consumed by
# filter_tools() for the search box.
__commands__ = []
def repeater(index):
    """Re-run the command stored at *index* in ``__last_commands__``.

    :param int index: position of the recorded [callable, args, kwargs]
        entry to re-run
    :returns: the command's return value, or None when no entry exists at
        *index*
    """
    global __last_commands__
    # Keep the try body minimal: only the lookup may legitimately raise
    # IndexError here.  Previously the call itself was inside the try, so
    # an IndexError raised *by the invoked command* was silently swallowed
    # and turned into None.
    try:
        callable_, args, kwargs = __last_commands__[index]
    except IndexError:
        return None
    return callable_(*args, **kwargs)
def repeat_last(call_data):
    """Own implementation of pm.repeatLast.

    Registers *call_data* with Maya's ``repeatLast`` mechanism so the
    command can be re-run later via repeater(), then executes the callable
    immediately.

    :param call_data: a [callable, args, kwargs] list
    """
    global __last_commands__
    # The entry is appended below, so its future position in the list is
    # the current length.
    index = len(__last_commands__)
    callable_ = call_data[0]
    args = call_data[1]
    kwargs = call_data[2]
    # MEL snippet that calls back into Python; the doubled backslashes
    # must survive a round-trip through MEL's own string parsing, so do
    # not touch the escaping.
    command = \
        'print \\"\\";python(\\\"from anima.env.mayaEnv.toolbox import ' \
        'repeater; repeater(%s);\\\");' % index
    repeat_last_command = 'repeatLast -ac "%(command)s" -acl "%(label)s";' % {
        'command': command,
        'label': callable_.__name__
    }
    print(repeat_last_command)
    pm.mel.eval(repeat_last_command)
    __last_commands__.append(call_data)
    # also call the callable
    callable_(*args, **kwargs)
def repeated_callback(callable_, *args, **kwargs):
    """Build a pm.Callback that routes *callable_* through repeat_last().

    The returned callback records the call in the repeat-last history and
    registers it with pm.repeatLast before executing it.
    """
    call_data = [callable_, args, kwargs]
    return pm.Callback(repeat_last, call_data)
class Color(object):
    """A simple cyclic palette of pastel RGB colors."""

    # Fixed palette; `color` returns the entry at the current index.
    colors = [
        (1.000, 0.500, 0.666),
        (1.000, 0.833, 0.500),
        (0.666, 1.000, 0.500),
        (0.500, 1.000, 0.833),
        (0.500, 0.666, 1.000),
        (0.833, 0.500, 1.000)
    ]

    def __init__(self, index=0):
        # Current position in the palette and cached palette size.
        self.index = index
        self.max_colors = len(self.colors)

    def change(self):
        """Advance the index to the next palette entry, wrapping around."""
        next_index = self.index + 1
        self.index = int(next_index % self.max_colors)

    def reset(self):
        """Jump back to the first palette entry."""
        self.index = 0

    @property
    def color(self):
        """The RGB tuple at the current palette position."""
        return self.colors[self.index]
def filter_tools(search_text):
    """Show only the toolbox controls whose label matches *search_text*.

    Plain buttons match on their own label; a rowLayout matches when any
    of its button/staticText children matches.  Matching is
    case-insensitive.

    :param str search_text: text to look for in control labels
    """
    # Normalize once so both branches match case-insensitively.  The
    # rowLayout branch previously compared the raw search text against
    # lowered child labels, so uppercase input never matched there.
    search_text = search_text.lower()
    for command in __commands__:
        uitype = command.type()
        if uitype == 'button':
            label = command.getLabel()
            command.setVisible(search_text in label.lower())
        elif uitype == 'rowLayout':
            # Visible if at least one labelled child matches.
            matched_children = False
            for child in command.children():
                if child.type() in ['button', 'staticText'] and \
                   search_text in child.getLabel().lower():
                    matched_children = True
                    break
            command.setVisible(matched_children)
def UI():
# window setup
width = 260
height = 650
row_spacing = 3
color = Color()
# init the __commands LUT
global __commands__
__commands__ = []
if pm.dockControl("toolbox_dockControl", q=True, ex=True):
pm.deleteUI("toolbox_dockControl")
window_name = "toolbox_window"
if pm.window(window_name, q=True, ex=True):
pm.deleteUI(window_name, wnd=True)
toolbox_window = pm.window(
window_name,
wh=(width, height),
title="Anima ToolBox"
)
# the layout that holds the tabs
main_form_layout = pm.formLayout(
'main_form_layout', nd=100, parent=toolbox_window
)
search_field = pm.textField(
'search_text_field',
tcc=filter_tools,
placeholderText='Search...',
parent=main_form_layout
)
main_tab_layout = pm.tabLayout(
'main_tab_layout', scr=True, cr=True, parent=main_form_layout
)
# attach the main_tab_layout to main_form_layout
pm.formLayout(
main_form_layout, edit=True,
attachForm=[
(search_field, "top", 0),
(search_field, "left", 0),
(search_field, "right", 0),
# (main_tab_layout, "top", 0),
(main_tab_layout, "bottom", 0),
(main_tab_layout, "left", 0),
(main_tab_layout, "right", 0)
],
attachNone=[
(search_field, "bottom")
],
attachControl=[
(main_tab_layout, "top", 0, search_field)
]
)
with main_tab_layout:
# ----- GENERAL ------
general_column_layout = pm.columnLayout(
'general_column_layout',
adj=True,
cal="center",
rs=row_spacing
)
with general_column_layout:
color.change()
pm.button(
'open_version_button',
l="Open Version",
c=repeated_callback(General.version_dialog, mode=1),
ann="Open Version",
bgc=color.color
)
pm.button(
'save_as_version_button',
l="Save As Version",
c=repeated_callback(General.version_dialog, mode=0),
ann="Save As Version",
bgc=color.color
)
color.change()
pm.button(
'selectionManager_button',
l="Selection Manager",
c=repeated_callback(General.selection_manager),
ann="Selection Manager",
bgc=color.color
)
color.change()
pm.button(
'publishChecker_button',
l="Publish Checker",
c=repeated_callback(General.publish_checker),
ann="Publish Checker",
bgc=color.color
)
color.change()
pm.button(
'rename_unique_button',
l='Rename Unique',
c=repeated_callback(General.rename_unique),
ann=General.rename_unique.__doc__,
bgc=color.color
)
pm.button(
'removeColonFromNames_button',
l="remove colon(:) from node names",
c=repeated_callback(General.remove_colon_from_names),
ann="removes the colon (:) character from all "
"selected object names",
bgc=color.color
)
pm.button(
'removePastedFromNames_button',
l="remove \"pasted_\" from node names",
c=repeated_callback(General.remove_pasted),
ann="removes the \"passed__\" from all selected "
"object names",
bgc=color.color
)
color.change()
pm.button(
'togglePolyMeshes_button',
l="toggle polymesh visibility",
c=repeated_callback(General.toggle_poly_meshes),
ann="toggles the polymesh display in the active model "
"panel",
bgc=color.color
)
color.change()
pm.button(
'selectSetMembers_button',
l="select set members",
c=repeated_callback(General.select_set_members),
ann="selects the selected set members in correct "
"order",
bgc=color.color
)
color.change()
pm.button(
'delete_unused_intermediate_shapes_button',
l='Delete Unused Intermediate Shape Nodes',
c=repeated_callback(General.delete_unused_intermediate_shapes),
ann='Deletes unused (no connection) intermediate shape nodes',
bgc=color.color
)
color.change()
pm.button(
'export_transform_info_button',
l='Export Transform Info',
c=repeated_callback(General.export_transform_info),
ann='exports transform info',
bgc=color.color
)
pm.button(
'import_transform_info_button',
l='Import Transform Info',
c=repeated_callback(General.import_transform_info),
ann='imports transform info',
bgc=color.color
)
color.change()
pm.button(
'export_global_transform_info_button',
l='Export Global Transform Info',
c=repeated_callback(General.export_transform_info, True),
ann='exports global transform info',
bgc=color.color
)
pm.button(
'import_global_transform_info_button',
l='Import Global Transform Info',
c=repeated_callback(General.import_transform_info, True),
ann='imports global transform info',
bgc=color.color
)
color.change()
pm.button(
'export_component_transform_info_button',
l='Export Component Transform Info',
c=repeated_callback(General.export_component_transform_info),
ann='exports component transform info',
bgc=color.color
)
pm.button(
'import_component_transform_info_button',
l='Import Component Transform Info',
c=repeated_callback(General.import_component_transform_info),
ann='imports component transform info',
bgc=color.color
)
color.change()
pm.button(
'import_rsproxy_data_from_houdini_button',
l='Import RSProxy Data From Houdini',
c=repeated_callback(General.rsproxy_data_importer),
ann=General.rsproxy_data_importer.__doc__,
bgc=color.color
)
color.change()
pm.button(
'generate_thumbnail_button',
l='Generate Thumbnail',
c=repeated_callback(General.generate_thumbnail),
ann='Generates thumbnail for current scene',
bgc=color.color
)
color.change()
pm.button(
'cleanup_light_cameras_button',
l='Cleanup Light Cameras',
c=repeated_callback(General.cleanup_light_cameras),
ann=General.cleanup_light_cameras.__doc__,
bgc=color.color
)
color.change()
from anima.env.mayaEnv.general import unknown_plugin_cleaner_ui
pm.button(
'cleanup_plugins_button',
l='Cleanup Unknown Plugins',
c=repeated_callback(unknown_plugin_cleaner_ui),
ann=unknown_plugin_cleaner_ui.__doc__,
bgc=color.color
)
color.change()
pm.button(
'unshape_parent_node_button',
l='Unshape Parent Nodes',
c=repeated_callback(General.unshape_parent_nodes),
ann=General.unshape_parent_nodes.__doc__,
bgc=color.color
)
# store commands
__commands__.extend(general_column_layout.children())
# ----- REFERENCE ------
reference_columnLayout = pm.columnLayout(
'reference_columnLayout',
adj=True, cal="center", rs=row_spacing)
with reference_columnLayout:
color.reset()
pm.text(l='===== Reference Tools =====')
pm.button(
'nsDelete_button',
l="nsDelete",
c=repeated_callback(General.namespace_deleter),
ann=General.namespace_deleter.__doc__,
bgc=color.color
)
color.change()
pm.button(
'duplicate_selected_reference_button',
l='Duplicate Selected Reference',
c=repeated_callback(Reference.duplicate_selected_reference),
ann='Duplicates the selected reference',
bgc=color.color
)
color.change()
pm.button(
'select_reference_in_reference_editor_button',
l='Select Reference In Reference Editor',
c=repeated_callback(
Reference.select_reference_in_reference_editor
),
ann=Reference.select_reference_in_reference_editor.__doc__,
bgc=color.color
)
color.change()
pm.button(
'get_selected_reference_path_button',
l='Get Selected Reference Path',
c=repeated_callback(Reference.get_selected_reference_path),
ann='Prints the selected reference full path',
bgc=color.color
)
pm.button(
'open_selected_reference_button',
l='Open Selected Reference in New Maya',
c=repeated_callback(Reference.open_reference_in_new_maya),
ann='Opens the selected reference in new Maya '
'instance',
bgc=color.color
)
color.change()
pm.button(
'publish_model_as_look_dev_button',
l='Model -> LookDev',
c=repeated_callback(Reference.publish_model_as_look_dev),
ann='References the current Model scene to the LookDev scene '
'of the same task, creates the LookDev scene if '
'necessary, also reopens the current model scene.',
bgc=color.color
)
color.change()
pm.button(
'fix_reference_namespace_button',
l='Fix Reference Namespace',
c=repeated_callback(Reference.fix_reference_namespace),
ann='Fixes old style reference namespaces with new one, '
'creates new versions if necessary.',
bgc=color.color
)
color.change()
pm.button(
'fix_reference_paths_button',
l='Fix Reference Paths',
c=repeated_callback(Reference.fix_reference_paths),
ann='Fixes reference paths deeply, so they will use'
'$REPO env var.',
bgc=color.color
)
pm.button(
'fix_student_license_on_references_button',
l='Fix Student License Error On References',
c=repeated_callback(
Reference.fix_student_license_on_references
),
ann=Reference.fix_student_license.__doc__,
bgc=color.color
)
pm.button(
'fix_student_license_on_files_button',
l='Fix Student License Error On Selected Files',
c=repeated_callback(
Reference.fix_student_license_on_selected_file
),
ann=Reference.fix_student_license.__doc__,
bgc=color.color
)
color.change()
pm.button(
'archive_button',
l='Archive Current Scene',
c=repeated_callback(Reference.archive_current_scene),
ann='Creates a ZIP file containing the current scene and its'
'references in a flat Maya default project folder '
'structure',
bgc=color.color
)
pm.button(
'bind_to_original_button',
l='Bind To Original',
c=repeated_callback(Reference.bind_to_original),
ann='Binds the current local references to the ones on the '
'repository',
bgc=color.color
)
pm.button(
'unload_selected_references_button',
l='Unload Selected References',
c=repeated_callback(Reference.unload_selected_references),
ann='Unloads the highest references that is related with the selected objects',
bgc=color.color
)
pm.button(
'unload_unselected_references_button',
l='Unload UnSelected References',
c=repeated_callback(Reference.unload_unselected_references),
ann='Unloads any references that is not related with the '
'selected objects',
bgc=color.color
)
color.change()
pm.button(
'remove_selected_references_button',
l='Remove Selected References',
c=repeated_callback(Reference.remove_selected_references),
ann='Removes the highest references that is related with the selected objects',
bgc=color.color
)
color.change()
pm.text(l='===== Representation Tools =====')
with pm.rowLayout(nc=2, adj=1):
pm.checkBoxGrp(
'generate_repr_types_checkbox_grp',
l='Reprs',
numberOfCheckBoxes=3,
labelArray3=['GPU', 'ASS', 'RS'],
cl4=['left', 'left', 'left', 'left'],
cw4=[51, 50, 50, 50],
valueArray3=[1, 1, 1]
)
pm.checkBox(
'generate_repr_skip_existing_checkBox',
l='Skip existing Reprs.',
value=0
)
pm.button(
'generate_repr_of_all_references_button',
l='Deep Generate Repr Of All References',
c=repeated_callback(
Reference.generate_repr_of_all_references_caller
),
ann='Deeply generates desired Representations of all '
'references of this scene',
bgc=color.color
)
pm.button(
'generate_repr_of_scene_button',
l='Generate Repr Of This Scene',
c=repeated_callback(Reference.generate_repr_of_scene_caller),
ann='Generates desired Representations of this scene',
bgc=color.color
)
color.change()
with pm.rowLayout(nc=2, adj=1):
pm.radioButtonGrp(
'repr_apply_to_radio_button_grp',
l='Apply To',
# ad3=1,
labelArray2=['Selected', 'All References'],
numberOfRadioButtons=2,
cl3=['left', 'left', 'left'],
cw3=[50, 65, 65],
sl=1
)
pm.button(
'to_base_button',
l='To Base',
c=repeated_callback(Reference.to_base),
ann='Convert selected to Base representation',
bgc=color.color
)
pm.button(
'to_gpu_button',
l='To GPU',
c=repeated_callback(Reference.to_gpu),
ann='Convert selected to GPU representation',
bgc=color.color
)
pm.button(
'to_ass_button',
l='To ASS',
c=repeated_callback(Reference.to_ass),
ann='Convert selected to ASS representation',
bgc=color.color
)
pm.button(
'to_rs_button',
l='To RS',
c=repeated_callback(Reference.to_rs),
ann='Convert selected to RS representation',
bgc=color.color
)
color.change()
pm.button(
'update_alembic_references_button',
l='Update Alembic References',
c=repeated_callback(auxiliary.update_alembic_references),
ann=auxiliary.update_alembic_references.__doc__,
bgc=color.color
)
# store commands
__commands__.extend(reference_columnLayout.children())
# ----- MODELING ------
modeling_column_layout = pm.columnLayout(
'modeling_column_layout',
adj=True, cal="center", rs=row_spacing)
with modeling_column_layout:
color.reset()
pm.button('toggleFaceNormalDisplay_button',
l="toggle face normal display",
c=repeated_callback(
pm.runtime.ToggleFaceNormalDisplay),
ann="toggles face normal display",
bgc=color.color)
pm.button('reverseNormals_button', l="reverse normals",
c=repeated_callback(Modeling.reverse_normals),
ann="reverse normals",
bgc=color.color)
pm.button('fixNormals_button', l="fix normals",
c=repeated_callback(Modeling.fix_normals),
ann="applies setToFace then conform and then "
"soften edge to all selected objects",
bgc=color.color)
color.change()
pm.button(
'oyHierarchyInstancer_button',
l="hierarchy_instancer on selected",
c=repeated_callback(Modeling.hierarchy_instancer),
ann="hierarchy_instancer on selected",
bgc=color.color
)
color.change()
pm.button(
'relax_verts_button',
l="Relax Vertices",
c=repeated_callback(Modeling.relax_vertices),
ann="opens relax_vertices",
bgc=color.color
)
with pm.rowLayout(nc=4, adj=1):
def smooth_edges_callback():
iteration = pm.intSliderGrp(
"smooth_edges_iteration_intField", q=1, v=1
)
Modeling.smooth_edges(iteration=iteration)
pm.button(
'smooth_edges_button',
l="Smooth Edges",
c=repeated_callback(smooth_edges_callback),
ann=Modeling.smooth_edges.__doc__,
bgc=color.color
)
pm.intSliderGrp(
'smooth_edges_iteration_intField',
v=100,
min=0,
max=100
)
color.change()
pm.button(
'create_curve_from_mesh_edges_button',
l="Curve From Mesh Edges",
c=repeated_callback(Modeling.create_curve_from_mesh_edges),
ann="Creates a curve from selected mesh edges",
bgc=color.color
)
color.change()
pm.button(
'vertex_aligned_locator_button',
l="Vertex Aligned Locator",
c=repeated_callback(Modeling.vertex_aligned_locator),
ann="Creates an aligned locator from selected vertices",
bgc=color.color
)
color.change()
with pm.rowLayout(nc=8, rat=(1, "both", 0), adj=1):
pm.text('set_pivot_text', l='Set Pivot', bgc=color.color)
pm.button(
'center_button',
l="C",
c=repeated_callback(
Modeling.set_pivot,
0
),
bgc=(0.8, 0.8, 0.8)
)
pm.button(
'minus_X_button',
l="-X",
c=repeated_callback(
Modeling.set_pivot,
1
),
bgc=(1.000, 0.500, 0.666)
)
pm.button(
'plus_X_button',
l="+X",
c=repeated_callback(
Modeling.set_pivot,
2
),
bgc=(1.000, 0.500, 0.666)
)
pm.button(
'minus_Y_button',
l="-Y",
c=repeated_callback(
Modeling.set_pivot,
3
),
bgc=(0.666, 1.000, 0.500)
)
pm.button(
'plus_Y_button',
l="+Y",
c=repeated_callback(
Modeling.set_pivot,
4
),
bgc=(0.666, 1.000, 0.500)
)
pm.button(
'minus_Z_button',
l="-X",
c=repeated_callback(
Modeling.set_pivot,
5
),
bgc=(0.500, 0.666, 1.000)
)
pm.button(
'plus_Z_button',
l="+X",
c=repeated_callback(
Modeling.set_pivot,
6
),
bgc=(0.500, 0.666, 1.000)
)
color.change()
with pm.rowLayout(nc=7, rat=(1, "both", 0), adj=1):
pm.text(l='Text. Res', bgc=color.color)
pm.button(
l="128",
c=repeated_callback(
Modeling.set_texture_res,
128
),
bgc=Color.colors[0]
)
pm.button(
l="256",
c=repeated_callback(
Modeling.set_texture_res,
256
),
bgc=Color.colors[1]
)
pm.button(
l="512",
c=repeated_callback(
Modeling.set_texture_res,
512
),
bgc=Color.colors[2]
)
pm.button(
l="1024",
c=repeated_callback(
Modeling.set_texture_res,
1024
),
bgc=Color.colors[3]
)
pm.button(
l='2048',
c=repeated_callback(
Modeling.set_texture_res,
2048
),
bgc=Color.colors[4]
)
pm.button(
l='4096',
c=repeated_callback(
Modeling.set_texture_res,
4096
),
bgc=Color.colors[5]
)
pm.text(l='========== UV Tools =============')
color.change()
pm.button(
'fix_uvsets_button',
l="Fix UVSets (DiffuseUV -> map1)",
c=repeated_callback(Modeling.fix_uvsets),
ann=Modeling.fix_uvsets,
bgc=color.color
)
color.change()
pm.button(
'select_zero_uv_area_faces_button',
l="Filter Zero UV Area Faces",
c=repeated_callback(Modeling.select_zero_uv_area_faces),
ann="Selects faces with zero uv area",
bgc=color.color
)
color.change()
pm.button(
'create_auto_uvmap_button',
l='Create Auto UVMap',
c=repeated_callback(Modeling.create_auto_uvmap),
ann=Modeling.create_auto_uvmap.__doc__,
bgc=color.color
)
with pm.rowLayout(nc=6, adj=1):
def transfer_uvs_button_callback(*args, **kwargs):
label_lut = {
'W': 0,
'L': 1,
'UV': 2,
'C': 3,
'T': 4
}
sample_space = label_lut[
pm.radioCollection(
'transfer_uvs_radio_collection',
q=1, sl=1
)
]
Modeling.transfer_uvs(sample_space=sample_space)
pm.button('transfer_uvs_button',
l="Transfer UVs",
c=repeated_callback(transfer_uvs_button_callback),
ann="Transfers UVs from one group to other, use it"
"for LookDev -> Alembic",
bgc=color.color)
pm.radioCollection('transfer_uvs_radio_collection')
button_with = 40
pm.radioButton(
'W', w=button_with, al='left', ann='World'
)
pm.radioButton(
'L', w=button_with, al='left', ann='Local'
)
pm.radioButton(
'UV', w=button_with, al='left', ann='UV'
)
pm.radioButton(
'C', w=button_with, al='left', ann='Component', sl=1
)
pm.radioButton(
'T', w=button_with, al='left', ann='Topology'
)
color.change()
pm.text(l='======= Manipulator Tools =======')
pm.button('set_to_point_button',
l="Set To Point",
c=repeated_callback(pm.mel.eval, "manipMoveOrient 1;"),
ann="Set manipulator to the point",
bgc=color.color)
pm.button('set_to_edge_button',
l="Set To Edge",
c=repeated_callback(pm.mel.eval, "manipMoveOrient 2;"),
ann="Set manipulator to the edge",
bgc=color.color)
pm.button('set_to_face_button',
l="Set To Face",
c=repeated_callback(pm.mel.eval, "manipMoveOrient 3;"),
ann="Set manipulator to the face",
bgc=color.color)
color.change()
pm.button('create_bbox_from_selection_button',
l="Create BBOX from selection",
c=repeated_callback(Modeling.bbox_from_selection),
ann=Modeling.bbox_from_selection.__doc__,
bgc=color.color)
# store commands
__commands__.extend(modeling_column_layout.children())
# ----- RIGGING ------
rigging_columnLayout = pm.columnLayout(
'rigging_columnLayout',
adj=True, cal="center",
rs=row_spacing
)
with rigging_columnLayout:
color.reset()
pm.button(
'create_joints_on_curve_ui_button',
l="Create Joints On Curve UI",
c=repeated_callback(Rigging.create_joints_on_curve_ui),
ann=Rigging.create_joints_on_curve_ui.__doc__,
bgc=color.color
)
pm.button(
'mirror_transformation_button',
l="Mirror Transformation",
c=repeated_callback(Rigging.mirror_transformation),
ann=Rigging.mirror_transformation.__doc__,
bgc=color.color
)
color.change()
pm.button(
'IKFKLimbRigger_button',
l="IK/FK Limb Rigger",
c=repeated_callback(Rigging.ik_fk_limb_rigger),
ann=Rigging.ik_fk_limb_rigger.__doc__,
bgc=color.color
)
# Bendy IK/FK rigger row: a button plus an int field for the subdivision
# count used by the rig.
with pm.rowLayout(nc=2, adj=1):
    def ik_fk_limb_rigger_callback():
        """Call ``Rigging.bendy_ik_fk_limb_rigger`` with the subdivision
        count read from the neighboring int field.
        """
        subdivision = pm.intField('bendy_ik_fk_subdivision_count_field', q=1, v=1)
        Rigging.bendy_ik_fk_limb_rigger(subdivision=subdivision)
    pm.button(
        'bendy_ik_fk_limb_rigger_button',
        l='IK/FK Limb Rigger (Bendy)',
        c=repeated_callback(ik_fk_limb_rigger_callback),
        ann=Rigging.bendy_ik_fk_limb_rigger.__doc__,
        bgc=color.color
    )
    # UI field holding the subdivision count read by the callback above.
    pm.intField('bendy_ik_fk_subdivision_count_field', min=0, v=2)
pm.button(
'ReverseFootRigger_button',
l="Reverse Foot Rigger",
c=repeated_callback(Rigging.reverse_foot_rigger),
ann=Rigging.reverse_foot_rigger.__doc__,
bgc=color.color
)
pm.button(
'squashStretchBendRigger_button',
l="Squash/Stretch/Bend Rigger",
c=repeated_callback(Rigging.squash_stretch_bend_rigger),
ann=Rigging.squash_stretch_bend_rigger.__doc__,
bgc=color.color
)
pm.button(
'setupStretchySplineIKCurve_button',
l="setup stretchy splineIK curve",
c=repeated_callback(Rigging.setup_stretchy_spline_ik_curve),
ann="connects necessary nodes to calculate arcLength "
"change in percent",
bgc=color.color
)
pm.button(
'selectJointsDeformingTheObject_button',
l="select joints deforming the object",
c=repeated_callback(Rigging.select_joints_deforming_object),
ann="select joints that deform the object",
bgc=color.color
)
color.change()
pm.button(
'create_axial_correction_group_button',
l="Create Axial Correction Groups",
c=repeated_callback(Rigging.axial_correction_group),
ann=Rigging.axial_correction_group.__doc__,
bgc=color.color
)
pm.button(
    'create_zv_parent_compatible_groups_button',
    l="Create ZV Parent Compatible Groups",
    c=repeated_callback(Rigging.create_zv_parent_compatible_groups),
    # fix: the tooltip previously used axial_correction_group.__doc__
    # (copy-paste from the button above); use the docstring of the
    # function this button actually calls.
    ann=Rigging.create_zv_parent_compatible_groups.__doc__,
    bgc=color.color
)
color.change()
pm.button(
'setClustersToAbsolute_button',
l="set selected clusters to absolute",
c=repeated_callback(Rigging.set_clusters_relative_state, 0),
ann="set Clusters to Absolute",
bgc=color.color
)
pm.button(
'setClustersToRelative_button',
l="set selected clusters to relative",
c=repeated_callback(
Rigging.set_clusters_relative_state, 1
),
ann="set Clusters to Relative",
bgc=color.color
)
color.change()
pm.button(
'addControllerShape_button',
l="add controller shape",
c=repeated_callback(Rigging.add_controller_shape),
ann="add the shape in the selected joint",
bgc=color.color
)
pm.button(
'replaceControllerShape_button',
l="replace controller shape",
c=repeated_callback(Rigging.replace_controller_shape),
ann="replaces the shape in the selected joint",
bgc=color.color
)
color.change()
def pin_controller_callback(color, *args):
    """Creates Pin Controller on the selected Vertex
    """
    # NOTE: the ``color`` parameter is the RGB list passed by the R/G/B
    # buttons below; it shadows the outer ``color`` helper object.
    # The docstring above doubles as each button's tooltip (ann).
    # Imported here so the rigging module is only loaded on first use.
    from anima.env.mayaEnv import rigging
    # Assumes exactly one vertex is selected -- raises IndexError otherwise.
    vertex = pm.ls(sl=1)[0]
    pc = rigging.PinController()
    pc.color = color
    pc.pin_to_vertex = vertex
    pc.setup()
# TODO: Give the user the ability of selecting custom colors
# One row: label plus R/G/B pin-controller buttons, each colored to match
# the controller color it creates.
with pm.rowLayout(nc=4, adj=1):
    pm.text(l="Pin Controller")
    pm.button('pin_controller_red_button', l="R",
              c=repeated_callback(pin_controller_callback, [1, 0, 0]),
              ann=pin_controller_callback.__doc__,
              bgc=[1, 0, 0])
    pm.button('pin_controller_green_button', l="G",
              c=repeated_callback(pin_controller_callback, [0, 1, 0]),
              ann=pin_controller_callback.__doc__,
              bgc=[0, 1, 0])
    pm.button('pin_controller_blue_button', l="B",
              c=repeated_callback(pin_controller_callback, [0, 0, 1]),
              ann=pin_controller_callback.__doc__,
              bgc=[0, 0, 1])
pm.button('rivet_button', l="create rivet",
c=repeated_callback(mel.eval, 'rivet'),
ann="create rivet",
bgc=color.color)
pm.button('oyAutoRivet_button', l="auto rivet",
c=repeated_callback(mel.eval, 'oyAutoRivet'),
ann="auto rivet",
bgc=color.color)
pm.button(
'oyAutoRivetFollicle_button',
l="auto rivet (Follicle)",
c=repeated_callback(auxiliary.auto_rivet),
ann="creates a rivet setup by using hair follicles",
bgc=color.color
)
pm.button(
'rivet_per_face_button',
l="rivet per face (Follicle)",
c=repeated_callback(auxiliary.rivet_per_face),
ann="creates a rivet setup per selected face by using hair "
"follicles",
bgc=color.color
)
pm.button('create_hair_from_curves_button',
l="Create Hair From Curves",
c=repeated_callback(auxiliary.hair_from_curves),
ann="creates hair from curves",
bgc=color.color)
color.change()
pm.button('artPaintSkinWeightsTool_button',
l="paint weights tool",
c=repeated_callback(mel.eval, 'ArtPaintSkinWeightsTool'),
ann="paint weights tool",
bgc=color.color)
def skin_tools_ui_caller(*args):
    """Open the Skin Tools UI window.

    Extra positional args from the Maya button callback are ignored.
    """
    # Imported lazily so the rigging module loads only when the button
    # is pressed.
    from anima.env.mayaEnv.rigging import SkinToolsUI
    SkinToolsUI().ui()
pm.button('skin_tools_button', l="Skin Tools",
c=skin_tools_ui_caller,
ann="skin tools",
bgc=color.color)
pm.button('oyFixBoundJoint_button', l="fix_bound_joint",
c=repeated_callback(Rigging.fix_bound_joint),
ann="fix_bound_joint",
bgc=color.color)
pm.button('toggle_local_rotation_axes_button',
l="Toggle Local Rotation Axes",
c=repeated_callback(General.toggle_attributes, "displayLocalAxis"),
ann="Toggle Local Rotation Axes",
bgc=color.color)
pm.button('toggle_display_rotate_pivot_button',
l="Toggle Display Rotate Pivot",
c=repeated_callback(General.toggle_attributes, "displayRotatePivot"),
ann="Toggle Display Rotate Pivot",
bgc=color.color)
pm.button('seroBlendController_button',
l="seroBlendController",
c=repeated_callback(mel.eval, 'seroBlendController'),
ann="seroBlendController",
bgc=color.color)
pm.button('align_to_pole_vector_button',
l="Align To Pole Vector",
c=repeated_callback(auxiliary.align_to_pole_vector),
ann="align to pole vector",
bgc=color.color)
color.change()
pm.button('oyResetCharSet_button', l="oyResetCharSet",
c=repeated_callback(mel.eval, 'oyResetCharSet'),
ann="reset char set",
bgc=color.color)
pm.button('export_blend_connections_button',
l="Export blend connections",
c=repeated_callback(auxiliary.export_blend_connections),
ann="export blend connections",
bgc=color.color)
color.change()
pm.button('createFollicles_button', l="create follicles",
c=repeated_callback(Rigging.create_follicles),
ann="create follicles",
bgc=color.color)
color.change()
pm.button('oyResetTweaks_button', l="reset tweaks",
c=repeated_callback(Rigging.reset_tweaks),
ann="reset tweaks",
bgc=color.color)
color.change()
def add_cacheable_attribute_callback():
    """add <b>cacheable</b> attribute to the selected nodes
    """
    # The docstring above is shown as the button tooltip (ann), hence the
    # HTML markup in it.
    for node in pm.selected():
        Rigging.add_cacheable_attribute(node)
pm.button('add_cacheable_attr_button', l="add `cacheable` attribute",
c=repeated_callback(add_cacheable_attribute_callback),
ann=add_cacheable_attribute_callback.__doc__,
bgc=color.color)
# store commands
__commands__.extend(rigging_columnLayout.children())
# ----- RENDER ------
render_columnLayout = pm.columnLayout(
'render_columnLayout',
adj=True,
cal="center",
rs=row_spacing
)
with render_columnLayout:
color.reset()
color.change()
pm.button(
'update_render_settings_button',
l="Update Render Settings",
c=repeated_callback(Render.update_render_settings),
ann=Render.update_render_settings.__doc__,
bgc=color.color
)
color.change()
pm.button(
'delete_render_layers_button',
l="Delete Render Layers",
c=repeated_callback(Render.delete_render_layers),
ann=Render.delete_render_layers.__doc__,
bgc=color.color
)
pm.button(
'delete_display_layers_button',
l="Delete Display Layers",
c=repeated_callback(Render.delete_display_layers),
ann=Render.delete_display_layers.__doc__,
bgc=color.color
)
pm.button(
'delete_render_and_display_layers_button',
l="Delete Render and Display Layers",
c=repeated_callback(Render.delete_render_and_display_layers),
ann=Render.delete_render_and_display_layers.__doc__,
bgc=color.color
)
color.change()
pm.button(
'delete_unused_shading_nodes_button',
l="Delete Unused Shading Nodes",
c=repeated_callback(Render.delete_unused_shading_nodes),
ann=Render.delete_unused_shading_nodes.__doc__,
bgc=color.color
)
color.change()
pm.button(
'duplicate_input_graph_button',
l="Duplicate Input Graph",
c=repeated_callback(Render.duplicate_input_graph),
ann=Render.duplicate_input_graph.__doc__,
bgc=color.color
)
pm.button(
'duplicate_with_connections_button',
l="Duplicate With Connections To Network",
c=repeated_callback(Render.duplicate_with_connections),
ann=Render.duplicate_with_connections.__doc__,
bgc=color.color
)
color.change()
pm.text(l='=========== RedShift Tools ===========')
pm.button(
'generate_rs_from_selection_button',
l='Generate RSProxy From Selection',
c=repeated_callback(Render.generate_rsproxy_from_selection),
ann=Render.generate_rsproxy_from_selection.__doc__,
bgc=color.color
)
pm.button(
'generate_rs_from_selection_per_selection_button',
l='Generate RSProxy From Selection (Per Selection)',
c=repeated_callback(Render.generate_rsproxy_from_selection, True),
ann=Render.generate_rsproxy_from_selection.__doc__,
bgc=color.color
)
pm.button(
'set_rsproxy_to_bbox_button',
l='RSProxy -> Bounding Box',
c=repeated_callback(Render.rsproxy_to_bounding_box),
ann=Render.rsproxy_to_bounding_box.__doc__,
bgc=color.color
)
pm.button(
'set_rsproxy_to_preview_mesh_button',
l='RSProxy -> Preview Mesh',
c=repeated_callback(Render.rsproxy_to_preview_mesh),
ann=Render.rsproxy_to_preview_mesh.__doc__,
bgc=color.color
)
color.change()
pm.text(l='===== RedShift IC + IPC Bake =====')
pm.button(
'redshift_ic_ipc_bake_button',
l="Do Bake",
c=repeated_callback(Render.redshift_ic_ipc_bake),
ann=Render.redshift_ic_ipc_bake.__doc__,
bgc=color.color
)
pm.button(
'redshift_ic_ipc_bake_restore_button',
l="Restore Settings",
c=repeated_callback(Render.redshift_ic_ipc_bake_restore),
ann=Render.redshift_ic_ipc_bake_restore.__doc__,
bgc=color.color
)
pm.text(l='======================================')
color.change()
pm.button(
'submit_afanasy_button',
l="Afanasy Job Submitter",
c=repeated_callback(Render.afanasy_job_submitter),
ann=Render.afanasy_job_submitter.__doc__,
bgc=color.color
)
color.change()
pm.button(
'open_node_in_browser_button',
l="Open node in browser",
c=repeated_callback(Render.open_node_in_browser),
ann="Open node in browser",
bgc=color.color
)
color.change()
pm.button('auto_convert_to_redshift_button',
l="Auto Convert Scene To RedShift (BETA)",
c=repeated_callback(Render.auto_convert_to_redshift),
ann="Automatically converts the scene from Arnold to "
"Redshift, including materials and lights",
bgc=color.color)
pm.button('convert_nodes_to_redshift_button',
l="Convert Selected To RedShift (BETA)",
c=repeated_callback(Render.convert_nodes_to_redshift),
ann="Automatically converts the selected node from "
"Arnold to Redshift",
bgc=color.color)
def set_shape_attribute_wrapper(attr_name, value):
    """a wrapper function for set_shape_attribute

    Reads the "apply to hierarchy" and "disable undo" checkbox states
    from the UI and forwards them to ``Render.set_shape_attribute``.

    :param attr_name: name of the shape attribute to set
    :param value: value to set the attribute to
    """
    # NOTE(review): apply_to_hierarchy_checkBox and
    # disable_undo_queue_check_box are created earlier in this file,
    # outside this chunk -- presumably pm.checkBox controls; confirm.
    apply_to_hierarchy = pm.checkBox(
        apply_to_hierarchy_checkBox,
        q=True,
        v=True
    )
    disable_undo = pm.checkBox(
        disable_undo_queue_check_box,
        q=True,
        v=True
    )
    Render.set_shape_attribute(
        attr_name,
        value,
        apply_to_hierarchy,
        disable_undo
    )
with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1):
pm.text('renderThumbnailUpdate_text',
l="renderThumbnailUpdate",
bgc=color.color)
pm.button('set_renderThumbnailUpdate_ON_button',
l="ON",
c=repeated_callback(pm.renderThumbnailUpdate, 1),
bgc=(0, 1, 0))
pm.button('set_renderThumbnailUpdate_OFF_button',
l="OFF",
c=repeated_callback(pm.renderThumbnailUpdate, 0),
bgc=(1, 0, 0))
color.change()
pm.button('replaceShadersWithLast_button',
l="replace shaders with last",
c=repeated_callback(Render.replace_shaders_with_last),
ann="replace shaders with last",
bgc=color.color)
color.change()
pm.button('createTextureRefObject_button',
l="create texture ref. object",
c=repeated_callback(Render.create_texture_ref_object),
ann="create texture ref. object",
bgc=color.color)
pm.text(l='========== Texture Tools =============')
color.change()
pm.button('assign_substance_textures_button',
l="Assign Substance Textures",
c=repeated_callback(Render.assign_substance_textures),
ann=Render.assign_substance_textures.__doc__,
bgc=color.color)
color.change()
pm.button('normalize_texture_paths_button',
l="Normalize Texture Paths (remove $)",
c=repeated_callback(Render.normalize_texture_paths),
ann=Render.normalize_texture_paths.__doc__,
bgc=color.color)
pm.button('unnormalize_texture_paths_button',
l="Unnormalize Texture Paths (add $)",
c=repeated_callback(Render.unnormalize_texture_paths),
ann=Render.unnormalize_texture_paths.__doc__,
bgc=color.color)
color.change()
pm.button('assign_random_material_color_button',
l="Assign Material with Random Color",
c=repeated_callback(Render.assign_random_material_color),
ann=Render.assign_random_material_color.__doc__,
bgc=color.color)
pm.button('randomize_material_color_button',
l="Randomize Material Color",
c=repeated_callback(Render.randomize_material_color),
ann=Render.randomize_material_color.__doc__,
bgc=color.color)
color.change()
pm.button('import_image_as_plane_button',
l="Import Image as Plane",
c=repeated_callback(Render.import_image_as_plane),
ann=Render.import_image_as_plane.__doc__,
bgc=color.color)
pm.text(l='============ Camera Tools ============')
color.change()
pm.button(
'CameraFilmOffsetTool_button',
l="Camera Film Offset Tool",
c=repeated_callback(
camera_tools.camera_film_offset_tool
),
ann="Camera Film Offset Tool",
bgc=color.color
)
def camera_focus_plane_tool_callback():
    """callback for the camera_focus_plane_tool

    Runs the focus plane tool on the first selected node, which is
    assumed to be a camera (raises IndexError on empty selection).
    """
    camera = pm.ls(sl=1)[0]
    camera_tools.camera_focus_plane_tool(camera)
pm.button(
    'CameraFocusPlaneTool_button',
    l="Camera Focus Plane Tool",
    c=repeated_callback(camera_focus_plane_tool_callback),
    # fix: tooltip previously said "Camera Film Offset Tool",
    # copy-pasted from the button above.
    ann="Camera Focus Plane Tool",
    bgc=color.color
)
pm.button(
'lock_tracked_camera_channels_button',
l="Lock Tracked Camera Channels",
c=repeated_callback(camera_tools.lock_tracked_camera_channels),
ann=camera_tools.lock_tracked_camera_channels.__doc__,
bgc=color.color
)
color.change()
pm.text(l='===== Vertigo =====')
pm.button('vertigo_setup_look_at_button',
l="Setup -> Look At",
c=repeated_callback(Render.vertigo_setup_look_at),
ann="Setup Look At",
bgc=color.color)
pm.button('vertigo_setup_vertigo_button',
l="Setup -> Vertigo",
c=repeated_callback(Render.vertigo_setup_vertigo),
ann="Setup Vertigo",
bgc=color.color)
pm.button('vertigo_delete_button',
l="Delete",
c=repeated_callback(Render.vertigo_delete),
ann="Delete",
bgc=color.color)
pm.text(l='===================')
pm.button('oyTracker2Null_button', l="oyTracker2Null",
c=repeated_callback(mel.eval, 'oyTracker2Null'),
ann="Tracker2Null",
bgc=color.color)
with pm.rowLayout(nc=3, adj=1):
def import_3dequalizer_points_callback():
    """callback for Import 3DEqualizer points

    Reads the camera resolution from the two neighboring int fields and
    forwards it to ``camera_tools.import_3dequalizer_points``.
    """
    width = pm.intField(
        'import_3DEqualizer_points_width_int_field', q=1, v=1
    )
    height = pm.intField(
        'import_3DEqualizer_points_height_int_field', q=1, v=1
    )
    camera_tools.import_3dequalizer_points(width, height)
pm.button(
'import_3DEqualizer_points_button', l="Import 3DEqualizer Points",
c=repeated_callback(import_3dequalizer_points_callback),
ann=camera_tools.import_3dequalizer_points.__doc__,
bgc=color.color
)
pm.intField('import_3DEqualizer_points_width_int_field', min=1, v=1920)
pm.intField('import_3DEqualizer_points_height_int_field', min=1, v=1080)
pm.text(l='===================')
color.change()
pm.button('reloadFileTextures_button',
l="reload file textures",
c=repeated_callback(Render.reload_file_textures),
ann="reload file textures",
bgc=color.color)
color.change()
pm.button('transfer_shaders_button',
l="Transfer Shaders",
c=repeated_callback(Render.transfer_shaders),
ann="Transfers shaders from one group to other, use it"
"for LookDev -> Alembic",
bgc=color.color)
color.change()
pm.button('fitPlacementToUV_button',
l="fit placement to UV",
c=repeated_callback(Render.fit_placement_to_UV),
ann="fit placement to UV",
bgc=color.color)
pm.button(
'connect_placement2d_to_file_texture_button',
l='Connect Placement2D to File Texture',
c=repeated_callback(Render.connect_placement2d_to_file),
ann=Render.connect_placement2d_to_file.__doc__,
bgc=color.color
)
color.change()
# Adaptive subdiv row: a button plus an int field for the max subdiv level.
with pm.rowLayout(nc=2, adj=1):
    def enable_subdiv_callback():
        """Enable adaptive subdiv on the selection, using the max
        subdivision level read from the neighboring int field.
        """
        max_tess = pm.intField('enable_subdiv_int_field', q=1, v=1)
        Render.enable_subdiv_on_selected(
            max_subdiv=max_tess, fixed_tes=False
        )
    pm.button(
        'enable_subdiv_on_selected_objects_button',
        l='Enable Subdiv (Adaptive)',
        c=repeated_callback(enable_subdiv_callback),
        ann='Enables Arnold/RedShift Subdiv (catclark) on '
            'selected objects',
        bgc=color.color
    )
    # Max subdivision level read by the callback above.
    pm.intField('enable_subdiv_int_field', min=0, v=3)
# Fixed-tessellation subdiv row: a button plus an int field for the level.
with pm.rowLayout(nc=2, adj=1):
    def fixed_tess_callback():
        """Enable fixed-tessellation subdiv on the selection, using the
        level read from the neighboring int field.
        """
        max_tess = pm.intField('fixed_tess_int_field', q=1, v=1)
        Render.enable_subdiv_on_selected(
            fixed_tes=True, max_subdiv=max_tess
        )
    pm.button(
        'enable_fixed_subdiv_on_selected_objects_button',
        l='Enable Subdiv (Fixed Tes.)',
        c=repeated_callback(fixed_tess_callback),
        ann='Enables Arnold/RedShift Subdiv (catclark) on selected '
            'objects with fixed tessellation',
        bgc=color.color
    )
    # Fixed tessellation level read by the callback above.
    pm.intField('fixed_tess_int_field', min=0, v=1)
pm.button(
    'disable_subdiv_on_selected_objects_button',
    l='Disable Subdiv',
    c=repeated_callback(Render.disable_subdiv_on_selected),
    # fix: tooltip previously used Render.disable_subdiv.__doc__; use the
    # docstring of the function this button actually calls.
    ann=Render.disable_subdiv_on_selected.__doc__,
    bgc=color.color
)
color.change()
pm.button(
'export_shader_data_button',
l='Export Shader Attributes',
c=repeated_callback(Render.export_shader_attributes),
ann=Render.export_shader_attributes.__doc__,
bgc=color.color
)
pm.button(
'import_shader_data_button',
l='Import Shader Attributes',
c=repeated_callback(Render.import_shader_attributes),
ann=Render.import_shader_attributes.__doc__,
bgc=color.color
)
color.change()
pm.button(
'export_shader_to_houdini_button',
l='Export Shader Assignments To Houdini',
c=repeated_callback(Render.export_shader_assignments_to_houdini),
ann=Render.export_shader_assignments_to_houdini.__doc__,
bgc=color.color
)
color.change()
pm.button(
'create_eye_shader_and_controls_button',
l='Create Eye Shader and Controls',
c=repeated_callback(Render.create_eye_shader_and_controls),
ann='Creates eye shaders and controls for the selected eyes',
bgc=color.color
)
pm.button(
'setup_outer_eye_render_attributes_button',
l='Setup Outer Eye Render Attributes',
c=repeated_callback(Render.setup_outer_eye_render_attributes),
ann=Render.setup_outer_eye_render_attributes.__doc__,
bgc=color.color
)
pm.button(
'setup_window_glass_render_attributes_button',
l='Setup **Window Glass** Render Attributes',
c=repeated_callback(Render.setup_window_glass_render_attributes),
ann=Render.setup_window_glass_render_attributes.__doc__,
bgc=color.color
)
pm.button(
'setup_dummy_window_light_button',
l='Setup/Update **Dummy Window** Light Plane',
c=repeated_callback(Render.dummy_window_light_plane),
ann=Render.dummy_window_light_plane.__doc__,
bgc=color.color
)
color.change()
pm.button(
    'create_generic_tooth_shader_button',
    l='Create Generic TOOTH Shader',
    c=repeated_callback(Render.create_generic_tooth_shader),
    # fix: tooltip previously used create_generic_gum_shader.__doc__
    # (copy-paste from the GUM button); use the tooth shader's docstring.
    ann=Render.create_generic_tooth_shader.__doc__,
    bgc=color.color
)
pm.button(
'create_generic_gum_shader_button',
l='Create Generic GUM Shader',
c=repeated_callback(Render.create_generic_gum_shader),
ann=Render.create_generic_gum_shader.__doc__,
bgc=color.color
)
pm.button(
'create_generic_tongue_shader_button',
l='Create Generic TONGUE Shader',
c=repeated_callback(Render.create_generic_tongue_shader),
ann=Render.create_generic_tongue_shader.__doc__,
bgc=color.color
)
color.change()
pm.button('convert_to_ai_image_button',
l="To aiImage",
c=repeated_callback(
Render.convert_file_node_to_ai_image_node),
ann="Converts the selected File (file texture) nodes to "
"aiImage nodes, also connects the place2dTexture "
"node if necessary",
bgc=color.color)
color.change()
pm.button('to_bbox_button',
l="aiStandIn To BBox",
c=repeated_callback(Render.standin_to_bbox),
ann="Convert selected stand ins to bbox",
bgc=color.color)
pm.button('to_polywire_button',
l="aiStandIn To Polywire",
c=repeated_callback(Render.standin_to_polywire),
ann="Convert selected stand ins to polywire",
bgc=color.color)
color.change()
with pm.rowLayout(nc=3, adj=3, bgc=color.color):
min_range_field = pm.floatField(
minValue=1000,
maxValue=50000,
step=1,
pre=0,
value=3500,
w=50,
bgc=color.color,
ann='Min Value'
)
max_range_field = pm.floatField(
minValue=1000,
maxValue=50000,
step=1,
pre=0,
value=6500,
w=50,
bgc=color.color,
ann='Max Value'
)
pm.button(
ann="Randomize Color Temperature",
l="Randomize Color Temp.",
w=70,
c=repeated_callback(
Render.randomize_light_color_temp,
min_range_field,
max_range_field
),
bgc=color.color
)
with pm.rowLayout(nc=3, adj=3, bgc=color.color):
min_range_field = pm.floatField(
minValue=0,
maxValue=200,
step=0.1,
pre=1,
value=10,
w=50,
bgc=color.color,
ann='Min Value'
)
max_range_field = pm.floatField(
minValue=0,
maxValue=200,
step=0.1,
pre=1,
value=20,
w=50,
bgc=color.color,
ann='Max Value'
)
pm.button(
ann="Randomize Exposure",
l="Randomize Exposure",
w=70,
c=repeated_callback(
Render.randomize_light_intensity,
min_range_field,
max_range_field
),
bgc=color.color
)
color.change()
pm.button(
ann="Create Reflection Curve",
l="Reflection Curve",
c=repeated_callback(
Render.generate_reflection_curve
),
bgc=color.color
)
color.change()
pm.button(
ann="Import GPU Content",
l="Import GPU Content",
c=repeated_callback(
Render.import_gpu_content
),
bgc=color.color
)
color.change()
with pm.rowLayout(nc=3, adj=3, bgc=color.color):
source_driver_field = pm.textField(
text='S:',
w=50,
bgc=color.color,
ann='Source Driver'
)
target_driver_field = pm.textField(
text='L:',
w=50,
bgc=color.color,
ann='Target Driver'
)
pm.button(
ann="Move Cache Files to Another Location",
l="Move Cache Files",
w=70,
c=repeated_callback(
Render.move_cache_files_wrapper,
source_driver_field,
target_driver_field
),
bgc=color.color
)
# store commands
__commands__.extend(render_columnLayout.children())
# ----- PREVIS ------
previs_columnLayout = pm.columnLayout(
'previs_columnLayout',
adj=True,
cal="center",
rs=row_spacing
)
with previs_columnLayout:
color.reset()
pm.button('split_camera_button',
l="Split Camera",
c=repeated_callback(Previs.split_camera),
ann=Previs.split_camera.__doc__,
bgc=color.color)
color.change()
pm.button('shots_from_camera_button',
l="Shots From Camera",
c=repeated_callback(Previs.shots_from_cams),
ann=Previs.shots_from_cams.__doc__,
bgc=color.color)
color.change()
pm.button('auto_rename_shots_button',
l="Auto Rename Shots",
c=repeated_callback(Previs.auto_rename_shots),
ann=Previs.auto_rename_shots.__doc__,
bgc=color.color)
color.change()
pm.button('save_previs_to_shots_button',
l="Save Previs To Shots",
c=repeated_callback(Previs.save_previs_to_shots),
ann=Previs.save_previs_to_shots.__doc__,
bgc=color.color)
color.change()
pm.button('very_nice_camera_rig_button',
l="Create a Very Nice Camera Rig",
c=repeated_callback(camera_tools.very_nice_camera_rig),
ann=camera_tools.very_nice_camera_rig.__doc__,
bgc=color.color)
# store commands
__commands__.extend(previs_columnLayout.children())
# ----- ANIMATION ------
animation_columnLayout = pm.columnLayout(
'animation_columnLayout',
adj=True,
cal="center",
rs=row_spacing
)
with animation_columnLayout:
color.reset()
color.change()
from anima.env.mayaEnv import picker
pm.text(l='===== Object Picker =====')
pm.button('picker_setParent_button',
l="Set Parent",
c=repeated_callback(picker.set_parent),
ann="Set Parent",
bgc=color.color)
pm.button('picker_releaseObject_button',
l="Release",
c=repeated_callback(picker.release_object),
ann="Release Object",
bgc=color.color)
pm.button('picker_editKeyframes_button',
l="Edit Keyframes",
c=repeated_callback(picker.edit_keyframes),
ann="Edit Keyframes",
bgc=color.color)
pm.button('picker_fixJump_button',
l="Fix Jump",
c=repeated_callback(picker.fix_jump),
ann="Fix Jump",
bgc=color.color)
pm.button('picker_explodeSetup_button',
l="Explode",
c=repeated_callback(picker.explode_setup),
ann="Explode Setup",
bgc=color.color)
color.change()
from anima.env.mayaEnv import pivot_switcher
pm.text(l='===== Pivot Switcher =====')
pm.button('oyPivotSwitcher_setupPivot_button',
l="Setup",
c=repeated_callback(pivot_switcher.setup_pivot),
ann="Setup Pivot",
bgc=color.color)
pm.button('oyPivotSwitcher_switchPivot_button',
l="Switch",
c=repeated_callback(pivot_switcher.switch_pivot),
ann="Switch Pivot",
bgc=color.color)
pm.button('oyPivotSwitcher_togglePivot_button',
l="Toggle",
c=repeated_callback(pivot_switcher.toggle_pivot),
ann="Toggle Pivot",
bgc=color.color)
color.change()
pm.text(l='===== Alembic Tools =====')
pm.button('bake_all_constraints_button',
l="Bake All Constraints",
c=repeated_callback(Animation.bake_all_constraints),
ann=Animation.bake_all_constraints.__doc__,
bgc=color.color)
pm.button('bake_alembic_animations_button',
l="Bake Alembic Animations",
c=repeated_callback(Animation.bake_alembic_animations),
ann=Animation.bake_alembic_animations.__doc__,
bgc=color.color)
rowLayout = pm.rowLayout(nc=2, adj=1, bgc=color.color)
with rowLayout:
pm.button(
'abc_from_selected_button',
l='From Selected',
c=repeated_callback(Animation.create_alembic_command),
ann='Creates Alembic Cache from selected nodes',
bgc=color.color
)
from_top_node_checkBox = pm.checkBox(
'from_top_node_checkBox',
l="Top Node",
value=True,
bgc=color.color
)
# pm.button(
# 'abc_from_source_to_target_button',
# l='Source -> Target',
# c=repeated_callback(Animation.copy_alembic_data),
# ann='Copy Alembic Data from Source to Target by the matching '
# 'node names',
# bgc=color.color
# )
# rowLayout = pm.rowLayout(nc=2, adj=1, bgc=color.color)
pm.text(l='===== EXPORT =====')
with pm.rowLayout(nc=3, adj=3):
pm.checkBoxGrp(
'export_alembic_of_nodes_checkbox_grp',
l='Alembic Options',
numberOfCheckBoxes=2,
labelArray2=['Isolate', 'Unload Refs'],
cl3=['left', 'left', 'left'],
cw3=[100, 60, 60],
valueArray2=[1, 1]
)
pm.intFieldGrp(
'export_alembic_of_nodes_handles_int_slider_grp',
l='Handles',
el='frames',
nf=1,
adj=2,
cw3=[65, 1, 20],
v1=1,
)
def export_alembic_callback_with_options(func):
    """Invoke *func* with the alembic export options read from the UI.

    :param func: export function, called with ``isolate``,
        ``unload_refs`` and ``handles`` keyword arguments
    :return:
    """
    # valueArray2 query returns [isolate, unload_refs] in that order.
    options = pm.checkBoxGrp(
        'export_alembic_of_nodes_checkbox_grp', q=1, valueArray2=1
    )
    frame_handles = pm.intFieldGrp(
        'export_alembic_of_nodes_handles_int_slider_grp', q=1, v1=1
    )
    func(
        isolate=options[0],
        unload_refs=options[1],
        handles=frame_handles
    )
pm.button(
'export_alembic_of_selected_cacheable_nodes_button',
l='Selected Cacheable Nodes',
c=repeated_callback(export_alembic_callback_with_options, auxiliary.export_alembic_of_selected_cacheable_nodes),
ann=auxiliary.export_alembic_of_selected_cacheable_nodes.__doc__.split('\n')[0],
bgc=color.color
)
pm.button(
'export_alembic_of_all_cacheable_nodes_button',
l='ALL Cacheable Nodes',
c=repeated_callback(export_alembic_callback_with_options, auxiliary.export_alembic_of_all_cacheable_nodes),
ann=auxiliary.export_alembic_of_all_cacheable_nodes.__doc__.split('\n')[0],
bgc=color.color
)
pm.button(
'export_alembic_on_farm_button',
l='Export Alembic On Farm',
c=repeated_callback(Animation.export_alembics_on_farm),
ann=Animation.export_alembics_on_farm.__doc__.split('\n')[0],
bgc=color.color
)
pm.text(l='===== Playblast Tools =====')
color.change()
pm.button(
    'playblast_on_farm_button',
    # fix: label typo "PLayblast" -> "Playblast"
    l='Playblast On Farm',
    c=repeated_callback(Animation.playblast_on_farm),
    ann=Animation.playblast_on_farm.__doc__.split('\n')[0],
    bgc=color.color
)
pm.text(l='===== Exporters =====')
color.change()
rowLayout = pm.rowLayout(nc=3, adj=3, bgc=color.color)
with rowLayout:
start = int(pm.playbackOptions(q=1, minTime=1))
end = int(pm.playbackOptions(q=1, maxTime=1))
startButtonField = pm.textField(
text=start, w=50, bgc=color.color, ann='start frame'
)
endButtonField = pm.textField(
text=end, w=50, bgc=color.color, ann='end frame'
)
pm.button(ann="Exports maya camera to nuke",
l="cam2chan", w=70,
c=repeated_callback(
Animation.cam_2_chan,
startButtonField,
endButtonField
),
bgc=color.color)
pm.text(l='===== Component Animation =====')
color.change()
smooth_selected_keyframes_text_fbg = pm.textFieldButtonGrp(
'smooth_selected_keyframes_text_fbg_button',
bl="Smooth Selected Keyframes",
adj=2, tx=1, cw=(1, 40),
ann="select keyframes in graph editor to smooth",
bgc=color.color
)
def smooth_selected_keyframes_text_fbg_callback():
iteration = int(
pm.textFieldButtonGrp(
"smooth_selected_keyframes_text_fbg_button", q=1, tx=1
)
)
Animation.smooth_selected_keyframes(iteration)
pm.textFieldButtonGrp(
smooth_selected_keyframes_text_fbg,
e=1,
bc=repeated_callback(
smooth_selected_keyframes_text_fbg_callback
)
)
smooth_component_anim = pm.textFieldButtonGrp(
'oySmoothComponentAnimation_button',
bl="Smooth Component Animation",
adj=2, tx=1, cw=(1, 40),
ann="select components to smooth",
bgc=color.color
)
pm.textFieldButtonGrp(
smooth_component_anim,
e=1,
bc=repeated_callback(
Animation.smooth_component_animation,
smooth_component_anim
)
)
color.change()
pm.button(
'bake_component_animation_button',
l='Bake component animation to Locator',
c=repeated_callback(Animation.bake_component_animation),
ann='Creates a locator at the center of selected components '
'and moves it with the components along the current '
'frame range',
bgc=color.color
)
pm.button(
'create_follicle_button',
l='Attach Follicle',
c=repeated_callback(Animation.attach_follicle),
ann='Attaches a follicle in the selected components',
bgc=color.color
)
pm.button(
'equalize_node_speed_button',
l='Equalize Node Speed',
c=repeated_callback(Animation.equalize_node_speed),
ann=Animation.equalize_node_speed.__doc__,
bgc=color.color
)
pm.text(l='===== Generic Tools =====')
color.change()
pm.button(
'set_range_from_shot_node_button',
l='Range From Shot',
c=repeated_callback(Animation.set_range_from_shot),
ann='Sets the playback range from the shot node in the scene',
bgc=color.color
)
color.change()
pm.button(
'delete_base_anim_layer_button',
l='Delete Base Anim Layer',
c=repeated_callback(Animation.delete_base_anim_layer),
ann=Animation.delete_base_anim_layer.__doc__,
bgc=color.color
)
# store commands
__commands__.extend(animation_columnLayout.children())
# Obsolete
obsolete_columnLayout = pm.columnLayout(
'obsolete_columnLayout',
adj=True,
cal="center",
ann="Obsolete",
rs=row_spacing
)
with obsolete_columnLayout:
color.reset()
pm.button('addMiLabel_button', l="add miLabel to selected",
c=repeated_callback(Render.add_miLabel),
ann="add miLabel to selected",
bgc=color.color)
color.change()
pm.button('connectFacingRatioToVCoord_button',
l="connect facingRatio to vCoord",
c=repeated_callback(
Render.connect_facingRatio_to_vCoord),
ann="connect facingRatio to vCoord",
bgc=color.color)
color.change()
with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1):
pm.text('miFinalGatherCast_text',
l="miFinalGatherCast",
bgc=color.color)
pm.button('set_miFinalGatherCast_ON_button', l="ON",
c=repeated_callback(
set_shape_attribute_wrapper,
"miFinalGatherCast",
1
),
bgc=(0, 1, 0))
pm.button('set_miFinalGatherCast_OFF_button', l="OFF",
c=repeated_callback(
set_shape_attribute_wrapper,
"miFinalGatherCast",
0
),
bgc=(1, 0, 0))
with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1):
pm.text('miFinalGatherReceive_text',
l="miFinalGatherReceive",
bgc=color.color)
pm.button('set_miFinalGatherReceive_ON_button', l="ON",
c=repeated_callback(
set_shape_attribute_wrapper,
"miFinalGatherReceive",
1
),
bgc=(0, 1, 0))
pm.button('set_miFinalGatherReceive_OFF_button',
l="OFF",
c=repeated_callback(
set_shape_attribute_wrapper,
"miFinalGatherReceive",
0
),
bgc=(1, 0, 0))
with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1):
pm.text('miFinalGatherHide_text',
l="miFinalGatherHide",
bgc=color.color)
pm.button('set_miFinalGatherHide_ON_button', l="ON",
c=repeated_callback(Render.set_finalGatherHide, 1),
bgc=(0, 1, 0))
pm.button('set_miFinalGatherHide_OFF_button', l="OFF",
c=repeated_callback(Render.set_finalGatherHide, 0),
bgc=(1, 0, 0))
color.change()
pm.button('convertToMRTexture_button',
l="use mib_texture_filter_lookup",
c=repeated_callback(
Render.use_mib_texture_filter_lookup),
ann=(
"adds an mib_texture_filter_lookup node in \n" +
"between the file nodes and their outputs, to \n" +
"get a sharper look output from the texture file"),
bgc=color.color)
pm.button('convertToLinear_button',
l="convert to Linear texture",
c=repeated_callback(Render.convert_to_linear),
ann="convert to Linear texture",
bgc=color.color)
pm.button('useImageSequence_button',
l="use image sequence for \nmentalrayTexture",
c=repeated_callback(Render.use_image_sequence),
ann="use image sequence for \nmentalrayTexture",
bgc=color.color)
color.change()
pm.button('oyAddToSelectedContainer_button',
l="add to selected container",
c=repeated_callback(Render.add_to_selected_container),
ann="add to selected container",
bgc=color.color)
pm.button('oyRemoveFromContainer_button',
l="remove from selected container",
c=repeated_callback(Render.remove_from_container),
ann="remove from selected container",
bgc=color.color)
color.change()
pm.button('oySmedgeRenderSlicer_button',
l="oySmedgeRenderSlicer",
c=repeated_callback(mel.eval, 'oySmedgeRenderSlicer'),
ann="SmedgeRenderSlicer",
bgc=color.color)
color.change()
pm.button(
'exponentialSmooth_button',
l="exponential smooth",
c=repeated_callback(Modeling.polySmoothFace, 0),
ann="applies exponential smooth to selected objects",
bgc=color.color
)
pm.button(
'linearSmooth_button',
l="linear smooth",
c=repeated_callback(Modeling.polySmoothFace, 1),
ann="applies linear smooth to selected objects",
bgc=color.color
)
pm.button(
'deActivateSmooth_button',
l="deActivate smooth",
c=repeated_callback(Modeling.activate_deActivate_smooth, 1),
ann="deActivates all polySmoothFace nodes in the "
"scene",
bgc=color.color
)
pm.button(
'activateSmooth_button',
l="activate smooth",
c=repeated_callback(Modeling.activate_deActivate_smooth, 0),
ann="activates all deActivated polySmoothFace nodes "
"in the scene",
bgc=color.color
)
pm.button(
'deleteSmooth_button',
l="delete smooth",
c=repeated_callback(Modeling.delete_smooth),
ann="deletes all the polySmoothFace nodes from the "
"scene",
bgc=color.color
)
pm.button(
'deleteSmoothOnSelected_button',
l="delete smooth on selected",
c=repeated_callback(Modeling.delete_smooth_on_selected),
ann="deletes selected polySmoothFace nodes from scene",
bgc=color.color
)
color.change()
pm.button(
'deleteAllSound_button', l="delete all sound",
c=repeated_callback(General.delete_all_sound),
ann="delete all sound",
bgc=color.color
)
pm.button(
'displayHandlesOfSelectedObjects_button',
l="toggle handles of selected objects",
c=repeated_callback(
General.toggle_attributes,
"displayHandle"
),
ann="select objects to toggle handle",
bgc=color.color
)
color.change()
pm.button(
'referenceSelectedObjects_button',
l="reference selected objects",
c=repeated_callback(
General.reference_selected_objects
),
ann="sets objects display override to reference",
bgc=color.color
)
pm.button(
'dereferenceSelectedObjects_button',
l="de-reference selected objects",
c=repeated_callback(
General.dereference_selected_objects
),
ann="sets objects display override to reference",
bgc=color.color
)
color.change()
pm.button(
'oyDeReferencer_button', l="dereferencer",
c=repeated_callback(General.dereferencer),
ann="sets all objects display override to normal",
bgc=color.color
)
color.change()
enable_matte_row_layout = pm.rowLayout(nc=6, adj=1)
with enable_matte_row_layout:
pm.text(
l='Enable Arnold Matte',
)
pm.button(
l='Default',
c=repeated_callback(Render.enable_matte, 0),
ann='Enables Arnold Matte on selected objects with <b>No Color</b>',
bgc=color.color
)
pm.button(
l='R',
c=repeated_callback(Render.enable_matte, 1),
ann='Enables Arnold Matte on selected objects with <b>Red</b>',
bgc=[1, 0, 0]
)
pm.button(
l='G',
c=repeated_callback(Render.enable_matte, 2),
ann='Enables Arnold Matte on selected objects with <b>Green</b>',
bgc=[0, 1, 0]
)
pm.button(
l='B',
c=repeated_callback(Render.enable_matte, 3),
ann='Enables Arnold Matte on selected objects with <b>Blue</b>',
bgc=[0, 0, 1]
)
pm.button(
l='A',
c=repeated_callback(Render.enable_matte, 4),
ann='Enables Arnold Matte on selected objects with <b>Alpha</b>',
bgc=[0.5, 0.5, 0.5]
)
color.change()
pm.button(
'fix_render_layer_out_adjustment_errors_button',
l="fixRenderLayerOutAdjustmentErrors",
c='pm.mel.eval("fixRenderLayerOutAdjustmentErrors();")',
ann="fixRenderLayerOutAdjustmentErrors",
bgc=color.color
)
pm.separator()
color.change()
with pm.rowLayout(nc=2, adj=2):
apply_to_hierarchy_checkBox = pm.checkBox(
'apply_to_hierarchy_checkBox',
l="Apply to Hierarchy",
value=True,
bgc=color.color
)
disable_undo_queue_check_box = pm.checkBox(
'disable_undo_queue_checkBox',
l="Disable Undo",
value=False,
bgc=color.color
)
attr_names = [
'castsShadows', 'receiveShadows', 'motionBlur',
'primaryVisibility', 'visibleInReflections',
'visibleInRefractions', 'aiSelfShadows', 'aiOpaque',
'aiVisibleInDiffuse', 'aiVisibleInGlossy', 'aiMatte',
'overrideShaders'
]
for attr_name in attr_names:
with pm.rowLayout(nc=4, rat=(1, "both", 0), adj=1):
pm.text('%s_text' % attr_name, l=attr_name, bgc=color.color)
pm.button(
'set_%s_ON_button' % attr_name,
l="ON",
c=repeated_callback(
set_shape_attribute_wrapper,
attr_name,
1,
),
bgc=(0, 1, 0)
)
pm.button(
'set_%s_OFF_button' % attr_name,
l="OFF",
c=repeated_callback(
set_shape_attribute_wrapper,
attr_name,
0
),
bgc=(1, 0, 0)
)
pm.button(
'set_%s_REMOVE_button' % attr_name,
l="REM",
ann='Remove Override',
c=repeated_callback(
set_shape_attribute_wrapper,
attr_name,
-1
),
bgc=(0, 0.5, 1)
)
pm.separator()
color.change()
pm.button(
l='Setup Z-Layer',
c=repeated_callback(Render.create_z_layer),
ann=Render.create_z_layer.__doc__,
bgc=color.color
)
pm.button(
l='Setup EA Matte',
c=repeated_callback(Render.create_ea_matte),
ann=Render.create_ea_matte.__doc__,
bgc=color.color
)
color.change()
pm.text(l='===== BarnDoor Simulator =====')
pm.button(
'barn_door_simulator_setup_button',
l='Setup',
c=repeated_callback(Render.barndoor_simulator_setup),
ann='Creates a arnold barn door simulator to the selected '
'light',
bgc=color.color
)
pm.button(
'barn_door_simulator_unsetup_button',
l='Un-Setup',
c=repeated_callback(Render.barndoor_simulator_unsetup),
ann='Removes the barn door simulator nodes from the selected '
'light',
bgc=color.color
)
pm.button(
'fix_barndoors_button',
l='Fix BarnDoors',
c=repeated_callback(Render.fix_barndoors),
ann=Render.fix_barndoors.__doc__,
bgc=color.color
)
color.change()
pm.button(
'ai_skin_sss_to_ai_skin_button',
l='aiSkinSSS --> aiSkin',
c=repeated_callback(Render.convert_aiSkinSSS_to_aiSkin),
ann=Render.convert_aiSkinSSS_to_aiSkin.__doc__,
bgc=color.color
)
pm.button(
'normalize_sss_weights_button',
l='Normalize SSS Weights',
c=repeated_callback(Render.normalize_sss_weights),
ann=Render.normalize_sss_weights.__doc__,
bgc=color.color
)
# store commands
__commands__.extend(obsolete_columnLayout.children())
pm.tabLayout(
main_tab_layout,
edit=True,
tabLabel=[
(general_column_layout, "Gen"),
(reference_columnLayout, "Ref"),
(modeling_column_layout, "Mod"),
(rigging_columnLayout, "Rig"),
(render_columnLayout, "Ren"),
(previs_columnLayout, "Prev"),
(animation_columnLayout, "Ani"),
(obsolete_columnLayout, "Obs")
],
cc=functools.partial(store_tab_index, main_tab_layout)
)
dock_control = pm.dockControl(
"toolbox_dockControl",
l='toolbox',
content=toolbox_window,
area="left",
allowedArea=["left", "right"],
width=width
)
# switch to last tab
last_tab_index = get_last_tab_index()
if last_tab_index:
pm.tabLayout(
main_tab_layout,
e=1,
sti=last_tab_index
)
def store_tab_index(tab_layout):
    """Persist the currently selected tab index of *tab_layout*.

    The index is written to the environment under the ``__last_tab__`` key
    so that ``get_last_tab_index`` can restore it on the next UI launch.
    """
    current_index = pm.tabLayout(tab_layout, q=1, sti=1)
    os.environ[__last_tab__] = str(current_index)
def get_last_tab_index():
    """Return the last stored tab index from the environment.

    Reads the value persisted by ``store_tab_index``. Returns 0 when no
    value has been stored yet, or when the stored value is not a valid
    integer (a malformed environment value must not break the UI launch).
    """
    try:
        return int(os.environ.get(__last_tab__, 0))
    except (TypeError, ValueError):
        return 0
| 37.608459 | 128 | 0.48752 |
import functools
import os
from anima.env.mayaEnv.animation import Animation
from anima.env.mayaEnv.general import General
from anima.env.mayaEnv.modeling import Modeling
from anima.env.mayaEnv.previs import Previs
from anima.env.mayaEnv.reference import Reference
from anima.env.mayaEnv.render import Render
from anima.env.mayaEnv.rigging import Rigging
import pymel.core as pm
import maya.mel as mel
from anima.env.mayaEnv import auxiliary, camera_tools
# stack of [callable, args, kwargs] triplets recorded by repeat_last();
# repeater() re-runs an entry by its index in this list
__last_commands__ = []
# environment variable key under which the last open tab index is stored
__last_tab__ = 'ANIMA_TOOLBOX_LAST_TAB_INDEX'
# flat list of all toolbox UI controls; filter_tools() toggles their visibility
__commands__ = []
def repeater(index):
    """Re-run the command recorded at *index* in ``__last_commands__``.

    Returns whatever the stored callable returns, or ``None`` when no
    command exists at that index.
    """
    global __last_commands__
    try:
        stored = __last_commands__[index]
        return stored[0](*stored[1], **stored[2])
    except IndexError:
        return None
def repeat_last(call_data):
    """Run *call_data* and register it with Maya's ``repeatLast`` command.

    :param call_data: a list of ``[callable, args, kwargs]``; the callable
        is invoked immediately with ``args``/``kwargs`` and appended to
        ``__last_commands__`` so Maya's repeatLast can re-run it through
        :func:`repeater`.
    """
    global __last_commands__
    # the index this command will occupy once appended below
    index = len(__last_commands__)
    callable_ = call_data[0]
    args = call_data[1]
    kwargs = call_data[2]
    # MEL snippet that calls back into Python to re-run the stored command;
    # the double/triple backslash escaping must survive both the Python
    # string literal and the quoted MEL "-ac" argument — do not reformat
    command = \
        'print \\"\\";python(\\\"from anima.env.mayaEnv.toolbox import ' \
        'repeater; repeater(%s);\\\");' % index
    repeat_last_command = 'repeatLast -ac "%(command)s" -acl "%(label)s";' % {
        'command': command,
        'label': callable_.__name__
    }
    print(repeat_last_command)
    pm.mel.eval(repeat_last_command)
    # record only after repeatLast registration succeeded
    __last_commands__.append(call_data)
    # finally execute the command itself
    callable_(*args, **kwargs)
def repeated_callback(callable_, *args, **kwargs):
    """Wrap *callable_* in a ``pm.Callback`` that routes through
    :func:`repeat_last`, so the call is recorded for Maya's repeatLast.
    """
    call_data = [callable_, args, kwargs]
    return pm.Callback(repeat_last, call_data)
class Color(object):
    """Cycles through a fixed palette of RGB colors.

    Used to alternate the background color of successive UI elements.
    """

    # palette of (r, g, b) tuples, each channel in the 0..1 range
    colors = [
        (1.000, 0.500, 0.666),
        (1.000, 0.833, 0.500),
        (0.666, 1.000, 0.500),
        (0.500, 1.000, 0.833),
        (0.500, 0.666, 1.000),
        (0.833, 0.500, 1.000)
    ]

    def __init__(self, index=0):
        # current position in the palette and the palette size
        self.index = index
        self.max_colors = len(self.colors)

    def change(self):
        """Advance to the next color, wrapping back to the first one."""
        self.index = (self.index + 1) % self.max_colors

    def reset(self):
        """Rewind to the first color of the palette."""
        self.index = 0

    @property
    def color(self):
        """The currently selected (r, g, b) tuple."""
        return self.colors[self.index]
def filter_tools(search_text):
    """Show only the toolbox controls whose label matches *search_text*.

    The match is case-insensitive. Buttons are hidden when their label does
    not contain the search text; row layouts are hidden when none of their
    button/text children match.

    Bug fix: the original compared the raw ``search_text`` against lowered
    child labels in the rowLayout branch, making that branch accidentally
    case-sensitive while the button branch was case-insensitive.
    """
    needle = search_text.lower()
    for command in __commands__:
        uitype = command.type()
        if uitype == 'button':
            # visible only when the label contains the search text
            command.setVisible(needle in command.getLabel().lower())
        elif uitype == 'rowLayout':
            # a row layout stays visible if any labelled child matches
            matched_children = False
            for child in command.children():
                if child.type() in ['button', 'staticText'] and \
                   needle in child.getLabel().lower():
                    matched_children = True
                    break
            command.setVisible(matched_children)
def UI():
width = 260
height = 650
row_spacing = 3
color = Color()
global __commands__
__commands__ = []
if pm.dockControl("toolbox_dockControl", q=True, ex=True):
pm.deleteUI("toolbox_dockControl")
window_name = "toolbox_window"
if pm.window(window_name, q=True, ex=True):
pm.deleteUI(window_name, wnd=True)
toolbox_window = pm.window(
window_name,
wh=(width, height),
title="Anima ToolBox"
)
main_form_layout = pm.formLayout(
'main_form_layout', nd=100, parent=toolbox_window
)
search_field = pm.textField(
'search_text_field',
tcc=filter_tools,
placeholderText='Search...',
parent=main_form_layout
)
main_tab_layout = pm.tabLayout(
'main_tab_layout', scr=True, cr=True, parent=main_form_layout
)
pm.formLayout(
main_form_layout, edit=True,
attachForm=[
(search_field, "top", 0),
(search_field, "left", 0),
(search_field, "right", 0),
(main_tab_layout, "bottom", 0),
(main_tab_layout, "left", 0),
(main_tab_layout, "right", 0)
],
attachNone=[
(search_field, "bottom")
],
attachControl=[
(main_tab_layout, "top", 0, search_field)
]
)
with main_tab_layout:
general_column_layout = pm.columnLayout(
'general_column_layout',
adj=True,
cal="center",
rs=row_spacing
)
with general_column_layout:
color.change()
pm.button(
'open_version_button',
l="Open Version",
c=repeated_callback(General.version_dialog, mode=1),
ann="Open Version",
bgc=color.color
)
pm.button(
'save_as_version_button',
l="Save As Version",
c=repeated_callback(General.version_dialog, mode=0),
ann="Save As Version",
bgc=color.color
)
color.change()
pm.button(
'selectionManager_button',
l="Selection Manager",
c=repeated_callback(General.selection_manager),
ann="Selection Manager",
bgc=color.color
)
color.change()
pm.button(
'publishChecker_button',
l="Publish Checker",
c=repeated_callback(General.publish_checker),
ann="Publish Checker",
bgc=color.color
)
color.change()
pm.button(
'rename_unique_button',
l='Rename Unique',
c=repeated_callback(General.rename_unique),
ann=General.rename_unique.__doc__,
bgc=color.color
)
pm.button(
'removeColonFromNames_button',
l="remove colon(:) from node names",
c=repeated_callback(General.remove_colon_from_names),
ann="removes the colon (:) character from all "
"selected object names",
bgc=color.color
)
pm.button(
'removePastedFromNames_button',
l="remove \"pasted_\" from node names",
c=repeated_callback(General.remove_pasted),
ann="removes the \"passed__\" from all selected "
"object names",
bgc=color.color
)
color.change()
pm.button(
'togglePolyMeshes_button',
l="toggle polymesh visibility",
c=repeated_callback(General.toggle_poly_meshes),
ann="toggles the polymesh display in the active model "
"panel",
bgc=color.color
)
color.change()
pm.button(
'selectSetMembers_button',
l="select set members",
c=repeated_callback(General.select_set_members),
ann="selects the selected set members in correct "
"order",
bgc=color.color
)
color.change()
pm.button(
'delete_unused_intermediate_shapes_button',
l='Delete Unused Intermediate Shape Nodes',
c=repeated_callback(General.delete_unused_intermediate_shapes),
ann='Deletes unused (no connection) intermediate shape nodes',
bgc=color.color
)
color.change()
pm.button(
'export_transform_info_button',
l='Export Transform Info',
c=repeated_callback(General.export_transform_info),
ann='exports transform info',
bgc=color.color
)
pm.button(
'import_transform_info_button',
l='Import Transform Info',
c=repeated_callback(General.import_transform_info),
ann='imports transform info',
bgc=color.color
)
color.change()
pm.button(
'export_global_transform_info_button',
l='Export Global Transform Info',
c=repeated_callback(General.export_transform_info, True),
ann='exports global transform info',
bgc=color.color
)
pm.button(
'import_global_transform_info_button',
l='Import Global Transform Info',
c=repeated_callback(General.import_transform_info, True),
ann='imports global transform info',
bgc=color.color
)
color.change()
pm.button(
'export_component_transform_info_button',
l='Export Component Transform Info',
c=repeated_callback(General.export_component_transform_info),
ann='exports component transform info',
bgc=color.color
)
pm.button(
'import_component_transform_info_button',
l='Import Component Transform Info',
c=repeated_callback(General.import_component_transform_info),
ann='imports component transform info',
bgc=color.color
)
color.change()
pm.button(
'import_rsproxy_data_from_houdini_button',
l='Import RSProxy Data From Houdini',
c=repeated_callback(General.rsproxy_data_importer),
ann=General.rsproxy_data_importer.__doc__,
bgc=color.color
)
color.change()
pm.button(
'generate_thumbnail_button',
l='Generate Thumbnail',
c=repeated_callback(General.generate_thumbnail),
ann='Generates thumbnail for current scene',
bgc=color.color
)
color.change()
pm.button(
'cleanup_light_cameras_button',
l='Cleanup Light Cameras',
c=repeated_callback(General.cleanup_light_cameras),
ann=General.cleanup_light_cameras.__doc__,
bgc=color.color
)
color.change()
from anima.env.mayaEnv.general import unknown_plugin_cleaner_ui
pm.button(
'cleanup_plugins_button',
l='Cleanup Unknown Plugins',
c=repeated_callback(unknown_plugin_cleaner_ui),
ann=unknown_plugin_cleaner_ui.__doc__,
bgc=color.color
)
color.change()
pm.button(
'unshape_parent_node_button',
l='Unshape Parent Nodes',
c=repeated_callback(General.unshape_parent_nodes),
ann=General.unshape_parent_nodes.__doc__,
bgc=color.color
)
__commands__.extend(general_column_layout.children())
reference_columnLayout = pm.columnLayout(
'reference_columnLayout',
adj=True, cal="center", rs=row_spacing)
with reference_columnLayout:
color.reset()
pm.text(l='===== Reference Tools =====')
pm.button(
'nsDelete_button',
l="nsDelete",
c=repeated_callback(General.namespace_deleter),
ann=General.namespace_deleter.__doc__,
bgc=color.color
)
color.change()
pm.button(
'duplicate_selected_reference_button',
l='Duplicate Selected Reference',
c=repeated_callback(Reference.duplicate_selected_reference),
ann='Duplicates the selected reference',
bgc=color.color
)
color.change()
pm.button(
'select_reference_in_reference_editor_button',
l='Select Reference In Reference Editor',
c=repeated_callback(
Reference.select_reference_in_reference_editor
),
ann=Reference.select_reference_in_reference_editor.__doc__,
bgc=color.color
)
color.change()
pm.button(
'get_selected_reference_path_button',
l='Get Selected Reference Path',
c=repeated_callback(Reference.get_selected_reference_path),
ann='Prints the selected reference full path',
bgc=color.color
)
pm.button(
'open_selected_reference_button',
l='Open Selected Reference in New Maya',
c=repeated_callback(Reference.open_reference_in_new_maya),
ann='Opens the selected reference in new Maya '
'instance',
bgc=color.color
)
color.change()
pm.button(
'publish_model_as_look_dev_button',
l='Model -> LookDev',
c=repeated_callback(Reference.publish_model_as_look_dev),
ann='References the current Model scene to the LookDev scene '
'of the same task, creates the LookDev scene if '
'necessary, also reopens the current model scene.',
bgc=color.color
)
color.change()
pm.button(
'fix_reference_namespace_button',
l='Fix Reference Namespace',
c=repeated_callback(Reference.fix_reference_namespace),
ann='Fixes old style reference namespaces with new one, '
'creates new versions if necessary.',
bgc=color.color
)
color.change()
pm.button(
'fix_reference_paths_button',
l='Fix Reference Paths',
c=repeated_callback(Reference.fix_reference_paths),
ann='Fixes reference paths deeply, so they will use'
'$REPO env var.',
bgc=color.color
)
pm.button(
'fix_student_license_on_references_button',
l='Fix Student License Error On References',
c=repeated_callback(
Reference.fix_student_license_on_references
),
ann=Reference.fix_student_license.__doc__,
bgc=color.color
)
pm.button(
'fix_student_license_on_files_button',
l='Fix Student License Error On Selected Files',
c=repeated_callback(
Reference.fix_student_license_on_selected_file
),
ann=Reference.fix_student_license.__doc__,
bgc=color.color
)
color.change()
pm.button(
'archive_button',
l='Archive Current Scene',
c=repeated_callback(Reference.archive_current_scene),
ann='Creates a ZIP file containing the current scene and its'
'references in a flat Maya default project folder '
'structure',
bgc=color.color
)
pm.button(
'bind_to_original_button',
l='Bind To Original',
c=repeated_callback(Reference.bind_to_original),
ann='Binds the current local references to the ones on the '
'repository',
bgc=color.color
)
pm.button(
'unload_selected_references_button',
l='Unload Selected References',
c=repeated_callback(Reference.unload_selected_references),
ann='Unloads the highest references that is related with the selected objects',
bgc=color.color
)
pm.button(
'unload_unselected_references_button',
l='Unload UnSelected References',
c=repeated_callback(Reference.unload_unselected_references),
ann='Unloads any references that is not related with the '
'selected objects',
bgc=color.color
)
color.change()
pm.button(
'remove_selected_references_button',
l='Remove Selected References',
c=repeated_callback(Reference.remove_selected_references),
ann='Removes the highest references that is related with the selected objects',
bgc=color.color
)
color.change()
pm.text(l='===== Representation Tools =====')
with pm.rowLayout(nc=2, adj=1):
pm.checkBoxGrp(
'generate_repr_types_checkbox_grp',
l='Reprs',
numberOfCheckBoxes=3,
labelArray3=['GPU', 'ASS', 'RS'],
cl4=['left', 'left', 'left', 'left'],
cw4=[51, 50, 50, 50],
valueArray3=[1, 1, 1]
)
pm.checkBox(
'generate_repr_skip_existing_checkBox',
l='Skip existing Reprs.',
value=0
)
pm.button(
'generate_repr_of_all_references_button',
l='Deep Generate Repr Of All References',
c=repeated_callback(
Reference.generate_repr_of_all_references_caller
),
ann='Deeply generates desired Representations of all '
'references of this scene',
bgc=color.color
)
pm.button(
'generate_repr_of_scene_button',
l='Generate Repr Of This Scene',
c=repeated_callback(Reference.generate_repr_of_scene_caller),
ann='Generates desired Representations of this scene',
bgc=color.color
)
color.change()
with pm.rowLayout(nc=2, adj=1):
pm.radioButtonGrp(
'repr_apply_to_radio_button_grp',
l='Apply To',
labelArray2=['Selected', 'All References'],
numberOfRadioButtons=2,
cl3=['left', 'left', 'left'],
cw3=[50, 65, 65],
sl=1
)
pm.button(
'to_base_button',
l='To Base',
c=repeated_callback(Reference.to_base),
ann='Convert selected to Base representation',
bgc=color.color
)
pm.button(
'to_gpu_button',
l='To GPU',
c=repeated_callback(Reference.to_gpu),
ann='Convert selected to GPU representation',
bgc=color.color
)
pm.button(
'to_ass_button',
l='To ASS',
c=repeated_callback(Reference.to_ass),
ann='Convert selected to ASS representation',
bgc=color.color
)
pm.button(
'to_rs_button',
l='To RS',
c=repeated_callback(Reference.to_rs),
ann='Convert selected to RS representation',
bgc=color.color
)
color.change()
pm.button(
'update_alembic_references_button',
l='Update Alembic References',
c=repeated_callback(auxiliary.update_alembic_references),
ann=auxiliary.update_alembic_references.__doc__,
bgc=color.color
)
__commands__.extend(reference_columnLayout.children())
modeling_column_layout = pm.columnLayout(
'modeling_column_layout',
adj=True, cal="center", rs=row_spacing)
with modeling_column_layout:
color.reset()
pm.button('toggleFaceNormalDisplay_button',
l="toggle face normal display",
c=repeated_callback(
pm.runtime.ToggleFaceNormalDisplay),
ann="toggles face normal display",
bgc=color.color)
pm.button('reverseNormals_button', l="reverse normals",
c=repeated_callback(Modeling.reverse_normals),
ann="reverse normals",
bgc=color.color)
pm.button('fixNormals_button', l="fix normals",
c=repeated_callback(Modeling.fix_normals),
ann="applies setToFace then conform and then "
"soften edge to all selected objects",
bgc=color.color)
color.change()
pm.button(
'oyHierarchyInstancer_button',
l="hierarchy_instancer on selected",
c=repeated_callback(Modeling.hierarchy_instancer),
ann="hierarchy_instancer on selected",
bgc=color.color
)
color.change()
pm.button(
'relax_verts_button',
l="Relax Vertices",
c=repeated_callback(Modeling.relax_vertices),
ann="opens relax_vertices",
bgc=color.color
)
with pm.rowLayout(nc=4, adj=1):
def smooth_edges_callback():
iteration = pm.intSliderGrp(
"smooth_edges_iteration_intField", q=1, v=1
)
Modeling.smooth_edges(iteration=iteration)
pm.button(
'smooth_edges_button',
l="Smooth Edges",
c=repeated_callback(smooth_edges_callback),
ann=Modeling.smooth_edges.__doc__,
bgc=color.color
)
pm.intSliderGrp(
'smooth_edges_iteration_intField',
v=100,
min=0,
max=100
)
color.change()
pm.button(
'create_curve_from_mesh_edges_button',
l="Curve From Mesh Edges",
c=repeated_callback(Modeling.create_curve_from_mesh_edges),
ann="Creates a curve from selected mesh edges",
bgc=color.color
)
color.change()
pm.button(
'vertex_aligned_locator_button',
l="Vertex Aligned Locator",
c=repeated_callback(Modeling.vertex_aligned_locator),
ann="Creates an aligned locator from selected vertices",
bgc=color.color
)
color.change()
with pm.rowLayout(nc=8, rat=(1, "both", 0), adj=1):
pm.text('set_pivot_text', l='Set Pivot', bgc=color.color)
pm.button(
'center_button',
l="C",
c=repeated_callback(
Modeling.set_pivot,
0
),
bgc=(0.8, 0.8, 0.8)
)
pm.button(
'minus_X_button',
l="-X",
c=repeated_callback(
Modeling.set_pivot,
1
),
bgc=(1.000, 0.500, 0.666)
)
pm.button(
'plus_X_button',
l="+X",
c=repeated_callback(
Modeling.set_pivot,
2
),
bgc=(1.000, 0.500, 0.666)
)
pm.button(
'minus_Y_button',
l="-Y",
c=repeated_callback(
Modeling.set_pivot,
3
),
bgc=(0.666, 1.000, 0.500)
)
pm.button(
'plus_Y_button',
l="+Y",
c=repeated_callback(
Modeling.set_pivot,
4
),
bgc=(0.666, 1.000, 0.500)
)
pm.button(
'minus_Z_button',
l="-X",
c=repeated_callback(
Modeling.set_pivot,
5
),
bgc=(0.500, 0.666, 1.000)
)
pm.button(
'plus_Z_button',
l="+X",
c=repeated_callback(
Modeling.set_pivot,
6
),
bgc=(0.500, 0.666, 1.000)
)
color.change()
with pm.rowLayout(nc=7, rat=(1, "both", 0), adj=1):
pm.text(l='Text. Res', bgc=color.color)
pm.button(
l="128",
c=repeated_callback(
Modeling.set_texture_res,
128
),
bgc=Color.colors[0]
)
pm.button(
l="256",
c=repeated_callback(
Modeling.set_texture_res,
256
),
bgc=Color.colors[1]
)
pm.button(
l="512",
c=repeated_callback(
Modeling.set_texture_res,
512
),
bgc=Color.colors[2]
)
pm.button(
l="1024",
c=repeated_callback(
Modeling.set_texture_res,
1024
),
bgc=Color.colors[3]
)
pm.button(
l='2048',
c=repeated_callback(
Modeling.set_texture_res,
2048
),
bgc=Color.colors[4]
)
pm.button(
l='4096',
c=repeated_callback(
Modeling.set_texture_res,
4096
),
bgc=Color.colors[5]
)
pm.text(l='========== UV Tools =============')
color.change()
pm.button(
'fix_uvsets_button',
l="Fix UVSets (DiffuseUV -> map1)",
c=repeated_callback(Modeling.fix_uvsets),
ann=Modeling.fix_uvsets,
bgc=color.color
)
color.change()
pm.button(
'select_zero_uv_area_faces_button',
l="Filter Zero UV Area Faces",
c=repeated_callback(Modeling.select_zero_uv_area_faces),
ann="Selects faces with zero uv area",
bgc=color.color
)
color.change()
pm.button(
'create_auto_uvmap_button',
l='Create Auto UVMap',
c=repeated_callback(Modeling.create_auto_uvmap),
ann=Modeling.create_auto_uvmap.__doc__,
bgc=color.color
)
with pm.rowLayout(nc=6, adj=1):
def transfer_uvs_button_callback(*args, **kwargs):
label_lut = {
'W': 0,
'L': 1,
'UV': 2,
'C': 3,
'T': 4
}
sample_space = label_lut[
pm.radioCollection(
'transfer_uvs_radio_collection',
q=1, sl=1
)
]
Modeling.transfer_uvs(sample_space=sample_space)
pm.button('transfer_uvs_button',
l="Transfer UVs",
c=repeated_callback(transfer_uvs_button_callback),
ann="Transfers UVs from one group to other, use it"
"for LookDev -> Alembic",
bgc=color.color)
pm.radioCollection('transfer_uvs_radio_collection')
button_with = 40
pm.radioButton(
'W', w=button_with, al='left', ann='World'
)
pm.radioButton(
'L', w=button_with, al='left', ann='Local'
)
pm.radioButton(
'UV', w=button_with, al='left', ann='UV'
)
pm.radioButton(
'C', w=button_with, al='left', ann='Component', sl=1
)
pm.radioButton(
'T', w=button_with, al='left', ann='Topology'
)
color.change()
pm.text(l='======= Manipulator Tools =======')
pm.button('set_to_point_button',
l="Set To Point",
c=repeated_callback(pm.mel.eval, "manipMoveOrient 1;"),
ann="Set manipulator to the point",
bgc=color.color)
pm.button('set_to_edge_button',
l="Set To Edge",
c=repeated_callback(pm.mel.eval, "manipMoveOrient 2;"),
ann="Set manipulator to the edge",
bgc=color.color)
pm.button('set_to_face_button',
l="Set To Face",
c=repeated_callback(pm.mel.eval, "manipMoveOrient 3;"),
ann="Set manipulator to the face",
bgc=color.color)
color.change()
pm.button('create_bbox_from_selection_button',
l="Create BBOX from selection",
c=repeated_callback(Modeling.bbox_from_selection),
ann=Modeling.bbox_from_selection.__doc__,
bgc=color.color)
__commands__.extend(modeling_column_layout.children())
rigging_columnLayout = pm.columnLayout(
'rigging_columnLayout',
adj=True, cal="center",
rs=row_spacing
)
with rigging_columnLayout:
color.reset()
pm.button(
'create_joints_on_curve_ui_button',
l="Create Joints On Curve UI",
c=repeated_callback(Rigging.create_joints_on_curve_ui),
ann=Rigging.create_joints_on_curve_ui.__doc__,
bgc=color.color
)
pm.button(
'mirror_transformation_button',
l="Mirror Transformation",
c=repeated_callback(Rigging.mirror_transformation),
ann=Rigging.mirror_transformation.__doc__,
bgc=color.color
)
color.change()
pm.button(
'IKFKLimbRigger_button',
l="IK/FK Limb Rigger",
c=repeated_callback(Rigging.ik_fk_limb_rigger),
ann=Rigging.ik_fk_limb_rigger.__doc__,
bgc=color.color
)
with pm.rowLayout(nc=2, adj=1):
def ik_fk_limb_rigger_callback():
    """Run the bendy IK/FK limb rigger, reading the subdivision count
    from the UI int field next to the button."""
    subdiv_count = pm.intField(
        'bendy_ik_fk_subdivision_count_field', q=1, v=1
    )
    Rigging.bendy_ik_fk_limb_rigger(subdivision=subdiv_count)
pm.button(
'bendy_ik_fk_limb_rigger_button',
l='IK/FK Limb Rigger (Bendy)',
c=repeated_callback(ik_fk_limb_rigger_callback),
ann=Rigging.bendy_ik_fk_limb_rigger.__doc__,
bgc=color.color
)
pm.intField('bendy_ik_fk_subdivision_count_field', min=0, v=2)
pm.button(
'ReverseFootRigger_button',
l="Reverse Foot Rigger",
c=repeated_callback(Rigging.reverse_foot_rigger),
ann=Rigging.reverse_foot_rigger.__doc__,
bgc=color.color
)
pm.button(
'squashStretchBendRigger_button',
l="Squash/Stretch/Bend Rigger",
c=repeated_callback(Rigging.squash_stretch_bend_rigger),
ann=Rigging.squash_stretch_bend_rigger.__doc__,
bgc=color.color
)
pm.button(
'setupStretchySplineIKCurve_button',
l="setup stretchy splineIK curve",
c=repeated_callback(Rigging.setup_stretchy_spline_ik_curve),
ann="connects necessary nodes to calculate arcLength "
"change in percent",
bgc=color.color
)
pm.button(
'selectJointsDeformingTheObject_button',
l="select joints deforming the object",
c=repeated_callback(Rigging.select_joints_deforming_object),
ann="select joints that deform the object",
bgc=color.color
)
color.change()
pm.button(
'create_axial_correction_group_button',
l="Create Axial Correction Groups",
c=repeated_callback(Rigging.axial_correction_group),
ann=Rigging.axial_correction_group.__doc__,
bgc=color.color
)
pm.button(
    'create_zv_parent_compatible_groups_button',
    l="Create ZV Parent Compatible Groups",
    c=repeated_callback(Rigging.create_zv_parent_compatible_groups),
    # fix: tooltip previously showed axial_correction_group's docstring
    # (copy/paste error) — use the docstring of the function this
    # button actually calls
    ann=Rigging.create_zv_parent_compatible_groups.__doc__,
    bgc=color.color
)
color.change()
pm.button(
'setClustersToAbsolute_button',
l="set selected clusters to absolute",
c=repeated_callback(Rigging.set_clusters_relative_state, 0),
ann="set Clusters to Absolute",
bgc=color.color
)
pm.button(
'setClustersToRelative_button',
l="set selected clusters to relative",
c=repeated_callback(
Rigging.set_clusters_relative_state, 1
),
ann="set Clusters to Relative",
bgc=color.color
)
color.change()
pm.button(
'addControllerShape_button',
l="add controller shape",
c=repeated_callback(Rigging.add_controller_shape),
ann="add the shape in the selected joint",
bgc=color.color
)
pm.button(
'replaceControllerShape_button',
l="replace controller shape",
c=repeated_callback(Rigging.replace_controller_shape),
ann="replaces the shape in the selected joint",
bgc=color.color
)
color.change()
def pin_controller_callback(color, *args):
    """Create a PinController pinned to the first selected vertex.

    The created controller is colored with the given RGB value.  The
    buttons below pass ``[1, 0, 0]``, ``[0, 1, 0]`` or ``[0, 0, 1]``.

    :param color: RGB color as a list of three floats.
    :param args: extra UI callback arguments (ignored).
    """
    from anima.env.mayaEnv import rigging
    # assumes a vertex is currently selected — raises IndexError otherwise
    vertex = pm.ls(sl=1)[0]
    pc = rigging.PinController()
    pc.color = color
    pc.pin_to_vertex = vertex
    pc.setup()
with pm.rowLayout(nc=4, adj=1):
pm.text(l="Pin Controller")
pm.button('pin_controller_red_button', l="R",
c=repeated_callback(pin_controller_callback, [1, 0, 0]),
ann=pin_controller_callback.__doc__,
bgc=[1, 0, 0])
pm.button('pin_controller_green_button', l="G",
c=repeated_callback(pin_controller_callback, [0, 1, 0]),
ann=pin_controller_callback.__doc__,
bgc=[0, 1, 0])
pm.button('pin_controller_blue_button', l="B",
c=repeated_callback(pin_controller_callback, [0, 0, 1]),
ann=pin_controller_callback.__doc__,
bgc=[0, 0, 1])
pm.button('rivet_button', l="create rivet",
c=repeated_callback(mel.eval, 'rivet'),
ann="create rivet",
bgc=color.color)
pm.button('oyAutoRivet_button', l="auto rivet",
c=repeated_callback(mel.eval, 'oyAutoRivet'),
ann="auto rivet",
bgc=color.color)
pm.button(
'oyAutoRivetFollicle_button',
l="auto rivet (Follicle)",
c=repeated_callback(auxiliary.auto_rivet),
ann="creates a rivet setup by using hair follicles",
bgc=color.color
)
pm.button(
'rivet_per_face_button',
l="rivet per face (Follicle)",
c=repeated_callback(auxiliary.rivet_per_face),
ann="creates a rivet setup per selected face by using hair "
"follicles",
bgc=color.color
)
pm.button('create_hair_from_curves_button',
l="Create Hair From Curves",
c=repeated_callback(auxiliary.hair_from_curves),
ann="creates hair from curves",
bgc=color.color)
color.change()
pm.button('artPaintSkinWeightsTool_button',
l="paint weights tool",
c=repeated_callback(mel.eval, 'ArtPaintSkinWeightsTool'),
ann="paint weights tool",
bgc=color.color)
def skin_tools_ui_caller(*args):
    """Open the Skin Tools UI.

    :param args: extra UI callback arguments (ignored).
    """
    from anima.env.mayaEnv.rigging import SkinToolsUI
    SkinToolsUI().ui()
pm.button('skin_tools_button', l="Skin Tools",
c=skin_tools_ui_caller,
ann="skin tools",
bgc=color.color)
pm.button('oyFixBoundJoint_button', l="fix_bound_joint",
c=repeated_callback(Rigging.fix_bound_joint),
ann="fix_bound_joint",
bgc=color.color)
pm.button('toggle_local_rotation_axes_button',
l="Toggle Local Rotation Axes",
c=repeated_callback(General.toggle_attributes, "displayLocalAxis"),
ann="Toggle Local Rotation Axes",
bgc=color.color)
pm.button('toggle_display_rotate_pivot_button',
l="Toggle Display Rotate Pivot",
c=repeated_callback(General.toggle_attributes, "displayRotatePivot"),
ann="Toggle Display Rotate Pivot",
bgc=color.color)
pm.button('seroBlendController_button',
l="seroBlendController",
c=repeated_callback(mel.eval, 'seroBlendController'),
ann="seroBlendController",
bgc=color.color)
pm.button('align_to_pole_vector_button',
l="Align To Pole Vector",
c=repeated_callback(auxiliary.align_to_pole_vector),
ann="align to pole vector",
bgc=color.color)
color.change()
pm.button('oyResetCharSet_button', l="oyResetCharSet",
c=repeated_callback(mel.eval, 'oyResetCharSet'),
ann="reset char set",
bgc=color.color)
pm.button('export_blend_connections_button',
l="Export blend connections",
c=repeated_callback(auxiliary.export_blend_connections),
ann="export blend connections",
bgc=color.color)
color.change()
pm.button('createFollicles_button', l="create follicles",
c=repeated_callback(Rigging.create_follicles),
ann="create follicles",
bgc=color.color)
color.change()
pm.button('oyResetTweaks_button', l="reset tweaks",
c=repeated_callback(Rigging.reset_tweaks),
ann="reset tweaks",
bgc=color.color)
color.change()
def add_cacheable_attribute_callback():
    """Add the ``cacheable`` attribute to every selected node."""
    for node in pm.selected():
        Rigging.add_cacheable_attribute(node)
pm.button('add_cacheable_attr_button', l="add `cacheable` attribute",
c=repeated_callback(add_cacheable_attribute_callback),
ann=add_cacheable_attribute_callback.__doc__,
bgc=color.color)
__commands__.extend(rigging_columnLayout.children())
render_columnLayout = pm.columnLayout(
'render_columnLayout',
adj=True,
cal="center",
rs=row_spacing
)
with render_columnLayout:
color.reset()
color.change()
pm.button(
'update_render_settings_button',
l="Update Render Settings",
c=repeated_callback(Render.update_render_settings),
ann=Render.update_render_settings.__doc__,
bgc=color.color
)
color.change()
pm.button(
'delete_render_layers_button',
l="Delete Render Layers",
c=repeated_callback(Render.delete_render_layers),
ann=Render.delete_render_layers.__doc__,
bgc=color.color
)
pm.button(
'delete_display_layers_button',
l="Delete Display Layers",
c=repeated_callback(Render.delete_display_layers),
ann=Render.delete_display_layers.__doc__,
bgc=color.color
)
pm.button(
'delete_render_and_display_layers_button',
l="Delete Render and Display Layers",
c=repeated_callback(Render.delete_render_and_display_layers),
ann=Render.delete_render_and_display_layers.__doc__,
bgc=color.color
)
color.change()
pm.button(
'delete_unused_shading_nodes_button',
l="Delete Unused Shading Nodes",
c=repeated_callback(Render.delete_unused_shading_nodes),
ann=Render.delete_unused_shading_nodes.__doc__,
bgc=color.color
)
color.change()
pm.button(
'duplicate_input_graph_button',
l="Duplicate Input Graph",
c=repeated_callback(Render.duplicate_input_graph),
ann=Render.duplicate_input_graph.__doc__,
bgc=color.color
)
pm.button(
'duplicate_with_connections_button',
l="Duplicate With Connections To Network",
c=repeated_callback(Render.duplicate_with_connections),
ann=Render.duplicate_with_connections.__doc__,
bgc=color.color
)
color.change()
pm.text(l='=========== RedShift Tools ===========')
pm.button(
'generate_rs_from_selection_button',
l='Generate RSProxy From Selection',
c=repeated_callback(Render.generate_rsproxy_from_selection),
ann=Render.generate_rsproxy_from_selection.__doc__,
bgc=color.color
)
pm.button(
'generate_rs_from_selection_per_selection_button',
l='Generate RSProxy From Selection (Per Selection)',
c=repeated_callback(Render.generate_rsproxy_from_selection, True),
ann=Render.generate_rsproxy_from_selection.__doc__,
bgc=color.color
)
pm.button(
'set_rsproxy_to_bbox_button',
l='RSProxy -> Bounding Box',
c=repeated_callback(Render.rsproxy_to_bounding_box),
ann=Render.rsproxy_to_bounding_box.__doc__,
bgc=color.color
)
pm.button(
'set_rsproxy_to_preview_mesh_button',
l='RSProxy -> Preview Mesh',
c=repeated_callback(Render.rsproxy_to_preview_mesh),
ann=Render.rsproxy_to_preview_mesh.__doc__,
bgc=color.color
)
color.change()
pm.text(l='===== RedShift IC + IPC Bake =====')
pm.button(
'redshift_ic_ipc_bake_button',
l="Do Bake",
c=repeated_callback(Render.redshift_ic_ipc_bake),
ann=Render.redshift_ic_ipc_bake.__doc__,
bgc=color.color
)
pm.button(
'redshift_ic_ipc_bake_restore_button',
l="Restore Settings",
c=repeated_callback(Render.redshift_ic_ipc_bake_restore),
ann=Render.redshift_ic_ipc_bake_restore.__doc__,
bgc=color.color
)
pm.text(l='======================================')
color.change()
pm.button(
'submit_afanasy_button',
l="Afanasy Job Submitter",
c=repeated_callback(Render.afanasy_job_submitter),
ann=Render.afanasy_job_submitter.__doc__,
bgc=color.color
)
color.change()
pm.button(
'open_node_in_browser_button',
l="Open node in browser",
c=repeated_callback(Render.open_node_in_browser),
ann="Open node in browser",
bgc=color.color
)
color.change()
pm.button('auto_convert_to_redshift_button',
l="Auto Convert Scene To RedShift (BETA)",
c=repeated_callback(Render.auto_convert_to_redshift),
ann="Automatically converts the scene from Arnold to "
"Redshift, including materials and lights",
bgc=color.color)
pm.button('convert_nodes_to_redshift_button',
l="Convert Selected To RedShift (BETA)",
c=repeated_callback(Render.convert_nodes_to_redshift),
ann="Automatically converts the selected node from "
"Arnold to Redshift",
bgc=color.color)
def set_shape_attribute_wrapper(attr_name, value):
    """Set the given attribute on shapes via ``Render.set_shape_attribute``.

    Queries the "apply to hierarchy" and "disable undo queue" check
    boxes at call time so the buttons always honor the current UI
    state.  ``apply_to_hierarchy_checkBox`` and
    ``disable_undo_queue_check_box`` are created elsewhere in this UI
    (outside this section) — TODO confirm their widget names.

    :param attr_name: name of the shape attribute to set.
    :param value: value to assign to the attribute.
    """
    apply_to_hierarchy = pm.checkBox(
        apply_to_hierarchy_checkBox,
        q=True,
        v=True
    )
    disable_undo = pm.checkBox(
        disable_undo_queue_check_box,
        q=True,
        v=True
    )
    Render.set_shape_attribute(
        attr_name,
        value,
        apply_to_hierarchy,
        disable_undo
    )
with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1):
pm.text('renderThumbnailUpdate_text',
l="renderThumbnailUpdate",
bgc=color.color)
pm.button('set_renderThumbnailUpdate_ON_button',
l="ON",
c=repeated_callback(pm.renderThumbnailUpdate, 1),
bgc=(0, 1, 0))
pm.button('set_renderThumbnailUpdate_OFF_button',
l="OFF",
c=repeated_callback(pm.renderThumbnailUpdate, 0),
bgc=(1, 0, 0))
color.change()
pm.button('replaceShadersWithLast_button',
l="replace shaders with last",
c=repeated_callback(Render.replace_shaders_with_last),
ann="replace shaders with last",
bgc=color.color)
color.change()
pm.button('createTextureRefObject_button',
l="create texture ref. object",
c=repeated_callback(Render.create_texture_ref_object),
ann="create texture ref. object",
bgc=color.color)
pm.text(l='========== Texture Tools =============')
color.change()
pm.button('assign_substance_textures_button',
l="Assign Substance Textures",
c=repeated_callback(Render.assign_substance_textures),
ann=Render.assign_substance_textures.__doc__,
bgc=color.color)
color.change()
pm.button('normalize_texture_paths_button',
l="Normalize Texture Paths (remove $)",
c=repeated_callback(Render.normalize_texture_paths),
ann=Render.normalize_texture_paths.__doc__,
bgc=color.color)
pm.button('unnormalize_texture_paths_button',
l="Unnormalize Texture Paths (add $)",
c=repeated_callback(Render.unnormalize_texture_paths),
ann=Render.unnormalize_texture_paths.__doc__,
bgc=color.color)
color.change()
pm.button('assign_random_material_color_button',
l="Assign Material with Random Color",
c=repeated_callback(Render.assign_random_material_color),
ann=Render.assign_random_material_color.__doc__,
bgc=color.color)
pm.button('randomize_material_color_button',
l="Randomize Material Color",
c=repeated_callback(Render.randomize_material_color),
ann=Render.randomize_material_color.__doc__,
bgc=color.color)
color.change()
pm.button('import_image_as_plane_button',
l="Import Image as Plane",
c=repeated_callback(Render.import_image_as_plane),
ann=Render.import_image_as_plane.__doc__,
bgc=color.color)
pm.text(l='============ Camera Tools ============')
color.change()
pm.button(
'CameraFilmOffsetTool_button',
l="Camera Film Offset Tool",
c=repeated_callback(
camera_tools.camera_film_offset_tool
),
ann="Camera Film Offset Tool",
bgc=color.color
)
def camera_focus_plane_tool_callback():
    """Run the camera focus plane tool on the first selected node
    (expected to be a camera)."""
    selected_camera = pm.ls(sl=1)[0]
    camera_tools.camera_focus_plane_tool(selected_camera)
pm.button(
'CameraFocusPlaneTool_button',
l="Camera Focus Plane Tool",
c=repeated_callback(camera_focus_plane_tool_callback),
ann="Camera Film Offset Tool",
bgc=color.color
)
pm.button(
'lock_tracked_camera_channels_button',
l="Lock Tracked Camera Channels",
c=repeated_callback(camera_tools.lock_tracked_camera_channels),
ann=camera_tools.lock_tracked_camera_channels.__doc__,
bgc=color.color
)
color.change()
pm.text(l='===== Vertigo =====')
pm.button('vertigo_setup_look_at_button',
l="Setup -> Look At",
c=repeated_callback(Render.vertigo_setup_look_at),
ann="Setup Look At",
bgc=color.color)
pm.button('vertigo_setup_vertigo_button',
l="Setup -> Vertigo",
c=repeated_callback(Render.vertigo_setup_vertigo),
ann="Setup Vertigo",
bgc=color.color)
pm.button('vertigo_delete_button',
l="Delete",
c=repeated_callback(Render.vertigo_delete),
ann="Delete",
bgc=color.color)
pm.text(l='===================')
pm.button('oyTracker2Null_button', l="oyTracker2Null",
c=repeated_callback(mel.eval, 'oyTracker2Null'),
ann="Tracker2Null",
bgc=color.color)
with pm.rowLayout(nc=3, adj=1):
def import_3dequalizer_points_callback():
    """Import 3DEqualizer points, reading the camera resolution from
    the width/height int fields next to the button."""
    width = pm.intField(
        'import_3DEqualizer_points_width_int_field', q=1, v=1
    )
    height = pm.intField(
        'import_3DEqualizer_points_height_int_field', q=1, v=1
    )
    camera_tools.import_3dequalizer_points(width, height)
pm.button(
'import_3DEqualizer_points_button', l="Import 3DEqualizer Points",
c=repeated_callback(import_3dequalizer_points_callback),
ann=camera_tools.import_3dequalizer_points.__doc__,
bgc=color.color
)
pm.intField('import_3DEqualizer_points_width_int_field', min=1, v=1920)
pm.intField('import_3DEqualizer_points_height_int_field', min=1, v=1080)
pm.text(l='===================')
color.change()
pm.button('reloadFileTextures_button',
l="reload file textures",
c=repeated_callback(Render.reload_file_textures),
ann="reload file textures",
bgc=color.color)
color.change()
pm.button('transfer_shaders_button',
l="Transfer Shaders",
c=repeated_callback(Render.transfer_shaders),
ann="Transfers shaders from one group to other, use it"
"for LookDev -> Alembic",
bgc=color.color)
color.change()
pm.button('fitPlacementToUV_button',
l="fit placement to UV",
c=repeated_callback(Render.fit_placement_to_UV),
ann="fit placement to UV",
bgc=color.color)
pm.button(
'connect_placement2d_to_file_texture_button',
l='Connect Placement2D to File Texture',
c=repeated_callback(Render.connect_placement2d_to_file),
ann=Render.connect_placement2d_to_file.__doc__,
bgc=color.color
)
color.change()
with pm.rowLayout(nc=2, adj=1):
def enable_subdiv_callback():
    """Enable adaptive (non-fixed) subdiv on the selected objects,
    using the max tessellation value from the UI int field."""
    subdiv_level = pm.intField('enable_subdiv_int_field', q=1, v=1)
    Render.enable_subdiv_on_selected(
        fixed_tes=False, max_subdiv=subdiv_level
    )
pm.button(
'enable_subdiv_on_selected_objects_button',
l='Enable Subdiv (Adaptive)',
c=repeated_callback(enable_subdiv_callback),
ann='Enables Arnold/RedShift Subdiv (catclark) on '
'selected objects',
bgc=color.color
)
pm.intField('enable_subdiv_int_field', min=0, v=3)
with pm.rowLayout(nc=2, adj=1):
def fixed_tess_callback():
    """Enable fixed-tessellation subdiv on the selected objects,
    using the tessellation value from the UI int field."""
    subdiv_level = pm.intField('fixed_tess_int_field', q=1, v=1)
    Render.enable_subdiv_on_selected(
        max_subdiv=subdiv_level, fixed_tes=True
    )
pm.button(
'enable_fixed_subdiv_on_selected_objects_button',
l='Enable Subdiv (Fixed Tes.)',
c=repeated_callback(fixed_tess_callback),
ann='Enables Arnold/RedShift Subdiv (catclark) on selected '
'objects with fixed tessellation',
bgc=color.color
)
pm.intField('fixed_tess_int_field', min=0, v=1)
pm.button(
    'disable_subdiv_on_selected_objects_button',
    l='Disable Subdiv',
    c=repeated_callback(Render.disable_subdiv_on_selected),
    # fix: ann referenced Render.disable_subdiv while the callback is
    # disable_subdiv_on_selected — use the matching docstring
    ann=Render.disable_subdiv_on_selected.__doc__,
    bgc=color.color
)
color.change()
pm.button(
'export_shader_data_button',
l='Export Shader Attributes',
c=repeated_callback(Render.export_shader_attributes),
ann=Render.export_shader_attributes.__doc__,
bgc=color.color
)
pm.button(
'import_shader_data_button',
l='Import Shader Attributes',
c=repeated_callback(Render.import_shader_attributes),
ann=Render.import_shader_attributes.__doc__,
bgc=color.color
)
color.change()
pm.button(
'export_shader_to_houdini_button',
l='Export Shader Assignments To Houdini',
c=repeated_callback(Render.export_shader_assignments_to_houdini),
ann=Render.export_shader_assignments_to_houdini.__doc__,
bgc=color.color
)
color.change()
pm.button(
'create_eye_shader_and_controls_button',
l='Create Eye Shader and Controls',
c=repeated_callback(Render.create_eye_shader_and_controls),
ann='Creates eye shaders and controls for the selected eyes',
bgc=color.color
)
pm.button(
'setup_outer_eye_render_attributes_button',
l='Setup Outer Eye Render Attributes',
c=repeated_callback(Render.setup_outer_eye_render_attributes),
ann=Render.setup_outer_eye_render_attributes.__doc__,
bgc=color.color
)
pm.button(
'setup_window_glass_render_attributes_button',
l='Setup **Window Glass** Render Attributes',
c=repeated_callback(Render.setup_window_glass_render_attributes),
ann=Render.setup_window_glass_render_attributes.__doc__,
bgc=color.color
)
pm.button(
'setup_dummy_window_light_button',
l='Setup/Update **Dummy Window** Light Plane',
c=repeated_callback(Render.dummy_window_light_plane),
ann=Render.dummy_window_light_plane.__doc__,
bgc=color.color
)
color.change()
pm.button(
    'create_generic_tooth_shader_button',
    l='Create Generic TOOTH Shader',
    c=repeated_callback(Render.create_generic_tooth_shader),
    # fix: tooltip previously showed the GUM shader docstring
    # (copy/paste error) — use the TOOTH shader's own docstring
    ann=Render.create_generic_tooth_shader.__doc__,
    bgc=color.color
)
pm.button(
'create_generic_gum_shader_button',
l='Create Generic GUM Shader',
c=repeated_callback(Render.create_generic_gum_shader),
ann=Render.create_generic_gum_shader.__doc__,
bgc=color.color
)
pm.button(
'create_generic_tongue_shader_button',
l='Create Generic TONGUE Shader',
c=repeated_callback(Render.create_generic_tongue_shader),
ann=Render.create_generic_tongue_shader.__doc__,
bgc=color.color
)
color.change()
pm.button('convert_to_ai_image_button',
l="To aiImage",
c=repeated_callback(
Render.convert_file_node_to_ai_image_node),
ann="Converts the selected File (file texture) nodes to "
"aiImage nodes, also connects the place2dTexture "
"node if necessary",
bgc=color.color)
color.change()
pm.button('to_bbox_button',
l="aiStandIn To BBox",
c=repeated_callback(Render.standin_to_bbox),
ann="Convert selected stand ins to bbox",
bgc=color.color)
pm.button('to_polywire_button',
l="aiStandIn To Polywire",
c=repeated_callback(Render.standin_to_polywire),
ann="Convert selected stand ins to polywire",
bgc=color.color)
color.change()
with pm.rowLayout(nc=3, adj=3, bgc=color.color):
min_range_field = pm.floatField(
minValue=1000,
maxValue=50000,
step=1,
pre=0,
value=3500,
w=50,
bgc=color.color,
ann='Min Value'
)
max_range_field = pm.floatField(
minValue=1000,
maxValue=50000,
step=1,
pre=0,
value=6500,
w=50,
bgc=color.color,
ann='Max Value'
)
pm.button(
ann="Randomize Color Temperature",
l="Randomize Color Temp.",
w=70,
c=repeated_callback(
Render.randomize_light_color_temp,
min_range_field,
max_range_field
),
bgc=color.color
)
with pm.rowLayout(nc=3, adj=3, bgc=color.color):
min_range_field = pm.floatField(
minValue=0,
maxValue=200,
step=0.1,
pre=1,
value=10,
w=50,
bgc=color.color,
ann='Min Value'
)
max_range_field = pm.floatField(
minValue=0,
maxValue=200,
step=0.1,
pre=1,
value=20,
w=50,
bgc=color.color,
ann='Max Value'
)
pm.button(
ann="Randomize Exposure",
l="Randomize Exposure",
w=70,
c=repeated_callback(
Render.randomize_light_intensity,
min_range_field,
max_range_field
),
bgc=color.color
)
color.change()
pm.button(
ann="Create Reflection Curve",
l="Reflection Curve",
c=repeated_callback(
Render.generate_reflection_curve
),
bgc=color.color
)
color.change()
pm.button(
ann="Import GPU Content",
l="Import GPU Content",
c=repeated_callback(
Render.import_gpu_content
),
bgc=color.color
)
color.change()
with pm.rowLayout(nc=3, adj=3, bgc=color.color):
source_driver_field = pm.textField(
text='S:',
w=50,
bgc=color.color,
ann='Source Driver'
)
target_driver_field = pm.textField(
text='L:',
w=50,
bgc=color.color,
ann='Target Driver'
)
pm.button(
ann="Move Cache Files to Another Location",
l="Move Cache Files",
w=70,
c=repeated_callback(
Render.move_cache_files_wrapper,
source_driver_field,
target_driver_field
),
bgc=color.color
)
__commands__.extend(render_columnLayout.children())
previs_columnLayout = pm.columnLayout(
'previs_columnLayout',
adj=True,
cal="center",
rs=row_spacing
)
with previs_columnLayout:
color.reset()
pm.button('split_camera_button',
l="Split Camera",
c=repeated_callback(Previs.split_camera),
ann=Previs.split_camera.__doc__,
bgc=color.color)
color.change()
pm.button('shots_from_camera_button',
l="Shots From Camera",
c=repeated_callback(Previs.shots_from_cams),
ann=Previs.shots_from_cams.__doc__,
bgc=color.color)
color.change()
pm.button('auto_rename_shots_button',
l="Auto Rename Shots",
c=repeated_callback(Previs.auto_rename_shots),
ann=Previs.auto_rename_shots.__doc__,
bgc=color.color)
color.change()
pm.button('save_previs_to_shots_button',
l="Save Previs To Shots",
c=repeated_callback(Previs.save_previs_to_shots),
ann=Previs.save_previs_to_shots.__doc__,
bgc=color.color)
color.change()
pm.button('very_nice_camera_rig_button',
l="Create a Very Nice Camera Rig",
c=repeated_callback(camera_tools.very_nice_camera_rig),
ann=camera_tools.very_nice_camera_rig.__doc__,
bgc=color.color)
__commands__.extend(previs_columnLayout.children())
animation_columnLayout = pm.columnLayout(
'animation_columnLayout',
adj=True,
cal="center",
rs=row_spacing
)
with animation_columnLayout:
color.reset()
color.change()
from anima.env.mayaEnv import picker
pm.text(l='===== Object Picker =====')
pm.button('picker_setParent_button',
l="Set Parent",
c=repeated_callback(picker.set_parent),
ann="Set Parent",
bgc=color.color)
pm.button('picker_releaseObject_button',
l="Release",
c=repeated_callback(picker.release_object),
ann="Release Object",
bgc=color.color)
pm.button('picker_editKeyframes_button',
l="Edit Keyframes",
c=repeated_callback(picker.edit_keyframes),
ann="Edit Keyframes",
bgc=color.color)
pm.button('picker_fixJump_button',
l="Fix Jump",
c=repeated_callback(picker.fix_jump),
ann="Fix Jump",
bgc=color.color)
pm.button('picker_explodeSetup_button',
l="Explode",
c=repeated_callback(picker.explode_setup),
ann="Explode Setup",
bgc=color.color)
color.change()
from anima.env.mayaEnv import pivot_switcher
pm.text(l='===== Pivot Switcher =====')
pm.button('oyPivotSwitcher_setupPivot_button',
l="Setup",
c=repeated_callback(pivot_switcher.setup_pivot),
ann="Setup Pivot",
bgc=color.color)
pm.button('oyPivotSwitcher_switchPivot_button',
l="Switch",
c=repeated_callback(pivot_switcher.switch_pivot),
ann="Switch Pivot",
bgc=color.color)
pm.button('oyPivotSwitcher_togglePivot_button',
l="Toggle",
c=repeated_callback(pivot_switcher.toggle_pivot),
ann="Toggle Pivot",
bgc=color.color)
color.change()
pm.text(l='===== Alembic Tools =====')
pm.button('bake_all_constraints_button',
l="Bake All Constraints",
c=repeated_callback(Animation.bake_all_constraints),
ann=Animation.bake_all_constraints.__doc__,
bgc=color.color)
pm.button('bake_alembic_animations_button',
l="Bake Alembic Animations",
c=repeated_callback(Animation.bake_alembic_animations),
ann=Animation.bake_alembic_animations.__doc__,
bgc=color.color)
rowLayout = pm.rowLayout(nc=2, adj=1, bgc=color.color)
with rowLayout:
pm.button(
'abc_from_selected_button',
l='From Selected',
c=repeated_callback(Animation.create_alembic_command),
ann='Creates Alembic Cache from selected nodes',
bgc=color.color
)
from_top_node_checkBox = pm.checkBox(
'from_top_node_checkBox',
l="Top Node",
value=True,
bgc=color.color
)
pm.text(l='===== EXPORT =====')
with pm.rowLayout(nc=3, adj=3):
pm.checkBoxGrp(
'export_alembic_of_nodes_checkbox_grp',
l='Alembic Options',
numberOfCheckBoxes=2,
labelArray2=['Isolate', 'Unload Refs'],
cl3=['left', 'left', 'left'],
cw3=[100, 60, 60],
valueArray2=[1, 1]
)
pm.intFieldGrp(
'export_alembic_of_nodes_handles_int_slider_grp',
l='Handles',
el='frames',
nf=1,
adj=2,
cw3=[65, 1, 20],
v1=1,
)
def export_alembic_callback_with_options(func):
    """Call *func* with the Alembic export options read from the UI.

    :param func: an exporter accepting ``isolate``, ``unload_refs``
        and ``handles`` keyword arguments.
    """
    options = pm.checkBoxGrp(
        'export_alembic_of_nodes_checkbox_grp',
        q=1,
        valueArray2=1
    )
    handle_count = pm.intFieldGrp(
        'export_alembic_of_nodes_handles_int_slider_grp', q=1, v1=1
    )
    func(
        isolate=options[0],
        unload_refs=options[1],
        handles=handle_count
    )
pm.button(
'export_alembic_of_selected_cacheable_nodes_button',
l='Selected Cacheable Nodes',
c=repeated_callback(export_alembic_callback_with_options, auxiliary.export_alembic_of_selected_cacheable_nodes),
ann=auxiliary.export_alembic_of_selected_cacheable_nodes.__doc__.split('\n')[0],
bgc=color.color
)
pm.button(
'export_alembic_of_all_cacheable_nodes_button',
l='ALL Cacheable Nodes',
c=repeated_callback(export_alembic_callback_with_options, auxiliary.export_alembic_of_all_cacheable_nodes),
ann=auxiliary.export_alembic_of_all_cacheable_nodes.__doc__.split('\n')[0],
bgc=color.color
)
pm.button(
'export_alembic_on_farm_button',
l='Export Alembic On Farm',
c=repeated_callback(Animation.export_alembics_on_farm),
ann=Animation.export_alembics_on_farm.__doc__.split('\n')[0],
bgc=color.color
)
pm.text(l='===== Playblast Tools =====')
color.change()
pm.button(
    'playblast_on_farm_button',
    # fix: label typo "PLayblast" -> "Playblast"
    l='Playblast On Farm',
    c=repeated_callback(Animation.playblast_on_farm),
    ann=Animation.playblast_on_farm.__doc__.split('\n')[0],
    bgc=color.color
)
pm.text(l='===== Exporters =====')
color.change()
rowLayout = pm.rowLayout(nc=3, adj=3, bgc=color.color)
with rowLayout:
start = int(pm.playbackOptions(q=1, minTime=1))
end = int(pm.playbackOptions(q=1, maxTime=1))
startButtonField = pm.textField(
text=start, w=50, bgc=color.color, ann='start frame'
)
endButtonField = pm.textField(
text=end, w=50, bgc=color.color, ann='end frame'
)
pm.button(ann="Exports maya camera to nuke",
l="cam2chan", w=70,
c=repeated_callback(
Animation.cam_2_chan,
startButtonField,
endButtonField
),
bgc=color.color)
pm.text(l='===== Component Animation =====')
color.change()
smooth_selected_keyframes_text_fbg = pm.textFieldButtonGrp(
'smooth_selected_keyframes_text_fbg_button',
bl="Smooth Selected Keyframes",
adj=2, tx=1, cw=(1, 40),
ann="select keyframes in graph editor to smooth",
bgc=color.color
)
    def smooth_selected_keyframes_text_fbg_callback():
        """Read the iteration count typed into the text field and smooth
        the keyframes currently selected in the graph editor that many
        times.
        """
        # The text field holds a user-entered string; int() raises
        # ValueError on non-numeric input (surfaces in the script editor).
        iteration = int(
            pm.textFieldButtonGrp(
                "smooth_selected_keyframes_text_fbg_button", q=1, tx=1
            )
        )
        Animation.smooth_selected_keyframes(iteration)
pm.textFieldButtonGrp(
smooth_selected_keyframes_text_fbg,
e=1,
bc=repeated_callback(
smooth_selected_keyframes_text_fbg_callback
)
)
smooth_component_anim = pm.textFieldButtonGrp(
'oySmoothComponentAnimation_button',
bl="Smooth Component Animation",
adj=2, tx=1, cw=(1, 40),
ann="select components to smooth",
bgc=color.color
)
pm.textFieldButtonGrp(
smooth_component_anim,
e=1,
bc=repeated_callback(
Animation.smooth_component_animation,
smooth_component_anim
)
)
color.change()
pm.button(
'bake_component_animation_button',
l='Bake component animation to Locator',
c=repeated_callback(Animation.bake_component_animation),
ann='Creates a locator at the center of selected components '
'and moves it with the components along the current '
'frame range',
bgc=color.color
)
pm.button(
'create_follicle_button',
l='Attach Follicle',
c=repeated_callback(Animation.attach_follicle),
ann='Attaches a follicle in the selected components',
bgc=color.color
)
pm.button(
'equalize_node_speed_button',
l='Equalize Node Speed',
c=repeated_callback(Animation.equalize_node_speed),
ann=Animation.equalize_node_speed.__doc__,
bgc=color.color
)
pm.text(l='===== Generic Tools =====')
color.change()
pm.button(
'set_range_from_shot_node_button',
l='Range From Shot',
c=repeated_callback(Animation.set_range_from_shot),
ann='Sets the playback range from the shot node in the scene',
bgc=color.color
)
color.change()
pm.button(
'delete_base_anim_layer_button',
l='Delete Base Anim Layer',
c=repeated_callback(Animation.delete_base_anim_layer),
ann=Animation.delete_base_anim_layer.__doc__,
bgc=color.color
)
__commands__.extend(animation_columnLayout.children())
obsolete_columnLayout = pm.columnLayout(
'obsolete_columnLayout',
adj=True,
cal="center",
ann="Obsolete",
rs=row_spacing
)
with obsolete_columnLayout:
color.reset()
pm.button('addMiLabel_button', l="add miLabel to selected",
c=repeated_callback(Render.add_miLabel),
ann="add miLabel to selected",
bgc=color.color)
color.change()
pm.button('connectFacingRatioToVCoord_button',
l="connect facingRatio to vCoord",
c=repeated_callback(
Render.connect_facingRatio_to_vCoord),
ann="connect facingRatio to vCoord",
bgc=color.color)
color.change()
with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1):
pm.text('miFinalGatherCast_text',
l="miFinalGatherCast",
bgc=color.color)
pm.button('set_miFinalGatherCast_ON_button', l="ON",
c=repeated_callback(
set_shape_attribute_wrapper,
"miFinalGatherCast",
1
),
bgc=(0, 1, 0))
pm.button('set_miFinalGatherCast_OFF_button', l="OFF",
c=repeated_callback(
set_shape_attribute_wrapper,
"miFinalGatherCast",
0
),
bgc=(1, 0, 0))
with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1):
pm.text('miFinalGatherReceive_text',
l="miFinalGatherReceive",
bgc=color.color)
pm.button('set_miFinalGatherReceive_ON_button', l="ON",
c=repeated_callback(
set_shape_attribute_wrapper,
"miFinalGatherReceive",
1
),
bgc=(0, 1, 0))
pm.button('set_miFinalGatherReceive_OFF_button',
l="OFF",
c=repeated_callback(
set_shape_attribute_wrapper,
"miFinalGatherReceive",
0
),
bgc=(1, 0, 0))
with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1):
pm.text('miFinalGatherHide_text',
l="miFinalGatherHide",
bgc=color.color)
pm.button('set_miFinalGatherHide_ON_button', l="ON",
c=repeated_callback(Render.set_finalGatherHide, 1),
bgc=(0, 1, 0))
pm.button('set_miFinalGatherHide_OFF_button', l="OFF",
c=repeated_callback(Render.set_finalGatherHide, 0),
bgc=(1, 0, 0))
color.change()
pm.button('convertToMRTexture_button',
l="use mib_texture_filter_lookup",
c=repeated_callback(
Render.use_mib_texture_filter_lookup),
ann=(
"adds an mib_texture_filter_lookup node in \n" +
"between the file nodes and their outputs, to \n" +
"get a sharper look output from the texture file"),
bgc=color.color)
pm.button('convertToLinear_button',
l="convert to Linear texture",
c=repeated_callback(Render.convert_to_linear),
ann="convert to Linear texture",
bgc=color.color)
pm.button('useImageSequence_button',
l="use image sequence for \nmentalrayTexture",
c=repeated_callback(Render.use_image_sequence),
ann="use image sequence for \nmentalrayTexture",
bgc=color.color)
color.change()
pm.button('oyAddToSelectedContainer_button',
l="add to selected container",
c=repeated_callback(Render.add_to_selected_container),
ann="add to selected container",
bgc=color.color)
pm.button('oyRemoveFromContainer_button',
l="remove from selected container",
c=repeated_callback(Render.remove_from_container),
ann="remove from selected container",
bgc=color.color)
color.change()
pm.button('oySmedgeRenderSlicer_button',
l="oySmedgeRenderSlicer",
c=repeated_callback(mel.eval, 'oySmedgeRenderSlicer'),
ann="SmedgeRenderSlicer",
bgc=color.color)
color.change()
pm.button(
'exponentialSmooth_button',
l="exponential smooth",
c=repeated_callback(Modeling.polySmoothFace, 0),
ann="applies exponential smooth to selected objects",
bgc=color.color
)
pm.button(
'linearSmooth_button',
l="linear smooth",
c=repeated_callback(Modeling.polySmoothFace, 1),
ann="applies linear smooth to selected objects",
bgc=color.color
)
pm.button(
'deActivateSmooth_button',
l="deActivate smooth",
c=repeated_callback(Modeling.activate_deActivate_smooth, 1),
ann="deActivates all polySmoothFace nodes in the "
"scene",
bgc=color.color
)
pm.button(
'activateSmooth_button',
l="activate smooth",
c=repeated_callback(Modeling.activate_deActivate_smooth, 0),
ann="activates all deActivated polySmoothFace nodes "
"in the scene",
bgc=color.color
)
pm.button(
'deleteSmooth_button',
l="delete smooth",
c=repeated_callback(Modeling.delete_smooth),
ann="deletes all the polySmoothFace nodes from the "
"scene",
bgc=color.color
)
pm.button(
'deleteSmoothOnSelected_button',
l="delete smooth on selected",
c=repeated_callback(Modeling.delete_smooth_on_selected),
ann="deletes selected polySmoothFace nodes from scene",
bgc=color.color
)
color.change()
pm.button(
'deleteAllSound_button', l="delete all sound",
c=repeated_callback(General.delete_all_sound),
ann="delete all sound",
bgc=color.color
)
pm.button(
'displayHandlesOfSelectedObjects_button',
l="toggle handles of selected objects",
c=repeated_callback(
General.toggle_attributes,
"displayHandle"
),
ann="select objects to toggle handle",
bgc=color.color
)
color.change()
pm.button(
'referenceSelectedObjects_button',
l="reference selected objects",
c=repeated_callback(
General.reference_selected_objects
),
ann="sets objects display override to reference",
bgc=color.color
)
pm.button(
'dereferenceSelectedObjects_button',
l="de-reference selected objects",
c=repeated_callback(
General.dereference_selected_objects
),
ann="sets objects display override to reference",
bgc=color.color
)
color.change()
pm.button(
'oyDeReferencer_button', l="dereferencer",
c=repeated_callback(General.dereferencer),
ann="sets all objects display override to normal",
bgc=color.color
)
color.change()
enable_matte_row_layout = pm.rowLayout(nc=6, adj=1)
with enable_matte_row_layout:
pm.text(
l='Enable Arnold Matte',
)
pm.button(
l='Default',
c=repeated_callback(Render.enable_matte, 0),
ann='Enables Arnold Matte on selected objects with <b>No Color</b>',
bgc=color.color
)
pm.button(
l='R',
c=repeated_callback(Render.enable_matte, 1),
ann='Enables Arnold Matte on selected objects with <b>Red</b>',
bgc=[1, 0, 0]
)
pm.button(
l='G',
c=repeated_callback(Render.enable_matte, 2),
ann='Enables Arnold Matte on selected objects with <b>Green</b>',
bgc=[0, 1, 0]
)
pm.button(
l='B',
c=repeated_callback(Render.enable_matte, 3),
ann='Enables Arnold Matte on selected objects with <b>Blue</b>',
bgc=[0, 0, 1]
)
pm.button(
l='A',
c=repeated_callback(Render.enable_matte, 4),
ann='Enables Arnold Matte on selected objects with <b>Alpha</b>',
bgc=[0.5, 0.5, 0.5]
)
color.change()
pm.button(
'fix_render_layer_out_adjustment_errors_button',
l="fixRenderLayerOutAdjustmentErrors",
c='pm.mel.eval("fixRenderLayerOutAdjustmentErrors();")',
ann="fixRenderLayerOutAdjustmentErrors",
bgc=color.color
)
pm.separator()
color.change()
with pm.rowLayout(nc=2, adj=2):
apply_to_hierarchy_checkBox = pm.checkBox(
'apply_to_hierarchy_checkBox',
l="Apply to Hierarchy",
value=True,
bgc=color.color
)
disable_undo_queue_check_box = pm.checkBox(
'disable_undo_queue_checkBox',
l="Disable Undo",
value=False,
bgc=color.color
)
attr_names = [
'castsShadows', 'receiveShadows', 'motionBlur',
'primaryVisibility', 'visibleInReflections',
'visibleInRefractions', 'aiSelfShadows', 'aiOpaque',
'aiVisibleInDiffuse', 'aiVisibleInGlossy', 'aiMatte',
'overrideShaders'
]
for attr_name in attr_names:
with pm.rowLayout(nc=4, rat=(1, "both", 0), adj=1):
pm.text('%s_text' % attr_name, l=attr_name, bgc=color.color)
pm.button(
'set_%s_ON_button' % attr_name,
l="ON",
c=repeated_callback(
set_shape_attribute_wrapper,
attr_name,
1,
),
bgc=(0, 1, 0)
)
pm.button(
'set_%s_OFF_button' % attr_name,
l="OFF",
c=repeated_callback(
set_shape_attribute_wrapper,
attr_name,
0
),
bgc=(1, 0, 0)
)
pm.button(
'set_%s_REMOVE_button' % attr_name,
l="REM",
ann='Remove Override',
c=repeated_callback(
set_shape_attribute_wrapper,
attr_name,
-1
),
bgc=(0, 0.5, 1)
)
pm.separator()
color.change()
pm.button(
l='Setup Z-Layer',
c=repeated_callback(Render.create_z_layer),
ann=Render.create_z_layer.__doc__,
bgc=color.color
)
pm.button(
l='Setup EA Matte',
c=repeated_callback(Render.create_ea_matte),
ann=Render.create_ea_matte.__doc__,
bgc=color.color
)
color.change()
pm.text(l='===== BarnDoor Simulator =====')
pm.button(
'barn_door_simulator_setup_button',
l='Setup',
c=repeated_callback(Render.barndoor_simulator_setup),
ann='Creates a arnold barn door simulator to the selected '
'light',
bgc=color.color
)
pm.button(
'barn_door_simulator_unsetup_button',
l='Un-Setup',
c=repeated_callback(Render.barndoor_simulator_unsetup),
ann='Removes the barn door simulator nodes from the selected '
'light',
bgc=color.color
)
pm.button(
'fix_barndoors_button',
l='Fix BarnDoors',
c=repeated_callback(Render.fix_barndoors),
ann=Render.fix_barndoors.__doc__,
bgc=color.color
)
color.change()
pm.button(
'ai_skin_sss_to_ai_skin_button',
l='aiSkinSSS --> aiSkin',
c=repeated_callback(Render.convert_aiSkinSSS_to_aiSkin),
ann=Render.convert_aiSkinSSS_to_aiSkin.__doc__,
bgc=color.color
)
pm.button(
'normalize_sss_weights_button',
l='Normalize SSS Weights',
c=repeated_callback(Render.normalize_sss_weights),
ann=Render.normalize_sss_weights.__doc__,
bgc=color.color
)
__commands__.extend(obsolete_columnLayout.children())
pm.tabLayout(
main_tab_layout,
edit=True,
tabLabel=[
(general_column_layout, "Gen"),
(reference_columnLayout, "Ref"),
(modeling_column_layout, "Mod"),
(rigging_columnLayout, "Rig"),
(render_columnLayout, "Ren"),
(previs_columnLayout, "Prev"),
(animation_columnLayout, "Ani"),
(obsolete_columnLayout, "Obs")
],
cc=functools.partial(store_tab_index, main_tab_layout)
)
dock_control = pm.dockControl(
"toolbox_dockControl",
l='toolbox',
content=toolbox_window,
area="left",
allowedArea=["left", "right"],
width=width
)
last_tab_index = get_last_tab_index()
if last_tab_index:
pm.tabLayout(
main_tab_layout,
e=1,
sti=last_tab_index
)
def store_tab_index(tab_layout):
    """Persist the currently selected tab of *tab_layout*.

    The selected-tab index is queried from the tab layout and written
    (as a string) into the process environment under the ``__last_tab__``
    key, so the toolbox can restore it on its next build.
    """
    current_index = pm.tabLayout(tab_layout, q=1, sti=1)
    os.environ[__last_tab__] = "%s" % current_index
def get_last_tab_index():
    """Return the previously stored tab index, or 0 if none was stored."""
    stored_value = os.environ.get(__last_tab__, 0)
    return int(stored_value)
| true | true |
f71cf84b76e986982228c0447aa806b21c91314f | 1,429 | py | Python | tests/test_distribution/test_von_mises_fisher.py | mdeegen/pb_bss | e8c380e27d82707e8d2b2d83c5c918d47ea5d89f | [
"MIT"
] | 171 | 2018-10-22T09:34:45.000Z | 2022-03-19T16:09:20.000Z | tests/test_distribution/test_von_mises_fisher.py | mdeegen/pb_bss | e8c380e27d82707e8d2b2d83c5c918d47ea5d89f | [
"MIT"
] | 19 | 2019-03-14T09:42:58.000Z | 2021-09-03T07:13:03.000Z | tests/test_distribution/test_von_mises_fisher.py | mdeegen/pb_bss | e8c380e27d82707e8d2b2d83c5c918d47ea5d89f | [
"MIT"
] | 40 | 2018-10-11T08:01:54.000Z | 2022-03-05T13:26:15.000Z | import numpy as np
from numpy.testing import assert_allclose, assert_equal
import unittest
from pb_bss.distribution import VonMisesFisher
from pb_bss.distribution import VonMisesFisherTrainer
class TestGaussian(unittest.TestCase):
    """Shape and round-trip sanity checks for ``VonMisesFisherTrainer``."""

    def test_shapes(self):
        """Fitting (samples, 3) data yields a (3,) mean and scalar concentration."""
        n_samples = 10000
        mu = np.ones((3,))
        cov = np.eye(3)
        observations = np.random.multivariate_normal(mu, cov, size=(n_samples,))
        fitted = VonMisesFisherTrainer().fit(observations)
        assert_equal(fitted.mean.shape, mu.shape)
        assert_equal(fitted.concentration.shape, ())

    def test_shapes_independent_dims(self):
        """A leading independent axis (here 13) is preserved by the trainer."""
        n_samples = 10000
        mu = np.ones((3,))
        cov = np.eye(3)
        observations = np.random.multivariate_normal(mu, cov, size=(13, n_samples,))
        fitted = VonMisesFisherTrainer().fit(observations)
        assert_equal(fitted.mean.shape, np.tile(mu, (13, 1)).shape)
        assert_equal(fitted.concentration.shape, (13,))

    def test_von_mises_fisher(self):
        """Sample-then-fit round trip; disabled until sampling is implemented."""
        n_samples = 10000
        mu = np.ones((3,))
        mu /= np.linalg.norm(mu, axis=-1)
        concentration = 50

        # ToDo: Implement VonMisesFisher(...).sample(...)
        return
        observations = VonMisesFisher(mu, concentration).sample(size=(n_samples,))
        fitted = VonMisesFisherTrainer().fit(observations)
        assert_allclose(fitted.mean, mu, atol=0.1)
        assert_allclose(fitted.covariance, concentration, atol=0.1)
| 35.725 | 80 | 0.660602 | import numpy as np
from numpy.testing import assert_allclose, assert_equal
import unittest
from pb_bss.distribution import VonMisesFisher
from pb_bss.distribution import VonMisesFisherTrainer
class TestGaussian(unittest.TestCase):
    """Shape and round-trip sanity checks for ``VonMisesFisherTrainer``."""
    def test_shapes(self):
        """Fitting (samples, 3) data yields a (3,) mean and scalar concentration."""
        samples = 10000
        mean = np.ones((3,))
        covariance = np.eye(3)
        x = np.random.multivariate_normal(mean, covariance, size=(samples,))
        model = VonMisesFisherTrainer().fit(x)
        assert_equal(model.mean.shape, mean.shape)
        assert_equal(model.concentration.shape, ())
    def test_shapes_independent_dims(self):
        """A leading independent axis (here 13) is preserved by the trainer."""
        samples = 10000
        mean = np.ones((3,))
        covariance = np.eye(3)
        x = np.random.multivariate_normal(mean, covariance, size=(13, samples,))
        model = VonMisesFisherTrainer().fit(x)
        assert_equal(model.mean.shape, np.tile(mean, (13, 1)).shape)
        assert_equal(model.concentration.shape, (13,))
    def test_von_mises_fisher(self):
        """Sample-then-fit round trip; disabled until sampling is implemented."""
        samples = 10000
        mean = np.ones((3,))
        mean /= np.linalg.norm(mean, axis=-1)
        concentration = 50
        # Early exit: VonMisesFisher(...).sample(...) is not implemented yet,
        # so everything below is intentionally unreachable (TODO).
        return
        x = VonMisesFisher(mean, concentration).sample(size=(samples,))
        model = VonMisesFisherTrainer().fit(x)
        assert_allclose(model.mean, mean, atol=0.1)
        assert_allclose(model.covariance, concentration, atol=0.1)
| true | true |
f71cf8aa46d7092006946a492f21beefc661135b | 443 | py | Python | src/constants/database_constants.py | davendiy/QWERTY_messenger | 6bfa5a6ceb7b63f3e57d3d7779a1cda26cd55616 | [
"MIT"
] | null | null | null | src/constants/database_constants.py | davendiy/QWERTY_messenger | 6bfa5a6ceb7b63f3e57d3d7779a1cda26cd55616 | [
"MIT"
] | null | null | null | src/constants/database_constants.py | davendiy/QWERTY_messenger | 6bfa5a6ceb7b63f3e57d3d7779a1cda26cd55616 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*-encoding: utf-8-*-
# created: 25.11.2019
# by David Zashkolny
# 3 course, comp math
# Taras Shevchenko National University of Kyiv
# email: davendiy@gmail.com
# Message content-type codes stored in the database.
TEXT = 0
IMAGE = 1
AUDIO = 2
VIDEO = 3
DOCUMENT = 4
# Set of all valid message-type codes (for validation).
MESSAGE_TYPES = {
    TEXT,
    IMAGE,
    AUDIO,
    VIDEO,
    DOCUMENT,
}
# Database table names.
CHANNELS = "Channels"
CHATS = "Chats"
USERS_CHATS = 'UsersChats'
USERS_CHANNELS = 'UsersChannels'
# Visibility flags for chats/channels.
PRIVATE = 1
PUBLIC = 0
| 13.84375 | 46 | 0.665914 |
# Message content-type codes stored in the database.
TEXT = 0
IMAGE = 1
AUDIO = 2
VIDEO = 3
DOCUMENT = 4
# Set of all valid message-type codes (for validation).
MESSAGE_TYPES = {
    TEXT,
    IMAGE,
    AUDIO,
    VIDEO,
    DOCUMENT,
}
# Database table names.
CHANNELS = "Channels"
CHATS = "Chats"
USERS_CHATS = 'UsersChats'
USERS_CHANNELS = 'UsersChannels'
# Visibility flags for chats/channels.
PRIVATE = 1
PUBLIC = 0
| true | true |
f71cf9824ffb509cab55ce293165655a7f35f31c | 1,730 | py | Python | code/extras/highway_layer.py | vamships/RelationPrediction | 45f48e8d09331e7244a7fe8d2d9d0fefa7e1f76b | [
"MIT"
] | 376 | 2017-09-10T14:29:16.000Z | 2022-03-17T04:01:53.000Z | code/extras/highway_layer.py | vamships/RelationPrediction | 45f48e8d09331e7244a7fe8d2d9d0fefa7e1f76b | [
"MIT"
] | 15 | 2018-07-28T23:44:53.000Z | 2021-08-21T17:33:55.000Z | code/extras/highway_layer.py | vamships/RelationPrediction | 45f48e8d09331e7244a7fe8d2d9d0fefa7e1f76b | [
"MIT"
] | 104 | 2017-11-20T13:50:04.000Z | 2022-03-31T14:30:47.000Z | import numpy as np
import tensorflow as tf
from model import Model
from common.shared_functions import glorot_variance, make_tf_variable, make_tf_bias
class HighwayLayer(Model):
    """Gated (highway) combination of two component embeddings.

    Combines the vertex codes of two sub-components with a learned
    sigmoid gate: ``gate * code_1 + (1 - gate) * code_2``, where the
    gate is computed from the second component's codes.
    """

    def __init__(self, shape, next_component=None, next_component_2=None):
        self.next_component = next_component
        self.next_component_2 = next_component_2
        self.shape = shape

        # Per-instance cache of the combined embeddings, keyed by mode.
        # BUG FIX: this used to be a class-level dict, so every
        # HighwayLayer instance shared (and overwrote) the same cache.
        self.vertex_embedding_function = {'train': None, 'test': None}

    def compute_vertex_embeddings(self, mode='train'):
        """Compute (and cache per mode) the gated combination of both codes."""
        if self.vertex_embedding_function[mode] is None:
            code_1 = self.next_component.get_all_codes(mode=mode)[0]
            code_2 = self.next_component_2.get_all_codes(mode=mode)[0]

            gates = self.get_gates(mode=mode)
            self.vertex_embedding_function[mode] = gates * code_1 + (1 - gates) * code_2

        return self.vertex_embedding_function[mode]

    def local_initialize_train(self):
        """Create the gate weight matrix and bias (bias initialized to 1)."""
        variance = glorot_variance(self.shape)
        self.W = make_tf_variable(0, variance, self.shape)
        self.b = make_tf_bias(self.shape[1], init=1)

    def local_get_weights(self):
        """Return this layer's trainable variables."""
        return [self.W, self.b]

    def get_gates(self, mode='train'):
        """Sigmoid gate derived from the second component's codes."""
        code = self.next_component_2.get_all_codes(mode=mode)[0]
        hidden = tf.matmul(code, self.W) + self.b
        return tf.nn.sigmoid(hidden)

    def get_all_codes(self, mode='train'):
        """Return (codes, None, codes) in the (subject, relation, object) slots."""
        collected_messages = self.compute_vertex_embeddings(mode=mode)
        return collected_messages, None, collected_messages

    def get_all_subject_codes(self, mode='train'):
        return self.compute_vertex_embeddings(mode=mode)

    def get_all_object_codes(self, mode='train'):
        return self.compute_vertex_embeddings(mode=mode)
| 34.6 | 86 | 0.701734 | import numpy as np
import tensorflow as tf
from model import Model
from common.shared_functions import glorot_variance, make_tf_variable, make_tf_bias
class HighwayLayer(Model):
    """Gated (highway) combination of two component embeddings.

    Combines the vertex codes of two sub-components with a learned
    sigmoid gate: ``gate * code_1 + (1 - gate) * code_2``, where the
    gate is computed from the second component's codes.
    """

    def __init__(self, shape, next_component=None, next_component_2=None):
        self.next_component = next_component
        self.next_component_2 = next_component_2
        self.shape = shape

        # Per-instance cache of the combined embeddings, keyed by mode.
        # BUG FIX: this used to be a class-level dict, so every
        # HighwayLayer instance shared (and overwrote) the same cache.
        self.vertex_embedding_function = {'train': None, 'test': None}

    def compute_vertex_embeddings(self, mode='train'):
        """Compute (and cache per mode) the gated combination of both codes."""
        if self.vertex_embedding_function[mode] is None:
            code_1 = self.next_component.get_all_codes(mode=mode)[0]
            code_2 = self.next_component_2.get_all_codes(mode=mode)[0]

            gates = self.get_gates(mode=mode)
            self.vertex_embedding_function[mode] = gates * code_1 + (1 - gates) * code_2

        return self.vertex_embedding_function[mode]

    def local_initialize_train(self):
        """Create the gate weight matrix and bias (bias initialized to 1)."""
        variance = glorot_variance(self.shape)
        self.W = make_tf_variable(0, variance, self.shape)
        self.b = make_tf_bias(self.shape[1], init=1)

    def local_get_weights(self):
        """Return this layer's trainable variables."""
        return [self.W, self.b]

    def get_gates(self, mode='train'):
        """Sigmoid gate derived from the second component's codes."""
        code = self.next_component_2.get_all_codes(mode=mode)[0]
        hidden = tf.matmul(code, self.W) + self.b
        return tf.nn.sigmoid(hidden)

    def get_all_codes(self, mode='train'):
        """Return (codes, None, codes) in the (subject, relation, object) slots."""
        collected_messages = self.compute_vertex_embeddings(mode=mode)
        return collected_messages, None, collected_messages

    def get_all_subject_codes(self, mode='train'):
        return self.compute_vertex_embeddings(mode=mode)

    def get_all_object_codes(self, mode='train'):
        return self.compute_vertex_embeddings(mode=mode)
| true | true |
f71cfa170d9e79942034aff33de0c5092954646d | 668 | py | Python | etc/check_fonts.py | UO-CIS211/panels | a97e814a44244cb53cbed8165056f3df69a3541e | [
"MIT"
] | 1 | 2019-03-20T18:18:12.000Z | 2019-03-20T18:18:12.000Z | etc/check_fonts.py | UO-CIS211/panels | a97e814a44244cb53cbed8165056f3df69a3541e | [
"MIT"
] | null | null | null | etc/check_fonts.py | UO-CIS211/panels | a97e814a44244cb53cbed8165056f3df69a3541e | [
"MIT"
] | null | null | null | #
# Utility to check availability and location of fonts
# for pygame
#
import pygame
pygame.font.init()   # Required or SysFont will break
# Candidate font names to probe on this system.
candidates = [
    "Helvetica",
    "helvetica",
    # "helvetica.ttf",
    "Avenir Next",
    "AvenirNext"
    ]
default = pygame.font.get_default_font()
print("System default font is '{}'".format(default))
for can in candidates:
    # match_font resolves a font name to a file path.
    path = pygame.font.match_font(can)
    sysfont = pygame.font.SysFont(can, 12)   # Breaks
    print("{} => {}".format(can, path))
    print("Sysfont {} => {}".format(can,sysfont))
# Finally list every font pygame can see.
print("Found fonts:")
fonts = pygame.font.get_fonts()
for font in fonts:
    print("-- {}".format(font))
| 20.242424 | 53 | 0.642216 |
# Utility to check availability and location of fonts for pygame.
import pygame
pygame.font.init()  # required before any font query (SysFont breaks otherwise)
# Candidate font names to probe on this system.
candidates = [
    "Helvetica",
    "helvetica",
    "Avenir Next",
    "AvenirNext"
    ]
default = pygame.font.get_default_font()
print("System default font is '{}'".format(default))
for can in candidates:
    # match_font resolves a font name to a file path.
    path = pygame.font.match_font(can)
    sysfont = pygame.font.SysFont(can, 12)
    print("{} => {}".format(can, path))
    print("Sysfont {} => {}".format(can,sysfont))
# Finally list every font pygame can see.
print("Found fonts:")
fonts = pygame.font.get_fonts()
for font in fonts:
    print("-- {}".format(font))
| true | true |
f71cfa2946ae7d25b13601e4e5fd1d6d17827f18 | 3,823 | py | Python | AutomatedTesting/Gem/PythonTests/Physics/tests/joints/Joints_Fixed2BodiesConstrained.py | whywhywhyw/o3de | 8e09f66799d4c8f188d45861d821e8656a554cb1 | [
"Apache-2.0",
"MIT"
] | 11 | 2021-07-08T09:58:26.000Z | 2022-03-17T17:59:26.000Z | AutomatedTesting/Gem/PythonTests/Physics/tests/joints/Joints_Fixed2BodiesConstrained.py | RoddieKieley/o3de | e804fd2a4241b039a42d9fa54eaae17dc94a7a92 | [
"Apache-2.0",
"MIT"
] | 29 | 2021-07-06T19:33:52.000Z | 2022-03-22T10:27:49.000Z | AutomatedTesting/Gem/PythonTests/Physics/tests/joints/Joints_Fixed2BodiesConstrained.py | RoddieKieley/o3de | e804fd2a4241b039a42d9fa54eaae17dc94a7a92 | [
"Apache-2.0",
"MIT"
] | 4 | 2021-07-06T19:24:43.000Z | 2022-03-31T12:42:27.000Z | """
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
# Test case ID : C18243580
# Test Case Title : Check that fixed joint constrains 2 bodies
# fmt: off
class Tests:
    """(pass message, fail message) tuples consumed by Report.*_result.

    The ``*_found`` entries are looked up dynamically by the Entity helper
    via ``Tests.__dict__[name + "_found"]``.
    """
    enter_game_mode = ("Entered game mode", "Failed to enter game mode")
    exit_game_mode = ("Exited game mode", "Couldn't exit game mode")
    lead_found = ("Found lead", "Did not find lead")
    follower_found = ("Found follower", "Did not find follower")
    check_lead_position = ("Lead moved in X direction", "Lead did not move in X direction")
    check_follower_position = ("Follower moved in X direction", "Follower did not move in X direction")
# fmt: on
def Joints_Fixed2BodiesConstrained():
    """
    Summary: Check that fixed joint constrains 2 bodies

    Level Description:
    lead - Starts above follower entity
    follower - Starts below lead entity. Constrained to lead entity with fixed joint. Starts with initial velocity of (5, 0, 0) in positive X direction.

    Expected Behavior: The follower entity moves in the positive X direction and the lead entity is dragged along towards the positive X direction.
    The x position of the lead entity is incremented from its original.

    Test Steps:
    1) Open Level
    2) Enter Game Mode
    3) Create and Validate Entities
    4) Wait for several seconds
    5) Check to see if lead entity and follower entity moved in positive X direction.
    6) Exit Game Mode
    7) Close Editor (performed by the test harness after this function returns)

    Note:
    - This test file must be called from the Open 3D Engine Editor command terminal
    - Any passed and failed tests are written to the Editor.log file.
      Parsing the file or running a log_monitor are required to observe the test results.

    :return: None
    """
    # NOTE(review): os/sys appear unused in this function — presumably kept
    # for parity with sibling tests; confirm before removing.
    import os
    import sys
    from editor_python_test_tools.utils import Report
    from editor_python_test_tools.utils import TestHelper as helper
    import azlmbr.legacy.general as general
    import azlmbr.bus
    from JointsHelper import JointEntity
    # Helper Entity class: resolves its pass/fail messages from the local
    # Tests dictionary using the entity name.
    class Entity(JointEntity):
        def criticalEntityFound(self): # Override function to use local Test dictionary
            Report.critical_result(Tests.__dict__[self.name + "_found"], self.id.isValid())
    # Main Script
    helper.init_idle()
    # 1) Open Level
    helper.open_level("Physics", "Joints_Fixed2BodiesConstrained")
    # 2) Enter Game Mode
    helper.enter_game_mode(Tests.enter_game_mode)
    # 3) Create and Validate Entities
    lead = Entity("lead")
    follower = Entity("follower")
    Report.info_vector3(lead.position, "lead initial position:")
    Report.info_vector3(follower.position, "follower initial position:")
    # Record starting X positions so movement can be detected after the wait.
    leadInitialPosition = lead.position.x
    followerInitialPosition = follower.position.x
    # 4) Wait for several seconds
    general.idle_wait(1.0) # wait for lead and follower to move
    # 5) Check to see if lead entity and follower entity moved in positive X direction.
    Report.info_vector3(lead.position, "lead position after 1 second:")
    Report.info_vector3(follower.position, "follower position after 1 second:")
    Report.critical_result(Tests.check_lead_position, lead.position.x > leadInitialPosition)
    Report.critical_result(Tests.check_follower_position, follower.position.x > followerInitialPosition)
    # 6) Exit Game Mode
    helper.exit_game_mode(Tests.exit_game_mode)
if __name__ == "__main__":
    # Entry point when executed from the O3DE Editor's Python terminal;
    # Report.start_test runs the test and records results to Editor.log.
    from editor_python_test_tools.utils import Report
    Report.start_test(Joints_Fixed2BodiesConstrained)
| 38.616162 | 152 | 0.699974 |
class Tests:
enter_game_mode = ("Entered game mode", "Failed to enter game mode")
exit_game_mode = ("Exited game mode", "Couldn't exit game mode")
lead_found = ("Found lead", "Did not find lead")
follower_found = ("Found follower", "Did not find follower")
check_lead_position = ("Lead moved in X direction", "Lead did not move in X direction")
check_follower_position = ("Follower moved in X direction", "Follower did not move in X direction")
# fmt: on
def Joints_Fixed2BodiesConstrained():
import os
import sys
from editor_python_test_tools.utils import Report
from editor_python_test_tools.utils import TestHelper as helper
import azlmbr.legacy.general as general
import azlmbr.bus
from JointsHelper import JointEntity
# Helper Entity class
class Entity(JointEntity):
def criticalEntityFound(self): # Override function to use local Test dictionary
Report.critical_result(Tests.__dict__[self.name + "_found"], self.id.isValid())
# Main Script
helper.init_idle()
# 1) Open Level
helper.open_level("Physics", "Joints_Fixed2BodiesConstrained")
# 2) Enter Game Mode
helper.enter_game_mode(Tests.enter_game_mode)
# 3) Create and Validate Entities
lead = Entity("lead")
follower = Entity("follower")
Report.info_vector3(lead.position, "lead initial position:")
Report.info_vector3(follower.position, "follower initial position:")
leadInitialPosition = lead.position.x
followerInitialPosition = follower.position.x
# 4) Wait for several seconds
general.idle_wait(1.0) # wait for lead and follower to move
# 5) Check to see if lead entity and follower entity moved in positive X direction.
Report.info_vector3(lead.position, "lead position after 1 second:")
Report.info_vector3(follower.position, "follower position after 1 second:")
Report.critical_result(Tests.check_lead_position, lead.position.x > leadInitialPosition)
Report.critical_result(Tests.check_follower_position, follower.position.x > followerInitialPosition)
# 6) Exit Game Mode
helper.exit_game_mode(Tests.exit_game_mode)
if __name__ == "__main__":
from editor_python_test_tools.utils import Report
Report.start_test(Joints_Fixed2BodiesConstrained)
| true | true |
f71cfa61b2b9cce8bfbbb52298ff209aec5dcc32 | 126 | py | Python | tests/regression/RandomReg_100/ws_RandomReg_100_Ridge_sqlite_code_gen.py | antoinecarme/sklearn2sql_heroku | d680db10683daa419324461eeea851dd8b103ad5 | [
"BSD-3-Clause"
] | 1 | 2019-07-09T14:45:18.000Z | 2019-07-09T14:45:18.000Z | tests/regression/RandomReg_100/ws_RandomReg_100_Ridge_sqlite_code_gen.py | antoinecarme/sklearn2sql_heroku | d680db10683daa419324461eeea851dd8b103ad5 | [
"BSD-3-Clause"
] | 5 | 2017-11-13T13:35:37.000Z | 2021-11-11T12:57:20.000Z | tests/regression/RandomReg_100/ws_RandomReg_100_Ridge_sqlite_code_gen.py | antoinecarme/sklearn2sql_heroku | d680db10683daa419324461eeea851dd8b103ad5 | [
"BSD-3-Clause"
] | 1 | 2021-09-19T15:05:33.000Z | 2021-09-19T15:05:33.000Z | from sklearn2sql_heroku.tests.regression import generic as reg_gen
reg_gen.test_model("Ridge" , "RandomReg_100" , "sqlite")
| 25.2 | 66 | 0.793651 | from sklearn2sql_heroku.tests.regression import generic as reg_gen
# Exercise SQL code generation for a Ridge regressor on the
# RandomReg_100 dataset against a sqlite backend.
reg_gen.test_model("Ridge" , "RandomReg_100" , "sqlite")
| true | true |
f71cfab1efdc6e0bd803f231c2d34fb3ee25c532 | 14,305 | py | Python | tests/providers/hashicorp/secrets/test_vault.py | emilioego/airflow | 3457c7847cd24413ff5b622e65c27d8370f94502 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 79 | 2021-10-15T07:32:27.000Z | 2022-03-28T04:10:19.000Z | tests/providers/hashicorp/secrets/test_vault.py | emilioego/airflow | 3457c7847cd24413ff5b622e65c27d8370f94502 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 153 | 2021-10-15T05:23:46.000Z | 2022-02-23T06:07:10.000Z | tests/providers/hashicorp/secrets/test_vault.py | emilioego/airflow | 3457c7847cd24413ff5b622e65c27d8370f94502 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 23 | 2021-10-15T02:36:37.000Z | 2022-03-17T02:59:27.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from unittest import TestCase, mock
from hvac.exceptions import InvalidPath, VaultError
from airflow.providers.hashicorp.secrets.vault import VaultBackend
class TestVaultSecrets(TestCase):
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_conn_uri(self, mock_hvac):
        """get_conn_uri returns the ``conn_uri`` field of a KV v2 secret."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        # Canned hvac KV v2 response; only data.data.conn_uri is consumed here.
        mock_client.secrets.kv.v2.read_secret_version.return_value = {
            'request_id': '94011e25-f8dc-ec29-221b-1f9c1d9ad2ae',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 0,
            'data': {
                'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'},
                'metadata': {
                    'created_time': '2020-03-16T21:01:43.331126Z',
                    'deletion_time': '',
                    'destroyed': False,
                    'version': 1,
                },
            },
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_conn_uri(conn_id="test_postgres")
        self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri)
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_conn_uri_engine_version_1(self, mock_hvac):
        """With kv_engine_version=1 the backend reads via secrets.kv.v1.read_secret."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        # Canned hvac KV v1 response; v1 has no nested data/metadata wrapper.
        mock_client.secrets.kv.v1.read_secret.return_value = {
            'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 2764800,
            'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'},
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
            "kv_engine_version": 1,
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_conn_uri(conn_id="test_postgres")
        # The v1 API must be called with the mount point and full secret path.
        mock_client.secrets.kv.v1.read_secret.assert_called_once_with(
            mount_point='airflow', path='connections/test_postgres'
        )
        self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri)
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_conn_uri_engine_version_1_custom_auth_mount_point(self, mock_hvac):
        """A custom auth_mount_point is forwarded to the vault client and
        does not change where secrets are read from."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        # Canned hvac KV v1 response; v1 has no nested data/metadata wrapper.
        mock_client.secrets.kv.v1.read_secret.return_value = {
            'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 2764800,
            'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'},
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_mount_point": "custom",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
            "kv_engine_version": 1,
        }
        test_client = VaultBackend(**kwargs)
        # The custom auth mount point must reach the underlying vault client.
        self.assertEqual("custom", test_client.vault_client.auth_mount_point)
        returned_uri = test_client.get_conn_uri(conn_id="test_postgres")
        mock_client.secrets.kv.v1.read_secret.assert_called_once_with(
            mount_point='airflow', path='connections/test_postgres'
        )
        self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri)
    @mock.patch.dict(
        'os.environ',
        {
            'AIRFLOW_CONN_TEST_MYSQL': 'mysql://airflow:airflow@host:5432/airflow',
        },
    )
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_conn_uri_non_existent_key(self, mock_hvac):
        """
        When Vault raises InvalidPath for the connection key, get_conn_uri
        returns None and get_connections returns [].  The AIRFLOW_CONN_* env
        var is set so the result demonstrably does not come from this backend.
        """
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        # Response does not contain the requested key
        mock_client.secrets.kv.v2.read_secret_version.side_effect = InvalidPath()
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
        }
        test_client = VaultBackend(**kwargs)
        self.assertIsNone(test_client.get_conn_uri(conn_id="test_mysql"))
        mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with(
            mount_point='airflow', path='connections/test_mysql', version=None
        )
        self.assertEqual([], test_client.get_connections(conn_id="test_mysql"))
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_variable_value(self, mock_hvac):
        """Variable value is taken from the ``value`` key of a KV v2 secret."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v2.read_secret_version.return_value = {
            'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 0,
            'data': {
                'data': {'value': 'world'},
                'metadata': {
                    'created_time': '2020-03-28T02:10:54.301784Z',
                    'deletion_time': '',
                    'destroyed': False,
                    'version': 1,
                },
            },
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "variables_path": "variables",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_variable("hello")
        self.assertEqual('world', returned_uri)
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_variable_value_engine_version_1(self, mock_hvac):
        """With kv_engine_version=1 the variable is read via ``kv.v1.read_secret``."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v1.read_secret.return_value = {
            'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 2764800,
            'data': {'value': 'world'},
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "variables_path": "variables",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
            "kv_engine_version": 1,
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_variable("hello")
        mock_client.secrets.kv.v1.read_secret.assert_called_once_with(
            mount_point='airflow', path='variables/hello'
        )
        self.assertEqual('world', returned_uri)
    @mock.patch.dict(
        'os.environ',
        {
            'AIRFLOW_VAR_HELLO': 'world',
        },
    )
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_variable_value_non_existent_key(self, mock_hvac):
        """
        When Vault raises InvalidPath for the variable key, get_variable
        returns None.  The AIRFLOW_VAR_* env var is set so the result
        demonstrably does not come from this backend.
        """
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        # Response does not contain the requested key
        mock_client.secrets.kv.v2.read_secret_version.side_effect = InvalidPath()
        kwargs = {
            "variables_path": "variables",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
        }
        test_client = VaultBackend(**kwargs)
        self.assertIsNone(test_client.get_variable("hello"))
        mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with(
            mount_point='airflow', path='variables/hello', version=None
        )
        self.assertIsNone(test_client.get_variable("hello"))
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_auth_failure_raises_error(self, mock_hvac):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
mock_client.is_authenticated.return_value = False
kwargs = {
"connections_path": "connections",
"mount_point": "airflow",
"auth_type": "token",
"url": "http://127.0.0.1:8200",
"token": "test_wrong_token",
}
with self.assertRaisesRegex(VaultError, "Vault Authentication Error!"):
VaultBackend(**kwargs).get_connections(conn_id='test')
def test_auth_type_kubernetes_with_unreadable_jwt_raises_error(self):
path = "/var/tmp/this_does_not_exist/334e918ef11987d3ef2f9553458ea09f"
kwargs = {
"auth_type": "kubernetes",
"kubernetes_role": "default",
"kubernetes_jwt_path": path,
"url": "http://127.0.0.1:8200",
}
with self.assertRaisesRegex(FileNotFoundError, path):
VaultBackend(**kwargs).get_connections(conn_id='test')
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_config_value(self, mock_hvac):
        """Config option is taken from the ``value`` key of a KV v2 secret under configs_path."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v2.read_secret_version.return_value = {
            'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 0,
            'data': {
                'data': {'value': 'sqlite:////Users/airflow/airflow/airflow.db'},
                'metadata': {
                    'created_time': '2020-03-28T02:10:54.301784Z',
                    'deletion_time': '',
                    'destroyed': False,
                    'version': 1,
                },
            },
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "configs_path": "configurations",
            "mount_point": "secret",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK",
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_config("sql_alchemy_conn")
        self.assertEqual('sqlite:////Users/airflow/airflow/airflow.db', returned_uri)
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_connections_path_none_value(self, mock_hvac):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
kwargs = {
"connections_path": None,
"mount_point": "airflow",
"auth_type": "token",
"url": "http://127.0.0.1:8200",
"token": "s.FnL7qg0YnHZDpf4zKKuFy0UK",
}
test_client = VaultBackend(**kwargs)
self.assertIsNone(test_client.get_conn_uri(conn_id="test"))
mock_hvac.Client.assert_not_called()
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_variables_path_none_value(self, mock_hvac):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
kwargs = {
"variables_path": None,
"mount_point": "airflow",
"auth_type": "token",
"url": "http://127.0.0.1:8200",
"token": "s.FnL7qg0YnHZDpf4zKKuFy0UK",
}
test_client = VaultBackend(**kwargs)
self.assertIsNone(test_client.get_variable("hello"))
mock_hvac.Client.assert_not_called()
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
def test_config_path_none_value(self, mock_hvac):
mock_client = mock.MagicMock()
mock_hvac.Client.return_value = mock_client
kwargs = {
"config_path": None,
"mount_point": "airflow",
"auth_type": "token",
"url": "http://127.0.0.1:8200",
"token": "s.FnL7qg0YnHZDpf4zKKuFy0UK",
}
test_client = VaultBackend(**kwargs)
self.assertIsNone(test_client.get_config("test"))
mock_hvac.Client.assert_not_called()
| 38.766938 | 101 | 0.602517 |
from unittest import TestCase, mock
from hvac.exceptions import InvalidPath, VaultError
from airflow.providers.hashicorp.secrets.vault import VaultBackend
class TestVaultSecrets(TestCase):
    """Tests for the HashiCorp Vault secrets backend.

    Every test mocks the underlying ``hvac`` client and exercises connection,
    variable and config lookups for KV engine versions 1 and 2, plus the
    error and short-circuit paths.
    """
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_conn_uri(self, mock_hvac):
        """Connection URI is read from the nested ``data.data`` payload of a KV v2 secret."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v2.read_secret_version.return_value = {
            'request_id': '94011e25-f8dc-ec29-221b-1f9c1d9ad2ae',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 0,
            'data': {
                'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'},
                'metadata': {
                    'created_time': '2020-03-16T21:01:43.331126Z',
                    'deletion_time': '',
                    'destroyed': False,
                    'version': 1,
                },
            },
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_conn_uri(conn_id="test_postgres")
        self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri)
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_conn_uri_engine_version_1(self, mock_hvac):
        """With kv_engine_version=1 the backend calls ``kv.v1.read_secret`` (flat ``data``)."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v1.read_secret.return_value = {
            'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 2764800,
            'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'},
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
            "kv_engine_version": 1,
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_conn_uri(conn_id="test_postgres")
        mock_client.secrets.kv.v1.read_secret.assert_called_once_with(
            mount_point='airflow', path='connections/test_postgres'
        )
        self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri)
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_conn_uri_engine_version_1_custom_auth_mount_point(self, mock_hvac):
        """``auth_mount_point`` is propagated to the internal vault client."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v1.read_secret.return_value = {
            'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 2764800,
            'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'},
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_mount_point": "custom",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
            "kv_engine_version": 1,
        }
        test_client = VaultBackend(**kwargs)
        self.assertEqual("custom", test_client.vault_client.auth_mount_point)
        returned_uri = test_client.get_conn_uri(conn_id="test_postgres")
        mock_client.secrets.kv.v1.read_secret.assert_called_once_with(
            mount_point='airflow', path='connections/test_postgres'
        )
        self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri)
    @mock.patch.dict(
        'os.environ',
        {
            'AIRFLOW_CONN_TEST_MYSQL': 'mysql://airflow:airflow@host:5432/airflow',
        },
    )
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_conn_uri_non_existent_key(self, mock_hvac):
        """InvalidPath from Vault yields None from get_conn_uri and [] from get_connections."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v2.read_secret_version.side_effect = InvalidPath()
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
        }
        test_client = VaultBackend(**kwargs)
        self.assertIsNone(test_client.get_conn_uri(conn_id="test_mysql"))
        mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with(
            mount_point='airflow', path='connections/test_mysql', version=None
        )
        self.assertEqual([], test_client.get_connections(conn_id="test_mysql"))
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_variable_value(self, mock_hvac):
        """Variable value is taken from the ``value`` key of a KV v2 secret."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v2.read_secret_version.return_value = {
            'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 0,
            'data': {
                'data': {'value': 'world'},
                'metadata': {
                    'created_time': '2020-03-28T02:10:54.301784Z',
                    'deletion_time': '',
                    'destroyed': False,
                    'version': 1,
                },
            },
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "variables_path": "variables",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_variable("hello")
        self.assertEqual('world', returned_uri)
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_variable_value_engine_version_1(self, mock_hvac):
        """With kv_engine_version=1 the variable is read via ``kv.v1.read_secret``."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v1.read_secret.return_value = {
            'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 2764800,
            'data': {'value': 'world'},
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "variables_path": "variables",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
            "kv_engine_version": 1,
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_variable("hello")
        mock_client.secrets.kv.v1.read_secret.assert_called_once_with(
            mount_point='airflow', path='variables/hello'
        )
        self.assertEqual('world', returned_uri)
    @mock.patch.dict(
        'os.environ',
        {
            'AIRFLOW_VAR_HELLO': 'world',
        },
    )
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_variable_value_non_existent_key(self, mock_hvac):
        """InvalidPath from Vault yields None for the variable lookup."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v2.read_secret_version.side_effect = InvalidPath()
        kwargs = {
            "variables_path": "variables",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS",
        }
        test_client = VaultBackend(**kwargs)
        self.assertIsNone(test_client.get_variable("hello"))
        mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with(
            mount_point='airflow', path='variables/hello', version=None
        )
        self.assertIsNone(test_client.get_variable("hello"))
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_auth_failure_raises_error(self, mock_hvac):
        """A client whose is_authenticated() is False must surface a VaultError."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.is_authenticated.return_value = False
        kwargs = {
            "connections_path": "connections",
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "test_wrong_token",
        }
        with self.assertRaisesRegex(VaultError, "Vault Authentication Error!"):
            VaultBackend(**kwargs).get_connections(conn_id='test')
    def test_auth_type_kubernetes_with_unreadable_jwt_raises_error(self):
        """Kubernetes auth with an unreadable JWT path raises FileNotFoundError."""
        path = "/var/tmp/this_does_not_exist/334e918ef11987d3ef2f9553458ea09f"
        kwargs = {
            "auth_type": "kubernetes",
            "kubernetes_role": "default",
            "kubernetes_jwt_path": path,
            "url": "http://127.0.0.1:8200",
        }
        with self.assertRaisesRegex(FileNotFoundError, path):
            VaultBackend(**kwargs).get_connections(conn_id='test')
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_get_config_value(self, mock_hvac):
        """Config option is taken from the ``value`` key of a KV v2 secret under configs_path."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v2.read_secret_version.return_value = {
            'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 0,
            'data': {
                'data': {'value': 'sqlite:////Users/airflow/airflow/airflow.db'},
                'metadata': {
                    'created_time': '2020-03-28T02:10:54.301784Z',
                    'deletion_time': '',
                    'destroyed': False,
                    'version': 1,
                },
            },
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        kwargs = {
            "configs_path": "configurations",
            "mount_point": "secret",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK",
        }
        test_client = VaultBackend(**kwargs)
        returned_uri = test_client.get_config("sql_alchemy_conn")
        self.assertEqual('sqlite:////Users/airflow/airflow/airflow.db', returned_uri)
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_connections_path_none_value(self, mock_hvac):
        """connections_path=None short-circuits to None without creating an hvac client."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        kwargs = {
            "connections_path": None,
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK",
        }
        test_client = VaultBackend(**kwargs)
        self.assertIsNone(test_client.get_conn_uri(conn_id="test"))
        mock_hvac.Client.assert_not_called()
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_variables_path_none_value(self, mock_hvac):
        """variables_path=None short-circuits to None without creating an hvac client."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        kwargs = {
            "variables_path": None,
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK",
        }
        test_client = VaultBackend(**kwargs)
        self.assertIsNone(test_client.get_variable("hello"))
        mock_hvac.Client.assert_not_called()
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    def test_config_path_none_value(self, mock_hvac):
        """config_path=None short-circuits to None without creating an hvac client."""
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        kwargs = {
            "config_path": None,
            "mount_point": "airflow",
            "auth_type": "token",
            "url": "http://127.0.0.1:8200",
            "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK",
        }
        test_client = VaultBackend(**kwargs)
        self.assertIsNone(test_client.get_config("test"))
        mock_hvac.Client.assert_not_called()
| true | true |
f71cfb79c7c2c9361f5f9e6f721e707abbbcb15a | 16,113 | py | Python | qa/rpc-tests/p2p-fullblocktest.py | mirzaei-ce/core-alisinabit | 9929923df19fc9f03eb02fa056f325c9a284cfcf | [
"MIT"
] | null | null | null | qa/rpc-tests/p2p-fullblocktest.py | mirzaei-ce/core-alisinabit | 9929923df19fc9f03eb02fa056f325c9a284cfcf | [
"MIT"
] | null | null | null | qa/rpc-tests/p2p-fullblocktest.py | mirzaei-ce/core-alisinabit | 9929923df19fc9f03eb02fa056f325c9a284cfcf | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.blocktools import *
import time
from test_framework.key import CECKey
from test_framework.script import CScript, SignatureHash, SIGHASH_ALL, OP_TRUE, OP_FALSE
class PreviousSpendableOutput(object):
    """Reference to a spendable txout: a transaction plus an output index."""
    def __init__(self, tx=None, n=-1):
        # Bug fix: the original default `tx = CTransaction()` was evaluated
        # once at definition time, so every default-constructed instance
        # shared (and could mutate) the SAME transaction object.  Use a None
        # sentinel and build a fresh transaction per instance instead.
        self.tx = CTransaction() if tx is None else tx
        self.n = n  # the output we're spending
'''
This reimplements tests from the alisinabitj/FullBlockTestGenerator used
by the pull-tester.
We use the testing framework in which we expect a particular answer from
each test.
'''
class FullBlockTest(ComparisonTestFramework):
    ''' Can either run this test as 1 node with expected answers, or two and compare them.
    Change the "outcome" variable from each TestInstance object to only do the comparison. '''
    def __init__(self):
        """Set up block bookkeeping and the key used to sign coinbase spends."""
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(bytes("horsebattery"))
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        # Block timestamps start just after the current wall-clock time.
        self.block_time = int(time.time())+1
        self.tip = None
        self.blocks = {}
    def run_test(self):
        """Wire up node connections and drive the comparison test manager."""
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start() # Start up network handling in another thread
        test.run()
    def add_transactions_to_block(self, block, tx_list):
        """Append tx_list to block and refresh its merkle root and hash."""
        [ tx.rehash() for tx in tx_list ]
        block.vtx.extend(tx_list)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        return block
    # Create a block on top of self.tip, and advance self.tip to point to the new block
    # if spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend output,
    # and rest will go to fees.
    def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
        """Build, solve and register block `number` on top of self.tip."""
        if self.tip == None:
            base_block_hash = self.genesis_hash
        else:
            base_block_hash = self.tip.sha256
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if (spend != None):
            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, self.block_time)
        if (spend != None):
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), "", 0xffffffff)) # no signature yet
            # This copies the java comparison tool testing behavior: the first
            # txout has a garbage scriptPubKey, "to make sure we're not
            # pre-verifying too much" (?)
            tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255])))
            if script == None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            # Now sign it if necessary
            scriptSig = ""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if (scriptPubKey[0] == OP_TRUE): # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL)
                scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            block = self.add_transactions_to_block(block, [tx])
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        self.block_time += 1
        assert number not in self.blocks
        self.blocks[number] = block
        return block
    def get_tests(self):
        """Yield TestInstances covering chain reorgs, fee, sigop and size rules."""
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []
        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)
        # get an output that we previous marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)
        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])
        # returns a test case that asserts that the current tip was rejected
        def rejected(reject = None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])
        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]
        # add transactions to a block produced by next_block
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            old_hash = block.sha256
            self.add_transactions_to_block(block, new_transactions)
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            self.block_heights[block.sha256] = self.block_heights[old_hash]
            del self.block_heights[old_hash]
            self.blocks[block_number] = block
            return block
        # creates a new block and advances the tip to that block
        block = self.next_block
        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()
        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(1000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test
        # Start by building a couple of blocks on top (which output is spent is
        # in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        out0 = get_spendable_output()
        block(1, spend=out0)
        save_spendable_output()
        yield accepted()
        out1 = get_spendable_output()
        b2 = block(2, spend=out1)
        yield accepted()
        # so fork like this:
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        #
        # Nothing should happen at this point. We saw b2 first so it takes priority.
        tip(1)
        b3 = block(3, spend=out1)
        txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1)
        yield rejected()
        # Now we add another block to make the alternative chain longer.
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        out2 = get_spendable_output()
        block(4, spend=out2)
        yield accepted()
        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        tip(2)
        block(5, spend=out2)
        save_spendable_output()
        yield rejected()
        out3 = get_spendable_output()
        block(6, spend=out3)
        yield accepted()
        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(7, spend=out2)
        yield rejected()
        out4 = get_spendable_output()
        block(8, spend=out4)
        yield rejected()
        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                    \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(6)
        block(9, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, 'bad-cb-amount'))
        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(10, spend=out3)
        yield rejected()
        block(11, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, 'bad-cb-amount'))
        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5  (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                                              (b12 added last)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        b12 = block(12, spend=out3)
        save_spendable_output()
        #yield TestInstance([[b12, False]])
        b13 = block(13, spend=out4)
        # Deliver the block header for b12, and the block b13.
        # b13 should be accepted but the tip won't advance until b12 is delivered.
        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
        save_spendable_output()
        out5 = get_spendable_output()
        # b14 is invalid, but the node won't know that until it tries to connect
        # Tip still can't advance because b12 is missing
        block(14, spend=out5, additional_coinbase_value=1)
        yield rejected()
        yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.
        # Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
        #                      \-> b3 (1) -> b4 (2)
        # Test that a block with a lot of checksigs is okay
        lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50 - 1))
        tip(13)
        block(15, spend=out5, script=lots_of_checksigs)
        yield accepted()
        # Test that a block with too many checksigs is rejected
        out6 = get_spendable_output()
        too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50))
        block(16, spend=out6, script=too_many_checksigs)
        yield rejected(RejectResult(16, 'bad-blk-sigops'))
        # Attempt to spend a transaction created on a different fork
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        block(17, spend=txout_b3)
        yield rejected(RejectResult(16, 'bad-txns-inputs-missingorspent'))
        # Attempt to spend a transaction created on a different fork (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b18 (b3.vtx[1]) -> b19 (6)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(18, spend=txout_b3)
        yield rejected()
        block(19, spend=out6)
        yield rejected()
        # Attempt to spend a coinbase at depth too low
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        out7 = get_spendable_output()
        block(20, spend=out7)
        yield rejected(RejectResult(16, 'bad-txns-premature-spend-of-coinbase'))
        # Attempt to spend a coinbase at depth too low (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b21 (6) -> b22 (5)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(21, spend=out6)
        yield rejected()
        block(22, spend=out5)
        yield rejected()
        # Create a block on either side of MAX_BLOCK_SIZE and make sure its accepted/rejected
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
        #                                                                           \-> b24 (6) -> b25 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b23 = block(23, spend=out6)
        old_hash = b23.sha256
        tx = CTransaction()
        script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69
        script_output = CScript([chr(0)*script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1)))
        b23 = update_block(23, [tx])
        # Make sure the math above worked out to produce a max-sized block
        assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE)
        yield accepted()
        # Make the next block one byte bigger and check that it fails
        tip(15)
        b24 = block(24, spend=out6)
        script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69
        script_output = CScript([chr(0)*(script_length+1)])
        tx.vout = [CTxOut(0, script_output)]
        b24 = update_block(24, [tx])
        assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1)
        yield rejected(RejectResult(16, 'bad-blk-length'))
        b25 = block(25, spend=out7)
        yield rejected()
        # Create blocks with a coinbase input script size out of range
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
        #                                                                           \-> ... (6) -> ... (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b26 = block(26, spend=out6)
        b26.vtx[0].vin[0].scriptSig = chr(0)
        b26.vtx[0].rehash()
        # update_block causes the merkle root to get updated, even with no new
        # transactions, and updates the required state.
        b26 = update_block(26, [])
        yield rejected(RejectResult(16, 'bad-cb-length'))
        # Extend the b26 chain to make sure alisinabitd isn't accepting b26
        b27 = block(27, spend=out7)
        yield rejected()
        # Now try a too-large-coinbase script
        tip(15)
        b28 = block(28, spend=out6)
        b28.vtx[0].vin[0].scriptSig = chr(0)*101
        b28.vtx[0].rehash()
        b28 = update_block(28, [])
        yield rejected(RejectResult(16, 'bad-cb-length'))
        # Extend the b28 chain to make sure alisinabitd isn't accepted b28
        b29 = block(29, spend=out7)
        # TODO: Should get a reject message back with "bad-prevblk", except
        # there's a bug that prevents this from being detected.  Just note
        # failure for now, and add the reject result later.
        yield rejected()
        # b30 has a max-sized coinbase scriptSig.
        tip(23)
        b30 = block(30)
        b30.vtx[0].vin[0].scriptSig = chr(0)*100
        b30.vtx[0].rehash()
        b30 = update_block(30, [])
        yield accepted()
# Script entry point: run the full-block test through the shared framework.
if __name__ == '__main__':
    FullBlockTest().main()
| 40.08209 | 106 | 0.536461 |
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.blocktools import *
import time
from test_framework.key import CECKey
from test_framework.script import CScript, SignatureHash, SIGHASH_ALL, OP_TRUE, OP_FALSE
class PreviousSpendableOutput(object):
    """Reference to a spendable txout: a transaction plus an output index."""
    def __init__(self, tx=None, n=-1):
        # Bug fix: the original default `tx = CTransaction()` was evaluated
        # once at definition time, so every default-constructed instance
        # shared (and could mutate) the SAME transaction object.  Use a None
        # sentinel and build a fresh transaction per instance instead.
        self.tx = CTransaction() if tx is None else tx
        self.n = n  # the output we're spending
class FullBlockTest(ComparisonTestFramework):
    def __init__(self):
        """Set up block bookkeeping and the key used to sign coinbase spends."""
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(bytes("horsebattery"))
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        # Block timestamps start just after the current wall-clock time.
        self.block_time = int(time.time())+1
        self.tip = None
        self.blocks = {}
def run_test(self):
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
test.run()
def add_transactions_to_block(self, block, tx_list):
[ tx.rehash() for tx in tx_list ]
block.vtx.extend(tx_list)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
return block
# Create a block on top of self.tip, and advance self.tip to point to the new block
# if spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend output,
# and rest will go to fees.
def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
if self.tip == None:
base_block_hash = self.genesis_hash
else:
base_block_hash = self.tip.sha256
# First create the coinbase
height = self.block_heights[base_block_hash] + 1
coinbase = create_coinbase(height, self.coinbase_pubkey)
coinbase.vout[0].nValue += additional_coinbase_value
if (spend != None):
coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
coinbase.rehash()
block = create_block(base_block_hash, coinbase, self.block_time)
if (spend != None):
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), "", 0xffffffff)) # no signature yet
# This copies the java comparison tool testing behavior: the first
# txout has a garbage scriptPubKey, "to make sure we're not
# pre-verifying too much" (?)
tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255])))
if script == None:
tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
else:
tx.vout.append(CTxOut(1, script))
scriptSig = ""
scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
if (scriptPubKey[0] == OP_TRUE):
scriptSig = CScript([OP_TRUE])
else:
(sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL)
scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
tx.vin[0].scriptSig = scriptSig
block = self.add_transactions_to_block(block, [tx])
block.solve()
self.tip = block
self.block_heights[block.sha256] = height
self.block_time += 1
assert number not in self.blocks
self.blocks[number] = block
return block
def get_tests(self):
self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
self.block_heights[self.genesis_hash] = 0
spendable_outputs = []
def save_spendable_output():
spendable_outputs.append(self.tip)
def get_spendable_output():
return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)
def accepted():
return TestInstance([[self.tip, True]])
def rejected(reject = None):
if reject is None:
return TestInstance([[self.tip, False]])
else:
return TestInstance([[self.tip, reject]])
def tip(number):
self.tip = self.blocks[number]
def update_block(block_number, new_transactions):
block = self.blocks[block_number]
old_hash = block.sha256
self.add_transactions_to_block(block, new_transactions)
block.solve()
self.tip = block
self.block_heights[block.sha256] = self.block_heights[old_hash]
del self.block_heights[old_hash]
self.blocks[block_number] = block
return block
block = self.next_block
block(0)
save_spendable_output()
yield accepted()
test = TestInstance(sync_every_block=False)
for i in range(99):
block(1000 + i)
test.blocks_and_transactions.append([self.tip, True])
save_spendable_output()
yield test
out0 = get_spendable_output()
block(1, spend=out0)
save_spendable_output()
yield accepted()
out1 = get_spendable_output()
b2 = block(2, spend=out1)
yield accepted()
tip(1)
b3 = block(3, spend=out1)
txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1)
yield rejected()
out2 = get_spendable_output()
block(4, spend=out2)
yield accepted()
tip(2)
block(5, spend=out2)
save_spendable_output()
yield rejected()
out3 = get_spendable_output()
block(6, spend=out3)
yield accepted()
tip(5)
block(7, spend=out2)
yield rejected()
out4 = get_spendable_output()
block(8, spend=out4)
yield rejected()
tip(6)
block(9, spend=out4, additional_coinbase_value=1)
yield rejected(RejectResult(16, 'bad-cb-amount'))
tip(5)
block(10, spend=out3)
yield rejected()
block(11, spend=out4, additional_coinbase_value=1)
yield rejected(RejectResult(16, 'bad-cb-amount'))
tip(5)
b12 = block(12, spend=out3)
save_spendable_output()
b13 = block(13, spend=out4)
yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
save_spendable_output()
out5 = get_spendable_output()
# b14 is invalid, but the node won't know that until it tries to connect
block(14, spend=out5, additional_coinbase_value=1)
yield rejected()
yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.
# Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
# \-> b3 (1) -> b4 (2)
# Test that a block with a lot of checksigs is okay
lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50 - 1))
tip(13)
block(15, spend=out5, script=lots_of_checksigs)
yield accepted()
# Test that a block with too many checksigs is rejected
out6 = get_spendable_output()
too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50))
block(16, spend=out6, script=too_many_checksigs)
yield rejected(RejectResult(16, 'bad-blk-sigops'))
# Attempt to spend a transaction created on a different fork
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
# \-> b3 (1) -> b4 (2)
tip(15)
block(17, spend=txout_b3)
yield rejected(RejectResult(16, 'bad-txns-inputs-missingorspent'))
# Attempt to spend a transaction created on a different fork (on a fork this time)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5)
# \-> b18 (b3.vtx[1]) -> b19 (6)
# \-> b3 (1) -> b4 (2)
tip(13)
block(18, spend=txout_b3)
yield rejected()
block(19, spend=out6)
yield rejected()
# Attempt to spend a coinbase at depth too low
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
# \-> b3 (1) -> b4 (2)
tip(15)
out7 = get_spendable_output()
block(20, spend=out7)
yield rejected(RejectResult(16, 'bad-txns-premature-spend-of-coinbase'))
# Attempt to spend a coinbase at depth too low (on a fork this time)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5)
# \-> b21 (6) -> b22 (5)
# \-> b3 (1) -> b4 (2)
tip(13)
block(21, spend=out6)
yield rejected()
block(22, spend=out5)
yield rejected()
# Create a block on either side of MAX_BLOCK_SIZE and make sure its accepted/rejected
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
# \-> b24 (6) -> b25 (7)
# \-> b3 (1) -> b4 (2)
tip(15)
b23 = block(23, spend=out6)
old_hash = b23.sha256
tx = CTransaction()
script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69
script_output = CScript([chr(0)*script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1)))
b23 = update_block(23, [tx])
# Make sure the math above worked out to produce a max-sized block
assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE)
yield accepted()
# Make the next block one byte bigger and check that it fails
tip(15)
b24 = block(24, spend=out6)
script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69
script_output = CScript([chr(0)*(script_length+1)])
tx.vout = [CTxOut(0, script_output)]
b24 = update_block(24, [tx])
assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1)
yield rejected(RejectResult(16, 'bad-blk-length'))
b25 = block(25, spend=out7)
yield rejected()
# Create blocks with a coinbase input script size out of range
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
# \-> ... (6) -> ... (7)
# \-> b3 (1) -> b4 (2)
tip(15)
b26 = block(26, spend=out6)
b26.vtx[0].vin[0].scriptSig = chr(0)
b26.vtx[0].rehash()
# update_block causes the merkle root to get updated, even with no new
# transactions, and updates the required state.
b26 = update_block(26, [])
yield rejected(RejectResult(16, 'bad-cb-length'))
# Extend the b26 chain to make sure alisinabitd isn't accepting b26
b27 = block(27, spend=out7)
yield rejected()
tip(15)
b28 = block(28, spend=out6)
b28.vtx[0].vin[0].scriptSig = chr(0)*101
b28.vtx[0].rehash()
b28 = update_block(28, [])
yield rejected(RejectResult(16, 'bad-cb-length'))
b29 = block(29, spend=out7)
# TODO: Should get a reject message back with "bad-prevblk", except
# there's a bug that prevents this from being detected. Just note
yield rejected()
tip(23)
b30 = block(30)
b30.vtx[0].vin[0].scriptSig = chr(0)*100
b30.vtx[0].rehash()
b30 = update_block(30, [])
yield accepted()
if __name__ == '__main__':
FullBlockTest().main()
| true | true |
f71cfd0aee39e98c974f9c4ad5bc7792c8b07739 | 1,316 | py | Python | python/oneflow/test/modules/test_consistent_dot.py | L-Net-1992/oneflow | 4dc08d65caea36fdd137841ac95551218897e730 | [
"Apache-2.0"
] | 1 | 2022-03-14T11:17:56.000Z | 2022-03-14T11:17:56.000Z | python/oneflow/test/modules/test_consistent_dot.py | L-Net-1992/oneflow | 4dc08d65caea36fdd137841ac95551218897e730 | [
"Apache-2.0"
] | null | null | null | python/oneflow/test/modules/test_consistent_dot.py | L-Net-1992/oneflow | 4dc08d65caea36fdd137841ac95551218897e730 | [
"Apache-2.0"
] | 1 | 2021-12-15T02:14:49.000Z | 2021-12-15T02:14:49.000Z | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
@autotest(n=1, check_graph=False)
def do_test_dot_impl(test_case, placement, sbp):
k = random(100, 1000) * 8
x = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp)
y = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp)
z = torch.dot(x, y)
return z
class TestDotConsistent(flow.unittest.TestCase):
@globaltest
def test_dot(test_case):
for placement in all_placement():
for sbp in all_sbp(placement, max_dim=1):
do_test_dot_impl(test_case, placement, sbp)
if __name__ == "__main__":
unittest.main()
| 31.333333 | 77 | 0.740122 | import unittest
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
@autotest(n=1, check_graph=False)
def do_test_dot_impl(test_case, placement, sbp):
k = random(100, 1000) * 8
x = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp)
y = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp)
z = torch.dot(x, y)
return z
class TestDotConsistent(flow.unittest.TestCase):
@globaltest
def test_dot(test_case):
for placement in all_placement():
for sbp in all_sbp(placement, max_dim=1):
do_test_dot_impl(test_case, placement, sbp)
if __name__ == "__main__":
unittest.main()
| true | true |
f71cfdceb8c455928ba52221223c74b392f337c7 | 5,558 | py | Python | sympy/concrete/gosper.py | shilpiprd/sympy | 556e9c61b31d0d5f101cd56b43e843fbf3bcf121 | [
"BSD-3-Clause"
] | 8,323 | 2015-01-02T15:51:43.000Z | 2022-03-31T13:13:19.000Z | sympy/concrete/gosper.py | shilpiprd/sympy | 556e9c61b31d0d5f101cd56b43e843fbf3bcf121 | [
"BSD-3-Clause"
] | 15,102 | 2015-01-01T01:33:17.000Z | 2022-03-31T22:53:13.000Z | sympy/concrete/gosper.py | shilpiprd/sympy | 556e9c61b31d0d5f101cd56b43e843fbf3bcf121 | [
"BSD-3-Clause"
] | 4,490 | 2015-01-01T17:48:07.000Z | 2022-03-31T17:24:05.000Z | """Gosper's algorithm for hypergeometric summation. """
from sympy.core import S, Dummy, symbols
from sympy.core.compatibility import is_sequence
from sympy.polys import Poly, parallel_poly_from_expr, factor
from sympy.solvers import solve
from sympy.simplify import hypersimp
def gosper_normal(f, g, n, polys=True):
r"""
Compute the Gosper's normal form of ``f`` and ``g``.
Explanation
===========
Given relatively prime univariate polynomials ``f`` and ``g``,
rewrite their quotient to a normal form defined as follows:
.. math::
\frac{f(n)}{g(n)} = Z \cdot \frac{A(n) C(n+1)}{B(n) C(n)}
where ``Z`` is an arbitrary constant and ``A``, ``B``, ``C`` are
monic polynomials in ``n`` with the following properties:
1. `\gcd(A(n), B(n+h)) = 1 \forall h \in \mathbb{N}`
2. `\gcd(B(n), C(n+1)) = 1`
3. `\gcd(A(n), C(n)) = 1`
This normal form, or rational factorization in other words, is a
crucial step in Gosper's algorithm and in solving of difference
equations. It can be also used to decide if two hypergeometric
terms are similar or not.
This procedure will return a tuple containing elements of this
factorization in the form ``(Z*A, B, C)``.
Examples
========
>>> from sympy.concrete.gosper import gosper_normal
>>> from sympy.abc import n
>>> gosper_normal(4*n+5, 2*(4*n+1)*(2*n+3), n, polys=False)
(1/4, n + 3/2, n + 1/4)
"""
(p, q), opt = parallel_poly_from_expr(
(f, g), n, field=True, extension=True)
a, A = p.LC(), p.monic()
b, B = q.LC(), q.monic()
C, Z = A.one, a/b
h = Dummy('h')
D = Poly(n + h, n, h, domain=opt.domain)
R = A.resultant(B.compose(D))
roots = set(R.ground_roots().keys())
for r in set(roots):
if not r.is_Integer or r < 0:
roots.remove(r)
for i in sorted(roots):
d = A.gcd(B.shift(+i))
A = A.quo(d)
B = B.quo(d.shift(-i))
for j in range(1, i + 1):
C *= d.shift(-j)
A = A.mul_ground(Z)
if not polys:
A = A.as_expr()
B = B.as_expr()
C = C.as_expr()
return A, B, C
def gosper_term(f, n):
r"""
Compute Gosper's hypergeometric term for ``f``.
Explanation
===========
Suppose ``f`` is a hypergeometric term such that:
.. math::
s_n = \sum_{k=0}^{n-1} f_k
and `f_k` doesn't depend on `n`. Returns a hypergeometric
term `g_n` such that `g_{n+1} - g_n = f_n`.
Examples
========
>>> from sympy.concrete.gosper import gosper_term
>>> from sympy.functions import factorial
>>> from sympy.abc import n
>>> gosper_term((4*n + 1)*factorial(n)/factorial(2*n + 1), n)
(-n - 1/2)/(n + 1/4)
"""
r = hypersimp(f, n)
if r is None:
return None # 'f' is *not* a hypergeometric term
p, q = r.as_numer_denom()
A, B, C = gosper_normal(p, q, n)
B = B.shift(-1)
N = S(A.degree())
M = S(B.degree())
K = S(C.degree())
if (N != M) or (A.LC() != B.LC()):
D = {K - max(N, M)}
elif not N:
D = {K - N + 1, S.Zero}
else:
D = {K - N + 1, (B.nth(N - 1) - A.nth(N - 1))/A.LC()}
for d in set(D):
if not d.is_Integer or d < 0:
D.remove(d)
if not D:
return None # 'f(n)' is *not* Gosper-summable
d = max(D)
coeffs = symbols('c:%s' % (d + 1), cls=Dummy)
domain = A.get_domain().inject(*coeffs)
x = Poly(coeffs, n, domain=domain)
H = A*x.shift(1) - B*x - C
solution = solve(H.coeffs(), coeffs)
if solution is None:
return None # 'f(n)' is *not* Gosper-summable
x = x.as_expr().subs(solution)
for coeff in coeffs:
if coeff not in solution:
x = x.subs(coeff, 0)
if x.is_zero:
return None # 'f(n)' is *not* Gosper-summable
else:
return B.as_expr()*x/C.as_expr()
def gosper_sum(f, k):
r"""
Gosper's hypergeometric summation algorithm.
Explanation
===========
Given a hypergeometric term ``f`` such that:
.. math ::
s_n = \sum_{k=0}^{n-1} f_k
and `f(n)` doesn't depend on `n`, returns `g_{n} - g(0)` where
`g_{n+1} - g_n = f_n`, or ``None`` if `s_n` cannot be expressed
in closed form as a sum of hypergeometric terms.
Examples
========
>>> from sympy.concrete.gosper import gosper_sum
>>> from sympy.functions import factorial
>>> from sympy.abc import n, k
>>> f = (4*k + 1)*factorial(k)/factorial(2*k + 1)
>>> gosper_sum(f, (k, 0, n))
(-factorial(n) + 2*factorial(2*n + 1))/factorial(2*n + 1)
>>> _.subs(n, 2) == sum(f.subs(k, i) for i in [0, 1, 2])
True
>>> gosper_sum(f, (k, 3, n))
(-60*factorial(n) + factorial(2*n + 1))/(60*factorial(2*n + 1))
>>> _.subs(n, 5) == sum(f.subs(k, i) for i in [3, 4, 5])
True
References
==========
.. [1] Marko Petkovsek, Herbert S. Wilf, Doron Zeilberger, A = B,
AK Peters, Ltd., Wellesley, MA, USA, 1997, pp. 73--100
"""
indefinite = False
if is_sequence(k):
k, a, b = k
else:
indefinite = True
g = gosper_term(f, k)
if g is None:
return None
if indefinite:
result = f*g
else:
result = (f*(g + 1)).subs(k, b) - (f*g).subs(k, a)
if result is S.NaN:
try:
result = (f*(g + 1)).limit(k, b) - (f*g).limit(k, a)
except NotImplementedError:
result = None
return factor(result)
| 24.377193 | 69 | 0.536704 |
from sympy.core import S, Dummy, symbols
from sympy.core.compatibility import is_sequence
from sympy.polys import Poly, parallel_poly_from_expr, factor
from sympy.solvers import solve
from sympy.simplify import hypersimp
def gosper_normal(f, g, n, polys=True):
(p, q), opt = parallel_poly_from_expr(
(f, g), n, field=True, extension=True)
a, A = p.LC(), p.monic()
b, B = q.LC(), q.monic()
C, Z = A.one, a/b
h = Dummy('h')
D = Poly(n + h, n, h, domain=opt.domain)
R = A.resultant(B.compose(D))
roots = set(R.ground_roots().keys())
for r in set(roots):
if not r.is_Integer or r < 0:
roots.remove(r)
for i in sorted(roots):
d = A.gcd(B.shift(+i))
A = A.quo(d)
B = B.quo(d.shift(-i))
for j in range(1, i + 1):
C *= d.shift(-j)
A = A.mul_ground(Z)
if not polys:
A = A.as_expr()
B = B.as_expr()
C = C.as_expr()
return A, B, C
def gosper_term(f, n):
r = hypersimp(f, n)
if r is None:
return None
p, q = r.as_numer_denom()
A, B, C = gosper_normal(p, q, n)
B = B.shift(-1)
N = S(A.degree())
M = S(B.degree())
K = S(C.degree())
if (N != M) or (A.LC() != B.LC()):
D = {K - max(N, M)}
elif not N:
D = {K - N + 1, S.Zero}
else:
D = {K - N + 1, (B.nth(N - 1) - A.nth(N - 1))/A.LC()}
for d in set(D):
if not d.is_Integer or d < 0:
D.remove(d)
if not D:
return None
d = max(D)
coeffs = symbols('c:%s' % (d + 1), cls=Dummy)
domain = A.get_domain().inject(*coeffs)
x = Poly(coeffs, n, domain=domain)
H = A*x.shift(1) - B*x - C
solution = solve(H.coeffs(), coeffs)
if solution is None:
return None
x = x.as_expr().subs(solution)
for coeff in coeffs:
if coeff not in solution:
x = x.subs(coeff, 0)
if x.is_zero:
return None
else:
return B.as_expr()*x/C.as_expr()
def gosper_sum(f, k):
indefinite = False
if is_sequence(k):
k, a, b = k
else:
indefinite = True
g = gosper_term(f, k)
if g is None:
return None
if indefinite:
result = f*g
else:
result = (f*(g + 1)).subs(k, b) - (f*g).subs(k, a)
if result is S.NaN:
try:
result = (f*(g + 1)).limit(k, b) - (f*g).limit(k, a)
except NotImplementedError:
result = None
return factor(result)
| true | true |
f71cfee5730a3abc93dbfc1a9dac679b29cf53c2 | 17,212 | py | Python | tests/test_authentication.py | crawlersick/mysql-connector-python | 9a224b96250ba81f2bcc279496befcff309ae88a | [
"BSD-3-Clause"
] | 1 | 2021-08-04T21:37:23.000Z | 2021-08-04T21:37:23.000Z | tests/test_authentication.py | maximmasiutin/mysql-connector-python | 9d5e6f532a0342795f380ee1cfeeb93adbe333a0 | [
"BSD-3-Clause"
] | null | null | null | tests/test_authentication.py | maximmasiutin/mysql-connector-python | 9d5e6f532a0342795f380ee1cfeeb93adbe333a0 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2014, 2020, Oracle and/or its affiliates.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License, version 2.0, as
# published by the Free Software Foundation.
#
# This program is also distributed with certain software (including
# but not limited to OpenSSL) that is licensed under separate terms,
# as designated in a particular file or component or in included license
# documentation. The authors of MySQL hereby grant you an
# additional permission to link the program and your derivative works
# with the separately licensed software that they have included with
# MySQL.
#
# Without limiting anything contained in the foregoing, this file,
# which is part of MySQL Connector/Python, is also subject to the
# Universal FOSS Exception, version 1.0, a copy of which can be found at
# http://oss.oracle.com/licenses/universal-foss-exception.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License, version 2.0, for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Test module for authentication
"""
import inspect
import sys
import mysql.connector
from mysql.connector import authentication
from mysql.connector.errors import InterfaceError
import tests
# Authentication plugin names that ship with MySQL server by default;
# the module-level tests check that get_auth_plugin() resolves each of
# these to a registered plugin class.
_STANDARD_PLUGINS = (
    'mysql_native_password',
    'mysql_clear_password',
    'sha256_password',
)
class AuthenticationModuleTests(tests.MySQLConnectorTests):
    """Tests globals and functions of the authentication module"""

    def test_get_auth_plugin(self):
        # Unknown and empty plugin names must be rejected.
        for bogus_name in ('spam', ''):
            self.assertRaises(mysql.connector.NotSupportedError,
                              authentication.get_auth_plugin, bogus_name)

        # Collect every class exposed by the module that advertises a
        # non-empty plugin_name, keyed by that name.
        registry = {
            member.plugin_name: member
            for _, member in inspect.getmembers(authentication)
            if inspect.isclass(member)
            and getattr(member, 'plugin_name', None)
        }

        # Each standard plugin must resolve to its registered class.
        for plugin_name in _STANDARD_PLUGINS:
            self.assertEqual(registry[plugin_name],
                             authentication.get_auth_plugin(plugin_name),
                             "Failed getting class for {0}".format(plugin_name))
class BaseAuthPluginTests(tests.MySQLConnectorTests):
    """Tests authentication.BaseAuthPlugin"""

    def test_class(self):
        # The abstract base advertises no plugin name and no SSL requirement.
        self.assertEqual('', authentication.BaseAuthPlugin.plugin_name)
        self.assertEqual(False, authentication.BaseAuthPlugin.requires_ssl)

    def test___init__(self):
        # Positional auth data only: optional attributes take their defaults.
        plugin = authentication.BaseAuthPlugin('ham')
        self.assertEqual('ham', plugin._auth_data)
        for attr_name in ('_username', '_password', '_database'):
            self.assertEqual(None, getattr(plugin, attr_name))
        self.assertEqual(False, plugin._ssl_enabled)

        # All keyword arguments supplied: each lands on its attribute.
        plugin = authentication.BaseAuthPlugin(
            'spam', username='ham', password='secret',
            database='test', ssl_enabled=True)
        self.assertEqual('spam', plugin._auth_data)
        self.assertEqual('ham', plugin._username)
        self.assertEqual('secret', plugin._password)
        self.assertEqual('test', plugin._database)
        self.assertEqual(True, plugin._ssl_enabled)

    def test_prepare_password(self):
        # prepare_password() is abstract on the base class.
        plugin = authentication.BaseAuthPlugin('ham')
        self.assertRaises(NotImplementedError, plugin.prepare_password)

    def test_auth_response(self):
        # auth_response() is abstract as well ...
        plugin = authentication.BaseAuthPlugin('ham')
        self.assertRaises(NotImplementedError, plugin.auth_response)
        # ... and must refuse to run when SSL is required but not enabled.
        plugin.requires_ssl = True
        self.assertRaises(mysql.connector.InterfaceError, plugin.auth_response)
class MySQLNativePasswordAuthPluginTests(tests.MySQLConnectorTests):
    """Tests authentication.MySQLNativePasswordAuthPlugin"""

    def setUp(self):
        self.plugin_class = authentication.MySQLNativePasswordAuthPlugin

    def test_class(self):
        # Native-password plugin advertises its name and works without SSL.
        self.assertEqual('mysql_native_password', self.plugin_class.plugin_name)
        self.assertEqual(False, self.plugin_class.requires_ssl)

    def test_prepare_password(self):
        # Missing or unusable auth data must raise InterfaceError
        # (None, and an integer that is not valid scramble data).
        for bad_auth_data in (None, 123456):
            plugin = self.plugin_class(bad_auth_data, password='spam')
            self.assertRaises(mysql.connector.InterfaceError,
                              plugin.prepare_password)

        auth_data = (
            b'\x2d\x3e\x33\x25\x5b\x7d\x25\x3c\x40\x6b'
            b'\x7b\x47\x30\x5b\x57\x25\x51\x48\x55\x53'
        )
        auth_response = (
            b'\x73\xb8\xf0\x4b\x3a\xa5\x7c\x46\xb9\x84'
            b'\x90\x50\xab\xc0\x3a\x0f\x8f\xad\x51\xa3'
        )

        # Without a password the scramble is empty.
        plugin = self.plugin_class('\x3f'*20, password=None)
        self.assertEqual(b'', plugin.prepare_password())

        # A known challenge/password pair yields the expected scramble,
        # and auth_response() returns the same value.
        plugin = self.plugin_class(auth_data, password='spam')
        self.assertEqual(auth_response, plugin.prepare_password())
        self.assertEqual(auth_response, plugin.auth_response())
class MySQLClearPasswordAuthPluginTests(tests.MySQLConnectorTests):
    """Tests authentication.MySQLClearPasswordAuthPlugin"""

    def setUp(self):
        self.plugin_class = authentication.MySQLClearPasswordAuthPlugin

    def test_class(self):
        # The clear-text password plugin must insist on an SSL connection.
        self.assertEqual('mysql_clear_password', self.plugin_class.plugin_name)
        self.assertEqual(True, self.plugin_class.requires_ssl)

    def test_prepare_password(self):
        # The password is sent verbatim, NUL-terminated.
        expected = b'spam\x00'
        plugin = self.plugin_class(None, password='spam', ssl_enabled=True)
        self.assertEqual(expected, plugin.prepare_password())
        self.assertEqual(expected, plugin.auth_response())
class MySQLSHA256PasswordAuthPluginTests(tests.MySQLConnectorTests):
    """Tests authentication.MySQLSHA256PasswordAuthPlugin"""

    def setUp(self):
        self.plugin_class = authentication.MySQLSHA256PasswordAuthPlugin

    def test_class(self):
        # The sha256_password plugin must insist on an SSL connection.
        self.assertEqual('sha256_password', self.plugin_class.plugin_name)
        self.assertEqual(True, self.plugin_class.requires_ssl)

    def test_prepare_password(self):
        # Over SSL the password is sent as-is, NUL-terminated.
        expected = b'spam\x00'
        plugin = self.plugin_class(None, password='spam', ssl_enabled=True)
        self.assertEqual(expected, plugin.prepare_password())
        self.assertEqual(expected, plugin.auth_response())
class MySQLLdapSaslPasswordAuthPluginTests(tests.MySQLConnectorTests):
    """Tests authentication.MySQLLdapSaslPasswordAuthPlugin"""

    def setUp(self):
        self.plugin_class = authentication.MySQLLdapSaslPasswordAuthPlugin

    def test_class(self):
        self.assertEqual("authentication_ldap_sasl_client",
                         self.plugin_class.plugin_name)
        self.assertEqual(False, self.plugin_class.requires_ssl)

    def _verify_scram_handshake(self, auth_data):
        """Run the SCRAM checks shared by SCRAM-SHA-1 and SCRAM-SHA-256.

        Verifies the client-first message header and the 32-byte nonce
        for an empty, an ASCII and a SASLprep-mapped user name, then
        verifies that malformed or unauthenticated server messages and
        server proofs are rejected with an InterfaceError.
        """
        auth_plugin = self.plugin_class(auth_data, username="", password="")

        # Verify the format of the first message from client.
        exp = b'n,a=,n=,r='
        client_first_msg = auth_plugin.auth_response()
        self.assertTrue(client_first_msg.startswith(exp),
                        "got header: {}".format(auth_plugin.auth_response()))

        auth_plugin = self.plugin_class(auth_data, username="user",
                                        password="spam")

        # Verify the length of the client's nonce in r=
        cnonce = client_first_msg[len(b'n,a=,n=,r='):]
        self.assertEqual(32, len(cnonce),
                         "Unexpected length {}".format(len(cnonce)))

        # Verify the format of the first message from client.
        exp = b'n,a=user,n=user,r='
        client_first_msg = auth_plugin.auth_response()
        self.assertTrue(client_first_msg.startswith(exp),
                        "got header: {}".format(auth_plugin.auth_response()))

        # Verify the length of the client's nonce in r=
        cnonce = client_first_msg[len(exp):]
        self.assertEqual(32, len(cnonce),
                         "Unexpected cnonce length {}, response {}"
                         "".format(len(cnonce), client_first_msg))

        # Verify that a user name that requires character mapping is mapped
        # (U+1680 OGHAM SPACE MARK becomes an ASCII space under SASLprep).
        auth_plugin = self.plugin_class(auth_data, username=u"u\u1680ser",
                                        password="spam")
        exp = b'n,a=u ser,n=u ser,r='
        client_first_msg = auth_plugin.auth_response()
        self.assertTrue(client_first_msg.startswith(exp),
                        "got header: {}".format(auth_plugin.auth_response()))

        # Verify the length of the client's nonce in r=
        cnonce = client_first_msg[len(exp):]
        self.assertEqual(32, len(cnonce),
                         "Unexpected length {}".format(len(cnonce)))

        bad_responses = [None, "", "v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123]
        for bad_res in bad_responses:
            # Verify an error is shown if server response is not as expected.
            with self.assertRaises(InterfaceError) as context:
                auth_plugin.auth_continue(bad_res)
            self.assertIn("Unexpected server message", context.exception.msg,
                          "not the expected: {}".format(context.exception.msg))

        # Verify an error is shown if server response is not well formatted.
        # NOTE: the expected substrings below intentionally reproduce the
        # library's own error text (including its spelling).
        with self.assertRaises(InterfaceError) as context:
            auth_plugin.auth_continue(
                bytearray(b"r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,w54"))
        self.assertIn("Incomplete reponse", context.exception.msg,
                      "not the expected error {}".format(context.exception.msg))

        # Verify an error is shown if server does not authenticate response.
        with self.assertRaises(InterfaceError) as context:
            auth_plugin.auth_continue(
                bytearray(b"r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,i=40"))
        self.assertIn("Unable to authenticate resp", context.exception.msg,
                      "not the expected error {}".format(context.exception.msg))

        bad_proofs = [None, "", b"5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123]
        for bad_proof in bad_proofs:
            # Verify an error is shown if server proof is not well formatted.
            with self.assertRaises(InterfaceError) as context:
                auth_plugin.auth_finalize(bad_proof)
            self.assertIn("proof is not well formated.", context.exception.msg,
                          "not the expected: {}".format(context.exception.msg))

        # Verify an error is shown if the server cannot prove itself.
        with self.assertRaises(InterfaceError) as context:
            auth_plugin.auth_finalize(
                bytearray(b"v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM="))
        self.assertIn("Unable to proof server identity", context.exception.msg,
                      "not the expected error {}".format(context.exception.msg))

    def test_auth_response(self):
        # Test unsupported mechanism error message
        auth_data = b'UNKOWN-METHOD'
        auth_plugin = self.plugin_class(auth_data, username="user",
                                        password="spam")
        with self.assertRaises(InterfaceError) as context:
            auth_plugin.auth_response()
        self.assertIn("sasl authentication method", context.exception.msg,
                      "not the expected error {}".format(context.exception.msg))
        self.assertIn("is not supported", context.exception.msg,
                      "not the expected error {}".format(context.exception.msg))

        # prepare_password() is not part of the SASL plugin's API.
        with self.assertRaises(NotImplementedError) as context:
            auth_plugin.prepare_password()

        # Test SCRAM-SHA-1 mechanism is accepted
        self._verify_scram_handshake(b'SCRAM-SHA-1')

    def test_auth_response256(self):
        # Test unsupported mechanism error message (name must be quoted).
        auth_data = b'UNKOWN-METHOD'
        auth_plugin = self.plugin_class(auth_data, username="user",
                                        password="spam")
        with self.assertRaises(InterfaceError) as context:
            auth_plugin.auth_response()
        self.assertIn('sasl authentication method "UNKOWN-METHOD"',
                      context.exception.msg, "not the expected error {}"
                      "".format(context.exception.msg))
        self.assertIn("is not supported", context.exception.msg,
                      "not the expected error {}".format(context.exception.msg))

        # prepare_password() is not part of the SASL plugin's API.
        with self.assertRaises(NotImplementedError) as context:
            auth_plugin.prepare_password()

        # Test SCRAM-SHA-256 mechanism is accepted
        self._verify_scram_handshake(b'SCRAM-SHA-256')
| 44.246787 | 80 | 0.6528 |
import inspect
import sys
import mysql.connector
from mysql.connector import authentication
from mysql.connector.errors import InterfaceError
import tests
_STANDARD_PLUGINS = (
'mysql_native_password',
'mysql_clear_password',
'sha256_password',
)
class AuthenticationModuleTests(tests.MySQLConnectorTests):
def test_get_auth_plugin(self):
self.assertRaises(mysql.connector.NotSupportedError,
authentication.get_auth_plugin, 'spam')
self.assertRaises(mysql.connector.NotSupportedError,
authentication.get_auth_plugin, '')
plugin_classes = {}
for name, obj in inspect.getmembers(authentication):
if inspect.isclass(obj) and hasattr(obj, 'plugin_name'):
if obj.plugin_name:
plugin_classes[obj.plugin_name] = obj
for plugin_name in _STANDARD_PLUGINS:
self.assertEqual(plugin_classes[plugin_name],
authentication.get_auth_plugin(plugin_name),
"Failed getting class for {0}".format(plugin_name))
class BaseAuthPluginTests(tests.MySQLConnectorTests):
def test_class(self):
self.assertEqual('', authentication.BaseAuthPlugin.plugin_name)
self.assertEqual(False, authentication.BaseAuthPlugin.requires_ssl)
def test___init__(self):
base = authentication.BaseAuthPlugin('ham')
self.assertEqual('ham', base._auth_data)
self.assertEqual(None, base._username)
self.assertEqual(None, base._password)
self.assertEqual(None, base._database)
self.assertEqual(False, base._ssl_enabled)
base = authentication.BaseAuthPlugin(
'spam', username='ham', password='secret',
database='test', ssl_enabled=True)
self.assertEqual('spam', base._auth_data)
self.assertEqual('ham', base._username)
self.assertEqual('secret', base._password)
self.assertEqual('test', base._database)
self.assertEqual(True, base._ssl_enabled)
def test_prepare_password(self):
base = authentication.BaseAuthPlugin('ham')
self.assertRaises(NotImplementedError, base.prepare_password)
def test_auth_response(self):
base = authentication.BaseAuthPlugin('ham')
self.assertRaises(NotImplementedError, base.auth_response)
base.requires_ssl = True
self.assertRaises(mysql.connector.InterfaceError, base.auth_response)
class MySQLNativePasswordAuthPluginTests(tests.MySQLConnectorTests):
def setUp(self):
self.plugin_class = authentication.MySQLNativePasswordAuthPlugin
def test_class(self):
self.assertEqual('mysql_native_password', self.plugin_class.plugin_name)
self.assertEqual(False, self.plugin_class.requires_ssl)
def test_prepare_password(self):
auth_plugin = self.plugin_class(None, password='spam')
self.assertRaises(mysql.connector.InterfaceError,
auth_plugin.prepare_password)
auth_plugin = self.plugin_class(123456, password='spam')
self.assertRaises(mysql.connector.InterfaceError,
auth_plugin.prepare_password)
empty = b''
auth_data = (
b'\x2d\x3e\x33\x25\x5b\x7d\x25\x3c\x40\x6b'
b'\x7b\x47\x30\x5b\x57\x25\x51\x48\x55\x53'
)
auth_response = (
b'\x73\xb8\xf0\x4b\x3a\xa5\x7c\x46\xb9\x84'
b'\x90\x50\xab\xc0\x3a\x0f\x8f\xad\x51\xa3'
)
auth_plugin = self.plugin_class('\x3f'*20, password=None)
self.assertEqual(empty, auth_plugin.prepare_password())
auth_plugin = self.plugin_class(auth_data, password='spam')
self.assertEqual(auth_response, auth_plugin.prepare_password())
self.assertEqual(auth_response, auth_plugin.auth_response())
class MySQLClearPasswordAuthPluginTests(tests.MySQLConnectorTests):
def setUp(self):
self.plugin_class = authentication.MySQLClearPasswordAuthPlugin
def test_class(self):
self.assertEqual('mysql_clear_password', self.plugin_class.plugin_name)
self.assertEqual(True, self.plugin_class.requires_ssl)
def test_prepare_password(self):
exp = b'spam\x00'
auth_plugin = self.plugin_class(None, password='spam', ssl_enabled=True)
self.assertEqual(exp, auth_plugin.prepare_password())
self.assertEqual(exp, auth_plugin.auth_response())
class MySQLSHA256PasswordAuthPluginTests(tests.MySQLConnectorTests):
def setUp(self):
self.plugin_class = authentication.MySQLSHA256PasswordAuthPlugin
def test_class(self):
self.assertEqual('sha256_password', self.plugin_class.plugin_name)
self.assertEqual(True, self.plugin_class.requires_ssl)
def test_prepare_password(self):
exp = b'spam\x00'
auth_plugin = self.plugin_class(None, password='spam', ssl_enabled=True)
self.assertEqual(exp, auth_plugin.prepare_password())
self.assertEqual(exp, auth_plugin.auth_response())
class MySQLLdapSaslPasswordAuthPluginTests(tests.MySQLConnectorTests):
def setUp(self):
self.plugin_class = authentication.MySQLLdapSaslPasswordAuthPlugin
def test_class(self):
self.assertEqual("authentication_ldap_sasl_client",
self.plugin_class.plugin_name)
self.assertEqual(False, self.plugin_class.requires_ssl)
def test_auth_response(self):
auth_data = b'UNKOWN-METHOD'
auth_plugin = self.plugin_class(auth_data, username="user",
password="spam")
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_response()
self.assertIn("sasl authentication method", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
self.assertIn("is not supported", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
with self.assertRaises(NotImplementedError) as context:
auth_plugin.prepare_password()
auth_data = b'SCRAM-SHA-1'
auth_plugin = self.plugin_class(auth_data, username="",
password="")
exp = b'n,a=,n=,r='
client_first_nsg = auth_plugin.auth_response()
self.assertTrue(client_first_nsg.startswith(exp),
"got header: {}".format(auth_plugin.auth_response()))
auth_plugin = self.plugin_class(auth_data, username="user",
password="spam")
cnonce = client_first_nsg[(len(b'n,a=,n=,r=')):]
r_len = len(cnonce)
self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce)))
# Verify the format of the first message from client.
exp = b'n,a=user,n=user,r='
client_first_nsg = auth_plugin.auth_response()
self.assertTrue(client_first_nsg.startswith(exp),
"got header: {}".format(auth_plugin.auth_response()))
# Verify the length of the client's nonce in r=
cnonce = client_first_nsg[(len(exp)):]
r_len = len(cnonce)
self.assertEqual(32, r_len, "Unexpected cnonce legth {}, response {}"
"".format(len(cnonce), client_first_nsg))
auth_plugin = self.plugin_class(auth_data, username=u"u\u1680ser",
password="spam")
exp = b'n,a=u ser,n=u ser,r='
client_first_nsg = auth_plugin.auth_response()
self.assertTrue(client_first_nsg.startswith(exp),
"got header: {}".format(auth_plugin.auth_response()))
cnonce = client_first_nsg[(len(exp)):]
r_len = len(cnonce)
self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce)))
bad_responses = [None, "", "v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123]
for bad_res in bad_responses:
# verify an error is shown if server response is not as expected.
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_continue(bad_res)
self.assertIn("Unexpected server message", context.exception.msg,
"not the expected: {}".format(context.exception.msg))
# verify an error is shown if server response is not well formated.
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_continue(
bytearray("r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,w54".encode()))
self.assertIn("Incomplete reponse", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
# verify an error is shown if server does not authenticate response.
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_continue(
bytearray("r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,i=40".encode()))
self.assertIn("Unable to authenticate resp", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
bad_proofs = [None, "", b"5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123]
for bad_proof in bad_proofs:
# verify an error is shown if server proof is not well formated.
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_finalize(bad_proof)
self.assertIn("proof is not well formated.", context.exception.msg,
"not the expected: {}".format(context.exception.msg))
# verify an error is shown it the server can not prove it self.
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_finalize(
bytearray(b"v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM="))
self.assertIn("Unable to proof server identity", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
def test_auth_response256(self):
# Test unsupported mechanism error message
auth_data = b'UNKOWN-METHOD'
auth_plugin = self.plugin_class(auth_data, username="user",
password="spam")
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_response()
self.assertIn('sasl authentication method "UNKOWN-METHOD"',
context.exception.msg, "not the expected error {}"
"".format(context.exception.msg))
self.assertIn("is not supported", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
with self.assertRaises(NotImplementedError) as context:
auth_plugin.prepare_password()
# Test SCRAM-SHA-256 mechanism is accepted
auth_data = b'SCRAM-SHA-256'
auth_plugin = self.plugin_class(auth_data, username="", password="")
# Verify the format of the first message from client.
exp = b'n,a=,n=,r='
client_first_nsg = auth_plugin.auth_response()
self.assertTrue(client_first_nsg.startswith(exp),
"got header: {}".format(auth_plugin.auth_response()))
auth_plugin = self.plugin_class(auth_data, username="user",
password="spam")
# Verify the length of the client's nonce in r=
cnonce = client_first_nsg[(len(b'n,a=,n=,r=')):]
r_len = len(cnonce)
self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce)))
exp = b'n,a=user,n=user,r='
client_first_nsg = auth_plugin.auth_response()
self.assertTrue(client_first_nsg.startswith(exp),
"got header: {}".format(auth_plugin.auth_response()))
cnonce = client_first_nsg[(len(exp)):]
r_len = len(cnonce)
self.assertEqual(32, r_len, "Unexpected cnonce legth {}, response {}"
"".format(len(cnonce), client_first_nsg))
# Verify that a user name that requires character mapping is mapped
auth_plugin = self.plugin_class(auth_data, username=u"u\u1680ser",
password="spam")
exp = b'n,a=u ser,n=u ser,r='
client_first_nsg = auth_plugin.auth_response()
self.assertTrue(client_first_nsg.startswith(exp),
"got header: {}".format(auth_plugin.auth_response()))
# Verify the length of the client's nonce in r=
cnonce = client_first_nsg[(len(exp)):]
r_len = len(cnonce)
self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce)))
bad_responses = [None, "", "v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123]
for bad_res in bad_responses:
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_continue(bad_res)
self.assertIn("Unexpected server message", context.exception.msg,
"not the expected: {}".format(context.exception.msg))
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_continue(
bytearray(b"r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,w54"))
self.assertIn("Incomplete reponse", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_continue(
bytearray(b"r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,i=40"))
self.assertIn("Unable to authenticate resp", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
bad_proofs = [None, "", b"5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123]
for bad_proof in bad_proofs:
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_finalize(bad_proof)
self.assertIn("proof is not well formated.", context.exception.msg,
"not the expected: {}".format(context.exception.msg))
with self.assertRaises(InterfaceError) as context:
auth_plugin.auth_finalize(
bytearray(b"v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM="))
self.assertIn("Unable to proof server identity", context.exception.msg,
"not the expected error {}".format(context.exception.msg))
| true | true |
f71cfeeb4a73ec2b3b2ae1038b970b11d68fd0ee | 3,065 | py | Python | stock-filters/colan_create_data.py | bi3mer/GDMC | 6c619cbf907d8de17f9bd7b1390849201e977581 | [
"ISC"
] | null | null | null | stock-filters/colan_create_data.py | bi3mer/GDMC | 6c619cbf907d8de17f9bd7b1390849201e977581 | [
"ISC"
] | null | null | null | stock-filters/colan_create_data.py | bi3mer/GDMC | 6c619cbf907d8de17f9bd7b1390849201e977581 | [
"ISC"
] | null | null | null | from pymclevel import alphaMaterials, MCSchematic, MCLevel, BoundingBox
from pymclevel.box import Vector
from mcplatform import *
from tqdm import tqdm
# MCEdit filter dialog definition: two read-only labels, no user options.
inputs = (
	("Selection Material Counter", "label"),
	("Creator: Colan Biemer", "label")
)
# Hard-coded output location for the extracted training data.
DATA_DIRECTORY = "/home/colanbiemer/work/projects/mcedit_data/extracted_data/"
# NOTE(review): RESULT_FILE is already an absolute path ending in ".csv";
# perform() later builds result_file by prepending DATA_DIRECTORY and
# appending EXTENSION to it again -- confirm the intended result path.
RESULT_FILE = DATA_DIRECTORY + "results.csv"
DATA_FILE = "data_"
EXTENSION = ".csv"
def calculate_box_size(index):
	"""Return the number of context blocks for a cube of radius `index`.

	Iterative form of the recurrence f(1) = 8, f(n) = f(n - 1) + 4 * 2 * n;
	any index below 2 yields the base value 8.
	"""
	size = 8
	for step in range(2, index + 1):
		size += 4 * 2 * step
	return size
def calculate_width(index):
	"""Return the cube side length for `index` (3, 5, 7, ...).

	Closed form of the recurrence f(1) = 3, f(n) = f(n - 1) + 2; indices
	below 2 fall back to the base case of 3.
	"""
	if index <= 1:
		return 3
	return 2 * index + 1
# __ ____ ____ __ _ _ __ ____ ____ ____ _ _ __ ____ __ ____ __ ____
# / ( _ (_ _( ( \/ ( (__ ( __) (_ _/ )( ( / ___) / _\/ ___)/ _\( _ \
# ( O ) __/ )( )(/ \/ \)( / _/ ) _) )( ) __ ()(\___ \ / \___ / \) __/
# \__(__) (__)(__\_)(_(__(____(____) (__)\_)(_(__(____/ \_/\_(____\_/\_(__)
def build_data(level, point, size_index):
	"""Collect block ids in the cube of radius `size_index` around `point`.

	Visits every cell of the (2 * size_index + 1)**3 cube centred on
	`point` in x-major, then y, then z order, skipping the centre cell
	(the prediction target), and returns the block ids as a flat list.

	Uses range() instead of the Python-2-only xrange() so the helper also
	runs under Python 3; iteration behaviour is identical under Python 2.

	:param level: world object exposing blockAt(x, y, z).
	:param point: object with integer .x/.y/.z attributes (cube centre).
	:param size_index: cube radius in blocks (>= 1).
	"""
	min_x = point.x - size_index
	min_y = point.y - size_index
	min_z = point.z - size_index
	max_x = point.x + size_index
	max_y = point.y + size_index
	max_z = point.z + size_index
	data = []
	for x in range(min_x, max_x + 1):
		for y in range(min_y, max_y + 1):
			for z in range(min_z, max_z + 1):
				if x == point.x and y == point.y and z == point.z:
					continue  # skip the centre block
				data.append(level.blockAt(x, y, z))
	return data
# @todo: handle distance from the ground and add that to each set for a toal
# of 12 datasets
def perform(level, box, options):
	"""MCEdit filter entry point: extract block-context training data.

	For every block inside the user's selection, records the block id (the
	prediction target) into RESULT_FILE and six surrounding context cubes
	of increasing radius (1..6) into per-radius CSV files.

	:param level: the MCLevel being edited (queried via level.blockAt).
	:param box: the BoundingBox of the user's selection.
	:param options: filter options dict (unused; the dialog has only labels).
	"""
	data_set_size = 6
	# Normalise the selection so min_* <= max_* regardless of drag direction.
	final_x = box.origin.x + box.size.x
	final_y = box.origin.y + box.size.y
	final_z = box.origin.z + box.size.z
	min_x = min(box.origin.x, final_x)
	max_x = max(box.origin.x, final_x)
	min_y = min(box.origin.y, final_y)
	max_y = max(box.origin.y, final_y)
	min_z = min(box.origin.z, final_z)
	max_z = max(box.origin.z, final_z)
	point = None
	# level_data[i] accumulates the context rows for cube radius i + 1.
	level_data = [[] for i in xrange(data_set_size)]
	results = []
	print "analyzing data"
	for x in tqdm(xrange(min_x, max_x)):
		for y in xrange(min_y, max_y):
			for z in xrange(min_z, max_z):
				point = Vector(x, y, z)
				results.append(level.blockAt(x, y, z))
				for size_index in xrange(data_set_size):
					level_data[size_index].append(build_data(level, point, size_index + 1))
	print "writing data to file"
	for i in tqdm(xrange(data_set_size)):
		data_file = DATA_DIRECTORY + DATA_FILE + str(i) + EXTENSION
		# NOTE(review): RESULT_FILE is already an absolute .csv path, so
		# result_file is malformed -- and it is never used below.
		result_file = DATA_DIRECTORY + RESULT_FILE + str(i) + EXTENSION
		data = level_data[i]
		with open(data_file, "a") as myfile:
			for line in data:
				myfile.write(','.join(str(x) for x in line) + "\n")
		# NOTE(review): this appends the same `results` list once per radius
		# (6 copies) to the single RESULT_FILE; presumably it should run once
		# outside this loop (or write to result_file). Confirm intent.
		with open(RESULT_FILE, "a") as myfile:
			for result in results:
				myfile.write(str(result) + "\n")
	print "completed"
| 30.346535 | 91 | 0.586623 | from pymclevel import alphaMaterials, MCSchematic, MCLevel, BoundingBox
from pymclevel.box import Vector
from mcplatform import *
from tqdm import tqdm
# MCEdit filter dialog definition: two read-only labels, no user options.
inputs = (
	("Selection Material Counter", "label"),
	("Creator: Colan Biemer", "label")
)
# Hard-coded output location for the extracted training data.
DATA_DIRECTORY = "/home/colanbiemer/work/projects/mcedit_data/extracted_data/"
# NOTE(review): RESULT_FILE already contains DATA_DIRECTORY and ".csv";
# perform() prepends/appends to it again when building result_file.
RESULT_FILE = DATA_DIRECTORY + "results.csv"
DATA_FILE = "data_"
EXTENSION = ".csv"
def calculate_box_size(index):
	"""Context-block count for radius `index`: f(1) = 8, f(n) = f(n-1) + 8n."""
	total = 8
	for k in range(2, index + 1):
		total += 4 * 2 * k
	return total
def calculate_width(index):
	"""Cube side length for `index`: 3 for index <= 1, otherwise 2*index + 1."""
	return 3 if index <= 1 else 2 * index + 1
def build_data(level, point, size_index):
	"""Return block ids in the radius-`size_index` cube around `point`.

	Skips the centre cell (the prediction target); x-major, then y, then z
	order. Uses range() instead of Python-2-only xrange() so the helper is
	portable to Python 3 with identical iteration behaviour.
	"""
	min_x = point.x - size_index
	min_y = point.y - size_index
	min_z = point.z - size_index
	max_x = point.x + size_index
	max_y = point.y + size_index
	max_z = point.z + size_index
	data = []
	for x in range(min_x, max_x + 1):
		for y in range(min_y, max_y + 1):
			for z in range(min_z, max_z + 1):
				if x == point.x and y == point.y and z == point.z:
					continue  # skip the centre block
				data.append(level.blockAt(x, y, z))
	return data
def perform(level, box, options):
	"""MCEdit filter entry point: extract block-context training data.

	For every block inside the selection, records the block id plus six
	context cubes of radius 1..6 into per-radius CSV files.
	"""
	data_set_size = 6
	# Normalise the selection so min_* <= max_* regardless of drag direction.
	final_x = box.origin.x + box.size.x
	final_y = box.origin.y + box.size.y
	final_z = box.origin.z + box.size.z
	min_x = min(box.origin.x, final_x)
	max_x = max(box.origin.x, final_x)
	min_y = min(box.origin.y, final_y)
	max_y = max(box.origin.y, final_y)
	min_z = min(box.origin.z, final_z)
	max_z = max(box.origin.z, final_z)
	point = None
	# level_data[i] accumulates the context rows for cube radius i + 1.
	level_data = [[] for i in xrange(data_set_size)]
	results = []
	print "analyzing data"
	for x in tqdm(xrange(min_x, max_x)):
		for y in xrange(min_y, max_y):
			for z in xrange(min_z, max_z):
				point = Vector(x, y, z)
				results.append(level.blockAt(x, y, z))
				for size_index in xrange(data_set_size):
					level_data[size_index].append(build_data(level, point, size_index + 1))
	print "writing data to file"
	for i in tqdm(xrange(data_set_size)):
		data_file = DATA_DIRECTORY + DATA_FILE + str(i) + EXTENSION
		# NOTE(review): result_file is malformed (RESULT_FILE is already a
		# full .csv path) and unused below.
		result_file = DATA_DIRECTORY + RESULT_FILE + str(i) + EXTENSION
		data = level_data[i]
		with open(data_file, "a") as myfile:
			for line in data:
				myfile.write(','.join(str(x) for x in line) + "\n")
		# NOTE(review): appends `results` once per radius (6 duplicate copies)
		# to RESULT_FILE; likely intended to run once outside this loop.
		with open(RESULT_FILE, "a") as myfile:
			for result in results:
				myfile.write(str(result) + "\n")
	print "completed"
| false | true |
f71cffb860b58eeb76624d1f1b619b2ed27a057c | 1,477 | py | Python | main.py | Chise1/bilibili-live-tools | 23cef9ae6a42aeb89b64fc1558c4a94b1075444b | [
"MIT"
] | 1 | 2020-07-02T08:39:20.000Z | 2020-07-02T08:39:20.000Z | main.py | Chise1/bilibili-live-tools | 23cef9ae6a42aeb89b64fc1558c4a94b1075444b | [
"MIT"
] | null | null | null | main.py | Chise1/bilibili-live-tools | 23cef9ae6a42aeb89b64fc1558c4a94b1075444b | [
"MIT"
] | null | null | null | # !/usr/bin/python
# -*- coding:utf-8 -*-
import subprocess, time, sys
from subprocess import Popen
from typing import Optional
# Base interval in seconds; the watchdog sleeps sleep_time * 20 between checks.
TIME = 3600
# Script to supervise; must end in ".py" or Auto_Run.run() does nothing.
CMD = "run.py"
class Auto_Run():
    """Watchdog that launches a Python script and periodically relaunches it.

    The supervised process is started in __init__ and then checked in an
    endless loop every ``sleep_time * 20`` seconds until Ctrl-C.
    """

    def __init__(self, sleep_time, cmd):
        """Start supervising `cmd`, restarting it on every check interval.

        :param sleep_time: base interval in seconds (multiplied by 20).
        :param cmd: path of the script to run; only ".py" files are launched.
        """
        if sys.version_info < (3, 6):
            print("only support python 3.6 and later version")
            sys.exit(1111)
        self.sleep_time = sleep_time
        self.cmd = cmd
        self.ext = (cmd[-3:]).lower()  # crude extension check; ".py" expected
        # BUGFIX: the original annotation `Optional[Popen[str]]` is evaluated
        # at runtime for attribute targets and `Popen[str]` is only
        # subscriptable on Python 3.9+, contradicting the 3.6 check above.
        # Plain `Popen` keeps the intent and works on all supported versions.
        self.p = None  # type: Optional[Popen]
        self.run()
        try:
            while 1:
                time.sleep(sleep_time * 20)
                self.poll = self.p.poll()
                if self.p.poll() is None:
                    # NOTE(review): poll() is None means the child is STILL
                    # running, yet it is killed and relaunched here, i.e. a
                    # forced periodic restart. Confirm this is intended rather
                    # than restart-only-on-exit.
                    print("restarting......")
                    self.p.kill()
                    self.run()
                else:
                    print("starting......")
                    self.run()
        except KeyboardInterrupt as e:
            print("exit???")

    def run(self):
        """Spawn `self.cmd` with the current interpreter; no-op otherwise."""
        if self.ext == ".py":
            print('start OK!')
            # use now running python version, think multiple python installed and now use python3.6 to run
            python_path = sys.executable
            print("use the absolute path of python to run", python_path)
            self.p = subprocess.Popen([python_path, '%s' % self.cmd], stdin=sys.stdin, stdout=sys.stdout,
                                      stderr=sys.stderr, shell=False)
        else:
            pass
# Start the watchdog only when executed directly, so importing this module
# does not immediately spawn (and endlessly supervise) the child process.
if __name__ == "__main__":
    app = Auto_Run(TIME, CMD)
import subprocess, time, sys
from subprocess import Popen
from typing import Optional
# Base interval in seconds; the watchdog sleeps sleep_time * 20 between checks.
TIME = 3600
# Script to supervise; must end in ".py" or Auto_Run.run() does nothing.
CMD = "run.py"
class Auto_Run():
    """Watchdog that launches a Python script and periodically relaunches it."""

    def __init__(self, sleep_time, cmd):
        """Start supervising `cmd`; only ".py" files are actually launched."""
        if sys.version_info < (3, 6):
            print("only support python 3.6 and later version")
            sys.exit(1111)
        self.sleep_time = sleep_time
        self.cmd = cmd
        self.ext = (cmd[-3:]).lower()  # crude extension check; ".py" expected
        # BUGFIX: `Popen[str]` is evaluated at runtime for attribute
        # annotations and only subscriptable on Python 3.9+, contradicting
        # the 3.6 support check above; plain `Popen` works everywhere.
        self.p = None  # type: Optional[Popen]
        self.run()
        try:
            while 1:
                time.sleep(sleep_time * 20)
                self.poll = self.p.poll()
                if self.p.poll() is None:
                    # NOTE(review): kills a still-running child and relaunches
                    # it (forced periodic restart) -- confirm intent.
                    print("restarting......")
                    self.p.kill()
                    self.run()
                else:
                    print("starting......")
                    self.run()
        except KeyboardInterrupt as e:
            print("exit???")

    def run(self):
        """Spawn `self.cmd` with the current interpreter; no-op otherwise."""
        if self.ext == ".py":
            print('start OK!')
            python_path = sys.executable
            print("use the absolute path of python to run", python_path)
            self.p = subprocess.Popen([python_path, '%s' % self.cmd], stdin=sys.stdin, stdout=sys.stdout,
                                      stderr=sys.stderr, shell=False)
        else:
            pass
# Start the watchdog only when executed directly, not on import.
if __name__ == "__main__":
    app = Auto_Run(TIME, CMD)
f71d0082d003aab3724f4c3350ac16a31aff1d21 | 11,526 | py | Python | ET/alfred/gen/render_trajs_from_et.py | amazon-research/multimodal-neuralslam | 530558fdfa31c6e048fc3e7b253f681f6786b04d | [
"MIT-0"
] | 4 | 2022-01-27T01:39:09.000Z | 2022-01-30T14:45:15.000Z | ET/alfred/gen/render_trajs_from_et.py | amazon-research/multimodal-neuralslam | 530558fdfa31c6e048fc3e7b253f681f6786b04d | [
"MIT-0"
] | 1 | 2022-02-05T14:06:25.000Z | 2022-02-05T14:06:25.000Z | ET/alfred/gen/render_trajs_from_et.py | amazon-research/multimodal-neuralslam | 530558fdfa31c6e048fc3e7b253f681f6786b04d | [
"MIT-0"
] | 1 | 2022-02-04T07:22:28.000Z | 2022-02-04T07:22:28.000Z | import os
import sys
import json
import numpy as np
import threading
import time
import copy
import random
import glob
import shutil
import pickle
from termcolor import colored
from sacred import Ingredient, Experiment
from alfred.env.thor_env import ThorEnv
from alfred.gen import constants
from alfred.gen.utils import augment_util, video_util
from alfred.utils import helper_util, model_util
# Sacred plumbing: the 'args' ingredient carries the config defined in
# cfg_args below; `ex` is the experiment whose @ex.automain runs main().
args_ingredient = Ingredient('args')
ex = Experiment('render_trajs', ingredients=[args_ingredient])
@args_ingredient.config
def cfg_args():
    """Sacred config scope: each local below becomes an `args.*` config key.

    Do not rename these locals -- sacred captures the variable names as the
    experiment's command-line-overridable options.
    """
    # dataset folder to dump frames to
    data_output = 'generated_2.1.0_exp_from_et_v2'
    # dataset folder to load jsons from
    data_input = 'json_2.1.0'
    # smooth navigation (like the original data)
    smooth_nav = False
    # time delays (like the original data)
    time_delays = True
    # whether to shuffle the order of augmenting
    shuffle = False
    # number of threads to start in parallel
    num_threads = 16
    # frame size to render
    render_size = 300
    # X server number (string, or a list/tuple to spread workers across displays)
    x_display = '0'
    # render and save RGB images
    render_frames = True
    # render and save depth images
    render_depth = False
    # render and save class segmentation masks
    render_class_masks = False
    # render and save instance segmentation masks
    render_instance_masks = False
    # save object bounding boxes
    save_detections = False
    # partitions to render data for
    partitions = ('tests_unseen',)
    # whether to overwrite data folder if it already exists
    overwrite = False
def setup_task(env, traj_data, args):
    """Reset the THOR scene described by `traj_data` and attach its task.

    Restores object poses/toggles and the dirty/empty state, replays the
    recorded initial agent action, and (when the trajectory carries a
    task_type) sets up the dense-reward task on the environment.
    """
    # scene setup
    scene_num = traj_data['scene']['scene_num']
    object_poses = traj_data['scene']['object_poses']
    object_toggles = traj_data['scene']['object_toggles']
    dirty_and_empty = traj_data['scene']['dirty_and_empty']
    # reset
    scene_name = 'FloorPlan%d' % scene_num
    env.reset(scene_name, silent=True)
    env.restore_scene(object_poses, object_toggles, dirty_and_empty)
    # rendered frames assume the recorded starting camera pitch of 30 degrees
    assert traj_data['scene']['init_action']['horizon'] == 30
    env.step(dict(traj_data['scene']['init_action']))
    # BUGFIX: the original print lacked the f-prefix (it printed the literal
    # '{traj_data[...]}' placeholders) and its ternary bound to the whole
    # concatenation, so nothing at all was printed without a task_type.
    task_str = f'Task: {traj_data["task_id"]}'
    if 'task_type' in traj_data:
        task_str += f' {traj_data["task_type"]}'
    print(task_str)
    if 'task_type' in traj_data:
        # setup task
        env.set_task(traj_data, reward_type='dense')
    augment_util.check_image(env.last_event.frame)
def batch(iterable, n=1):
    """Yield successive slices of `iterable` with at most `n` items each."""
    # Slicing clamps at the sequence end, so no explicit min() is needed.
    for start in range(0, len(iterable), n):
        yield iterable[start:start + n]
def augment_traj(env, json_file, args, video_saver, render_settings, test_mode):
    """Replay model-predicted navigation actions and render the frames.

    Loads the trajectory json, reads the matching pickled action sequence
    (predicted by an ET model) from a hard-coded predictions folder, then
    replays it three times -- camera pitched up, pitched down, and at the
    default horizon (save_idx 0..2) -- saving a frame per step.

    Returns True when every action executed successfully, False otherwise.
    NOTE(review): `video_saver` is accepted but never used here.
    """
    # load json data
    with open(json_file) as f:
        traj_data = json.load(f)
    traj_data['images'] = list()
    root_dir_to, rendered_images_dir, save_settings = augment_util.prepare_for_traj(
        json_file, args)
    # NOTE(review): machine-specific hard-coded predictions path -- the pkl
    # key is built from the trajectory's directory components.
    base_path = '/home/ubuntu/bak/ET/data/preds/et_human_synth'
    if test_mode:
        path = os.path.join(base_path, ':'.join(json_file.split('/')[-3:-1] + ['0']) + '.pkl') #####
    else:
        path = os.path.join(base_path, ':'.join(json_file.split('/')[-3:-1]) + '.pkl')
    # pickle.load on locally generated predictions (trusted input).
    with open(path, 'rb') as f:
        _, actions_ = pickle.load(f)
    look_up = {'action': 'LookUp', 'forceAction': True}
    look_down = {'action': 'LookDown', 'forceAction': True}
    move = {'action': 'MoveAhead', 'forceAction': True}
    left = {'action': 'RotateLeft', 'forceAction': True}
    right = {'action': 'RotateRight', 'forceAction': True}
    # Interleave a full 360-degree spin (4 x RotateRight_90) before and after
    # every 8 predicted actions, then cap the sequence at 225 steps.
    actions = ['RotateRight_90'] * 4
    for acts in batch(actions_, 8):
        actions.extend(acts)
        actions.extend(['RotateRight_90'] * 4)
    actions = actions[:225]
    print(len(actions), 'length')
    success = True
    for save_idx, init_action in enumerate([look_up, look_down, None]):
        setup_task(env, traj_data, args)
        # NOTE(review): `rewards` is never appended to and `img_count_` below
        # is never read -- both look like leftovers.
        rewards, img_count = [], 0
        if init_action is not None:
            if not isinstance(init_action, list):
                init_action = [init_action]
            for act in init_action:
                # tilt the camera without saving a frame for the tilt itself
                _, _ = augment_util.env_navigate(
                    act, env, save_settings, root_dir_to,
                    render_settings, args.smooth_nav, img_count, save=False)
        for act in actions:
            # Map the action string to a THOR API command. NOTE(review): an
            # action matching none of these prefixes silently reuses the
            # previous `cmd` -- confirm the action vocabulary is exactly
            # Move*/RotateLeft*/RotateRight*.
            if act.startswith('Move'): cmd = move
            if act.startswith('RotateLeft'): cmd = left
            if act.startswith('RotateRight'): cmd = right
            img_count_ = img_count # Save the image before the action execution.
            event, img_count = augment_util.env_navigate(
                cmd, env, save_settings, root_dir_to,
                render_settings, args.smooth_nav, img_count, save_idx=save_idx)
            if event is None:
                print(path, 'event is None!!!!!')
                success = False
                break
            if not event.metadata['lastActionSuccess']:
                print(colored("Replay Failed: %s" % (
                    env.last_event.metadata['errorMessage']), 'red'))
                print(path, 'event lastActionSuccess is False!!!!!')
                success = False
                break
        # save 1 frame in the end and increase the counter by 10
        # (to be alligned with the train data)
        augment_util.save_image(env.last_event, root_dir_to, save_settings, img_count, None)
        img_count += 10
    return success
def start_worker(worker_id, traj_list, args, lock, processed_files_path, test_mode):
    '''
    worker loop: pop trajectories from the shared list, render each one with
    its own ThorEnv instance, and append the outcome to processed_files_path.

    :param worker_id: index used to pick an X display when several are given.
    :param traj_list: shared mutable list of json paths (consumed via pop()).
    :param lock: threading.Lock guarding traj_list and the progress file.
    '''
    # round-robin workers over the available X displays
    if isinstance(args.x_display, (list, tuple)):
        x_display = args.x_display[worker_id % len(args.x_display)]
    else:
        x_display = args.x_display
    env = ThorEnv(x_display=x_display,
                  player_screen_width=args.render_size,
                  player_screen_height=args.render_size)
    video_saver = video_util.VideoSaver()
    render_settings = {
        'renderImage': True, # otherwise other images won't be rendered as well
        'renderDepthImage': args.render_depth,
        'renderObjectImage': args.render_instance_masks,
        'renderClassImage': args.render_class_masks}
    # NOTE(review): the emptiness check happens outside the lock, so another
    # worker may drain the list before pop() runs (IndexError race) -- benign
    # in practice only while a single worker runs.
    while len(traj_list) > 0:
        lock.acquire(timeout=120)
        json_file = traj_list.pop()
        json_path = os.path.join(args.data_input, json_file)
        jsons_left = len(traj_list)
        lock.release()
        print('Rendering {} ({} left)'.format(json_path, jsons_left))
        augment_success = augment_traj(
            env, json_path, args, video_saver, render_settings, test_mode)
        # update processed_files on the disk
        lock.acquire(timeout=120)
        with open(processed_files_path, 'a') as f:
            f.write('{};{}'.format(json_file, int(augment_success)) + '\n')
        model_util.update_log(
            args.data_output, stage='augment', update='increase', progress=1)
        lock.release()
    env.stop()
    print("Finished.")
@ex.automain
def main(args):
    """Entry point: index trajectory jsons and render them to the output set.

    Validates the sacred config, collects all `traj_data.json` files for the
    requested partitions, filters out already-processed ones (tracked in
    <data_output>/processed.txt), and dispatches rendering workers.
    """
    args = helper_util.AttrDict(**args)
    if args.data_output is None:
        raise RuntimeError('Please, specify the name of output dataset')
    if (not args.render_frames and not args.render_depth
            and not args.render_instance_masks and not args.render_class_masks):
        raise RuntimeError('At least one type of images should be rendered')
    # set up the paths
    args.data_input = os.path.join(constants.ET_DATA, args.data_input)
    print('Creating a dataset {} using data from {}'.format(
        args.data_output, args.data_input))
    if not os.path.isdir(args.data_input):
        raise RuntimeError('The input dataset {} does not exist'.format(
            args.data_input))
    args.data_output = os.path.join(constants.ET_DATA, args.data_output)
    processed_files_path = os.path.join(args.data_output, 'processed.txt')
    if os.path.exists(args.data_output) and args.overwrite:
        print('Erasing the old directory')
        shutil.rmtree(args.data_output)
    os.makedirs(args.data_output, exist_ok=True)
    # NOTE(review): hard-coded debug switch -- test partitions have one fewer
    # directory level, which changes both globbing and the stored key format.
    test_mode = True #####
    # make a list of all the traj_data json files
    traj_list = []
    print('Indexing images in {}'.format(args.partitions))
    for partition in args.partitions:
        if test_mode:
            file_list = glob.glob(os.path.join(args.data_input, partition, '*'))
        else:
            file_list = glob.glob(os.path.join(args.data_input, partition, '*/*'))
        for dir_name in sorted(file_list):
            if 'trial_' in os.path.basename(dir_name):
                json_path = os.path.join(dir_name, 'traj_data.json')
                # if 'trial_T20190909_101117_022448' not in json_path: continue
                if not os.path.isfile(json_path):
                    continue
                # store the partition-relative path as the trajectory key
                if test_mode:
                    traj_list.append('/'.join(json_path.split('/')[-3:]))
                else:
                    traj_list.append('/'.join(json_path.split('/')[-4:]))
    start_idx = 0
    #chunk_size = len(traj_list) // 3
    #traj_list = traj_list[chunk_size * start_idx : chunk_size * (start_idx + 1)]
    num_files, num_processed_files = len(traj_list), 0
    # remove jsons that were already processed
    if os.path.exists(processed_files_path):
        with open(processed_files_path) as f:
            processed_files = set(
                [line.strip().split(';')[0] for line in f.readlines()])
        # check whether which files are in the desired partitions
        processed_files = set(
            [f for f in processed_files if f.split('/')[0] in args.partitions])
        traj_list = [traj for traj in traj_list if traj not in processed_files]
        num_processed_files += len(processed_files)
        print('{} jsons were already processed'.format(num_processed_files))
    print(colored('The total number of triajectories to process is {}'.format(
        len(traj_list)), 'yellow'))
    model_util.save_log(args.data_output, progress=num_processed_files,
                        total=num_files, stage='augment')
    # random shuffle
    if args.shuffle:
        random.shuffle(traj_list)
    lock = threading.Lock()
    # NOTE(review): the `and False` disables multithreading entirely -- a
    # debug leftover; remove it to honour args.num_threads again.
    if args.num_threads > 0 and False:
        # start threads
        threads = []
        for worker_id in range(min(args.num_threads, len(traj_list))):
            thread = threading.Thread(
                target=start_worker,
                args=(worker_id, traj_list, args, lock, processed_files_path, test_mode))
            threads.append(thread)
            thread.start()
            time.sleep(1)
        for thread in threads:
            thread.join()
    else:
        # run in the main thread
        start_worker(0, traj_list, args, lock, processed_files_path, test_mode)
    # NOTE(review): this bare return makes everything below dead code
    # (the completion summary and the commented-out tests-folder copy).
    return
    with open(processed_files_path) as f:
        num_processed_files = len(f.readlines())
    if num_files != num_processed_files:
        print(colored('{} trajectories were skipped'.format(
            num_files - num_processed_files), 'red'))
    else:
        print(colored('All trajectories were successfully recorded', 'green'))
    #print('Copying tests folders')
    #if not os.path.exists(os.path.join(args.data_output, 'tests_seen')):
    #    shutil.copytree(os.path.join(args.data_input, 'tests_seen'),
    #                    os.path.join(args.data_output, 'tests_seen'))
    #if not os.path.exists(os.path.join(args.data_output, 'tests_unseen')):
    #    shutil.copytree(os.path.join(args.data_input, 'tests_unseen'),
    #                    os.path.join(args.data_output, 'tests_unseen'))
    print('The generated dataset is saved to {}'.format(args.data_output))
| 38.42 | 100 | 0.642461 | import os
import sys
import json
import numpy as np
import threading
import time
import copy
import random
import glob
import shutil
import pickle
from termcolor import colored
from sacred import Ingredient, Experiment
from alfred.env.thor_env import ThorEnv
from alfred.gen import constants
from alfred.gen.utils import augment_util, video_util
from alfred.utils import helper_util, model_util
# Sacred plumbing: the 'args' ingredient holds the cfg_args config below;
# `ex` is the experiment whose @ex.automain runs main().
args_ingredient = Ingredient('args')
ex = Experiment('render_trajs', ingredients=[args_ingredient])
@args_ingredient.config
def cfg_args():
    """Sacred config scope: each local below becomes an `args.*` config key.

    Do not rename these locals -- sacred captures the variable names as the
    experiment's command-line-overridable options.
    """
    data_output = 'generated_2.1.0_exp_from_et_v2'  # output dataset folder
    data_input = 'json_2.1.0'                       # folder with traj jsons
    smooth_nav = False           # smooth navigation (like the original data)
    time_delays = True           # time delays (like the original data)
    shuffle = False              # shuffle the order of augmenting
    num_threads = 16             # parallel worker threads
    render_size = 300            # frame size to render (pixels)
    x_display = '0'              # X server number (or list/tuple of them)
    render_frames = True         # save RGB images
    render_depth = False         # save depth images
    render_class_masks = False   # save class segmentation masks
    render_instance_masks = False  # save instance segmentation masks
    save_detections = False      # save object bounding boxes
    partitions = ('tests_unseen',)  # partitions to render data for
    overwrite = False            # overwrite data folder if it already exists
def setup_task(env, traj_data, args):
    """Reset the THOR scene described by `traj_data` and attach its task.

    Restores object poses/toggles and dirty/empty state, replays the recorded
    initial action, and attaches the dense-reward task when present.
    """
    scene_num = traj_data['scene']['scene_num']
    object_poses = traj_data['scene']['object_poses']
    object_toggles = traj_data['scene']['object_toggles']
    dirty_and_empty = traj_data['scene']['dirty_and_empty']
    scene_name = 'FloorPlan%d' % scene_num
    env.reset(scene_name, silent=True)
    env.restore_scene(object_poses, object_toggles, dirty_and_empty)
    assert traj_data['scene']['init_action']['horizon'] == 30
    env.step(dict(traj_data['scene']['init_action']))
    # BUGFIX: the original print lacked the f-prefix (printed the literal
    # placeholders) and its ternary bound to the whole concatenation, so
    # nothing printed for trajectories without a task_type.
    task_str = f'Task: {traj_data["task_id"]}'
    if 'task_type' in traj_data:
        task_str += f' {traj_data["task_type"]}'
    print(task_str)
    if 'task_type' in traj_data:
        env.set_task(traj_data, reward_type='dense')
    augment_util.check_image(env.last_event.frame)
def batch(iterable, n=1):
    """Yield successive slices of `iterable` with at most `n` items each."""
    for start in range(0, len(iterable), n):
        # slicing clamps at the end, so min() is unnecessary
        yield iterable[start:start + n]
def augment_traj(env, json_file, args, video_saver, render_settings, test_mode):
with open(json_file) as f:
traj_data = json.load(f)
traj_data['images'] = list()
root_dir_to, rendered_images_dir, save_settings = augment_util.prepare_for_traj(
json_file, args)
base_path = '/home/ubuntu/bak/ET/data/preds/et_human_synth'
if test_mode:
path = os.path.join(base_path, ':'.join(json_file.split('/')[-3:-1] + ['0']) + '.pkl')
path = os.path.join(base_path, ':'.join(json_file.split('/')[-3:-1]) + '.pkl')
with open(path, 'rb') as f:
_, actions_ = pickle.load(f)
look_up = {'action': 'LookUp', 'forceAction': True}
look_down = {'action': 'LookDown', 'forceAction': True}
move = {'action': 'MoveAhead', 'forceAction': True}
left = {'action': 'RotateLeft', 'forceAction': True}
right = {'action': 'RotateRight', 'forceAction': True}
actions = ['RotateRight_90'] * 4
for acts in batch(actions_, 8):
actions.extend(acts)
actions.extend(['RotateRight_90'] * 4)
actions = actions[:225]
print(len(actions), 'length')
success = True
for save_idx, init_action in enumerate([look_up, look_down, None]):
setup_task(env, traj_data, args)
rewards, img_count = [], 0
if init_action is not None:
if not isinstance(init_action, list):
init_action = [init_action]
for act in init_action:
_, _ = augment_util.env_navigate(
act, env, save_settings, root_dir_to,
render_settings, args.smooth_nav, img_count, save=False)
for act in actions:
if act.startswith('Move'): cmd = move
if act.startswith('RotateLeft'): cmd = left
if act.startswith('RotateRight'): cmd = right
img_count_ = img_count
event, img_count = augment_util.env_navigate(
cmd, env, save_settings, root_dir_to,
render_settings, args.smooth_nav, img_count, save_idx=save_idx)
if event is None:
print(path, 'event is None!!!!!')
success = False
break
if not event.metadata['lastActionSuccess']:
print(colored("Replay Failed: %s" % (
env.last_event.metadata['errorMessage']), 'red'))
print(path, 'event lastActionSuccess is False!!!!!')
success = False
break
augment_util.save_image(env.last_event, root_dir_to, save_settings, img_count, None)
img_count += 10
return success
def start_worker(worker_id, traj_list, args, lock, processed_files_path, test_mode):
if isinstance(args.x_display, (list, tuple)):
x_display = args.x_display[worker_id % len(args.x_display)]
else:
x_display = args.x_display
env = ThorEnv(x_display=x_display,
player_screen_width=args.render_size,
player_screen_height=args.render_size)
video_saver = video_util.VideoSaver()
render_settings = {
'renderImage': True,
'renderDepthImage': args.render_depth,
'renderObjectImage': args.render_instance_masks,
'renderClassImage': args.render_class_masks}
while len(traj_list) > 0:
lock.acquire(timeout=120)
json_file = traj_list.pop()
json_path = os.path.join(args.data_input, json_file)
jsons_left = len(traj_list)
lock.release()
print('Rendering {} ({} left)'.format(json_path, jsons_left))
augment_success = augment_traj(
env, json_path, args, video_saver, render_settings, test_mode)
# update processed_files on the disk
lock.acquire(timeout=120)
with open(processed_files_path, 'a') as f:
f.write('{};{}'.format(json_file, int(augment_success)) + '\n')
model_util.update_log(
args.data_output, stage='augment', update='increase', progress=1)
lock.release()
env.stop()
print("Finished.")
@ex.automain
def main(args):
args = helper_util.AttrDict(**args)
if args.data_output is None:
raise RuntimeError('Please, specify the name of output dataset')
if (not args.render_frames and not args.render_depth
and not args.render_instance_masks and not args.render_class_masks):
raise RuntimeError('At least one type of images should be rendered')
# set up the paths
args.data_input = os.path.join(constants.ET_DATA, args.data_input)
print('Creating a dataset {} using data from {}'.format(
args.data_output, args.data_input))
if not os.path.isdir(args.data_input):
raise RuntimeError('The input dataset {} does not exist'.format(
args.data_input))
args.data_output = os.path.join(constants.ET_DATA, args.data_output)
processed_files_path = os.path.join(args.data_output, 'processed.txt')
if os.path.exists(args.data_output) and args.overwrite:
print('Erasing the old directory')
shutil.rmtree(args.data_output)
os.makedirs(args.data_output, exist_ok=True)
test_mode = True #####
# make a list of all the traj_data json files
traj_list = []
print('Indexing images in {}'.format(args.partitions))
for partition in args.partitions:
if test_mode:
file_list = glob.glob(os.path.join(args.data_input, partition, '*'))
else:
file_list = glob.glob(os.path.join(args.data_input, partition, '*/*'))
for dir_name in sorted(file_list):
if 'trial_' in os.path.basename(dir_name):
json_path = os.path.join(dir_name, 'traj_data.json')
# if 'trial_T20190909_101117_022448' not in json_path: continue
if not os.path.isfile(json_path):
continue
if test_mode:
traj_list.append('/'.join(json_path.split('/')[-3:]))
else:
traj_list.append('/'.join(json_path.split('/')[-4:]))
start_idx = 0
#chunk_size = len(traj_list) // 3
#traj_list = traj_list[chunk_size * start_idx : chunk_size * (start_idx + 1)]
num_files, num_processed_files = len(traj_list), 0
# remove jsons that were already processed
if os.path.exists(processed_files_path):
with open(processed_files_path) as f:
processed_files = set(
[line.strip().split(';')[0] for line in f.readlines()])
# check whether which files are in the desired partitions
processed_files = set(
[f for f in processed_files if f.split('/')[0] in args.partitions])
traj_list = [traj for traj in traj_list if traj not in processed_files]
num_processed_files += len(processed_files)
print('{} jsons were already processed'.format(num_processed_files))
print(colored('The total number of triajectories to process is {}'.format(
len(traj_list)), 'yellow'))
model_util.save_log(args.data_output, progress=num_processed_files,
total=num_files, stage='augment')
# random shuffle
if args.shuffle:
random.shuffle(traj_list)
lock = threading.Lock()
if args.num_threads > 0 and False:
# start threads
threads = []
for worker_id in range(min(args.num_threads, len(traj_list))):
thread = threading.Thread(
target=start_worker,
args=(worker_id, traj_list, args, lock, processed_files_path, test_mode))
threads.append(thread)
thread.start()
time.sleep(1)
for thread in threads:
thread.join()
else:
# run in the main thread
start_worker(0, traj_list, args, lock, processed_files_path, test_mode)
return
with open(processed_files_path) as f:
num_processed_files = len(f.readlines())
if num_files != num_processed_files:
print(colored('{} trajectories were skipped'.format(
num_files - num_processed_files), 'red'))
else:
print(colored('All trajectories were successfully recorded', 'green'))
#print('Copying tests folders')
#if not os.path.exists(os.path.join(args.data_output, 'tests_seen')):
# shutil.copytree(os.path.join(args.data_input, 'tests_seen'),
# os.path.join(args.data_output, 'tests_seen'))
#if not os.path.exists(os.path.join(args.data_output, 'tests_unseen')):
# shutil.copytree(os.path.join(args.data_input, 'tests_unseen'),
# os.path.join(args.data_output, 'tests_unseen'))
print('The generated dataset is saved to {}'.format(args.data_output))
| true | true |
f71d01b7f7dd99438c8b39dc984dbad4ab9a5f08 | 993 | py | Python | cacao_accounting/contabilidad/registros/ccosto.py | cacao-accounting/cacao-accounting-mockup | ca4da3b4e48a4796fc39a12a482d9ccc1e8e6e38 | [
"Apache-2.0"
] | 2 | 2021-08-19T01:29:49.000Z | 2021-09-11T16:21:40.000Z | cacao_accounting/contabilidad/registros/ccosto.py | cacao-accounting/cacao-accounting-mockup | ca4da3b4e48a4796fc39a12a482d9ccc1e8e6e38 | [
"Apache-2.0"
] | 29 | 2020-07-14T23:59:32.000Z | 2021-11-04T07:42:31.000Z | cacao_accounting/contabilidad/registros/ccosto.py | cacao-accounting/cacao-accounting | 3e6fa0080db3d44b1b3b8d93e46bd96a5a8d515b | [
"Apache-2.0"
] | 3 | 2020-07-12T00:52:18.000Z | 2021-08-18T01:09:22.000Z | # Copyright 2020 William José Moreno Reyes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Contributors:
# - William José Moreno Reyes
"""Administración centros de costos."""
from cacao_accounting.registro import Registro
class RegistroCentroCosto(Registro):
"""Registro para la administración de Centros de Costos."""
def __init__(self):
"""Administración centros de costos."""
from cacao_accounting.database import CentroCosto
self.tabla = CentroCosto
| 32.032258 | 74 | 0.747231 |
from cacao_accounting.registro import Registro
class RegistroCentroCosto(Registro):
def __init__(self):
from cacao_accounting.database import CentroCosto
self.tabla = CentroCosto
| true | true |
f71d01f2b0c1b5a7dec480a4a5104f45b41cb83c | 543 | py | Python | keras/preprocessing/sequence.py | seba-1511/gsoc15-demo | 7fa542f33fdb39d73e2b11318c046ecf35fb9bcf | [
"MIT"
] | 2 | 2015-09-15T19:19:24.000Z | 2019-04-21T12:10:27.000Z | keras/preprocessing/sequence.py | wavelets/keras | c57d5cce7903511edd4048f8bfed2ad0dc6f6b6b | [
"MIT"
] | null | null | null | keras/preprocessing/sequence.py | wavelets/keras | c57d5cce7903511edd4048f8bfed2ad0dc6f6b6b | [
"MIT"
] | null | null | null | import numpy as np
def pad_sequences(seqs, maxlen=None, dtype='int32'):
"""
Pad each sequence to the same lenght:
the lenght of the longuest sequence.
If maxlen is provided, any sequence longer
than maxlen is truncated to maxlen.
"""
lengths = [len(s) for s in seqs]
nb_samples = len(seqs)
if maxlen is None:
maxlen = np.max(lengths)
x = np.zeros((nb_samples, maxlen)).astype(dtype)
for idx, s in enumerate(seqs):
x[idx, :lengths[idx]] = s[:maxlen]
return x | 25.857143 | 52 | 0.615101 | import numpy as np
def pad_sequences(seqs, maxlen=None, dtype='int32'):
lengths = [len(s) for s in seqs]
nb_samples = len(seqs)
if maxlen is None:
maxlen = np.max(lengths)
x = np.zeros((nb_samples, maxlen)).astype(dtype)
for idx, s in enumerate(seqs):
x[idx, :lengths[idx]] = s[:maxlen]
return x | true | true |
f71d033f264f8a2c8ac1bb10e308f59fe6a66bae | 7,853 | py | Python | example/add.py | tjengbudi/python-shopee | a74e99e7a900ed0a3c0cba2b7405238acf2ee16c | [
"MIT"
] | 166 | 2018-04-25T16:43:30.000Z | 2022-03-20T07:07:39.000Z | example/add.py | tjengbudi/python-shopee | a74e99e7a900ed0a3c0cba2b7405238acf2ee16c | [
"MIT"
] | 34 | 2018-11-27T02:56:08.000Z | 2022-01-28T05:24:57.000Z | example/add.py | tjengbudi/python-shopee | a74e99e7a900ed0a3c0cba2b7405238acf2ee16c | [
"MIT"
] | 62 | 2018-06-12T02:53:34.000Z | 2022-03-13T07:31:34.000Z | import pyshopee
import re
import pandas as pd
from pprint import pprint
def _builder_attributes(attributes_resp, brand_option = None, default_brand_option = "自有品牌"):
'''select mandatory attr.
attributes = [
{
'attributes_id': 1365,
'value': 'Napla(娜普菈)'
}
]
'''
attributes = []
# in case attributes response is not define in api response
if attributes_resp.get("attributes"):
for ele in attributes_resp.get("attributes"):
if ele.get("is_mandatory") and ele.get("attribute_name")=='品牌':
attributes.append(
{
"attributes_id": ele.get("attribute_id"),
"value": brand_option if brand_option else default_brand_option
})
elif ele.get("is_mandatory"):
attributes.append(
{
# checking the value if value can radom or set as " "
"attributes_id": ele.get("attribute_id"),
"value": ele.get("options")[0] if len(ele.get("options")) > 0 else " ",
})
else:
pass
else:
return None
return attributes
def _builder_logistics(**params):
'''
logistics = [
# 'logistic_name': '黑貓宅急便'
{
'logistic_id': 30001,
'enabled':False
},
# 'logistic_name': '7-11',
{
'logistic_id': 30005,
'enabled':False
},
# 'logistic_name': '全家',
{
'logistic_id': 30006,
'enabled':False
},
# 'logistic_name': '萊爾富',
{
'logistic_id': 30007,
'enabled':False
},
# 'logistic_name': 'OK Mart',
{
'logistic_id': 30008,
'enabled':False
},
# 'logistic_name': '中華郵政',
{
'logistic_id': 39303,
'enabled':False
},
# 'logistic_name': '賣家宅配',
{
'logistic_id': 39304,
'enabled':False
},
# 'logistic_name': '宅配',
{
'logistic_id': 39307,
'enabled':True
}
]
'''
logistics = list()
resp = shopee.logistic.get_logistics()
logistics_resp = resp.get("logistics")
for logis in logistics_resp:
if logis.get('enabled'):
# logistics.append({
# 'logistic_id': logis.get('logistic_id'),
# 'enabled': logis.get('enabled')
# })
if logis.get('fee_type') == 'SIZE_SELECTION':
logis['sizes'] = logis['sizes'][0]['size_id']
else:
logistics.append(logis)
return logistics
def _builder_images(single, **params):
'''
images = [
{
"url": "https://cfshopeetw-a.akamaihd.net/file/b77c9b16ec1dd734c0c663fd1fcb8ac0"
},
{
"url": 'https://cfshopeetw-a.akamaihd.net/file/b77c9b16ec1dd734c0c663fd1fcb8ac0'
},
{
"url": 'https://cfshopeetw-a.akamaihd.net/file/b77c9b16ec1dd734c0c663fd1fcb8ac0'
},
{
"url": 'https://cfshopeetw-a.akamaihd.net/file/b77c9b16ec1dd734c0c663fd1fcb8ac0'
}
]
'''
images_container = []
images_container.extend( single.get("images").split(",") )
images = []
for img in images_container:
images.append(
{
"url": "https://cfshopeetw-a.akamaihd.net/file/" + str(img)
}
)
return images
def _builder_variations(data, **params):
'''
variations = [
{
"name": "Black",
"stock": 1,
"price": 1999.0,
"variation_sku": "SKU-ABCD-EFG0-002"
},
{
"name": "Red",
"stock": 1,
"price": 2999.0,
"variation_sku": "SKU-ABCD-EFG0-003"
}
]
'''
multi = len(data) if len(data) > 1 else None
variations_container = []
if multi:
for ele in data:
variations = {}
# check
if ele["modelid"] == 0 or ele["model_status"] == 0:
pass
else:
variations.setdefault("name",ele["model_name"].strip())
variations.setdefault("stock",1)
variations.setdefault("price",ele["model_price"])
if ele.get("variation_sku"):
variations.setdefault("variation_sku",ele.get("variation_sku"))
variations_container.append(variations)
return variations_container
else:
return None
def _builder_weight(single, default_weight=0.1, **params):
''' the net weight of this item, the unit is KG.
- type: float
- require: yes
'''
if single.get("item_weight"):
weight = single.get("item_weight")/100000
else:
weight = default_weight
return float(weight)
def _cleaning_hashtag(description, **params):
hashtag_pattern = re.compile(r"#(.*)[\s]{0,1}", flags=re.UNICODE)
cleaned_description = hashtag_pattern.sub(r' ', description)
return cleaned_description
if __name__ == '__main__':
# build the connection
shopee = pyshopee.Client( shop_id= your_shopid,
partner_id=your_partner_id,
secret_key=your_secret_key )
# build your data in here
single = {
"category_id":category_id,
"item_name":item_name,
"descriptio":descriptio,
"item_price":item_price,
"item_weight":item_weight,
"category_id":category_id,
"images":images
}
product_data = {
"category_id": single.get("category_id"),
"name": single.get("item_name").strip(),
"description": _cleaning_hashtag(description =single.get("description") ),
"price": single.get("item_price") if single.get("item_price") > 0 else data[1].get("item_price"),
"stock": 1,
"weight": _builder_weight(single=single, default_weight=0.1),
# "variations": variations,
"images": _builder_images(single=single),
# "attributes": _builder_attributes( attributes_resp = shopee.item.get_attributes(category_id=int(single["category_id"])),
# brand_option = single.get("value"),
# default_brand_option = "自有品牌" ),
"logistics": _builder_logistics(),
# "package_length": 200,
# "package_width": 200,
# "package_height": 200,
# "days_to_ship": 10,
# "wholesales": wholesales
}
attributes = _builder_attributes( attributes_resp = shopee.item.get_attributes(category_id=int(single["category_id"])),
brand_option = single.get("value"),
default_brand_option = "自有品牌" )
if attributes:
product_data.setdefault("attributes",attributes)
variations = _builder_variations(data=data)
if variations:
product_data.setdefault("variations",variations)
# adding process
response = shopee.item.add(product_data=product_data)
pprint(response) | 29.859316 | 134 | 0.492041 | import pyshopee
import re
import pandas as pd
from pprint import pprint
def _builder_attributes(attributes_resp, brand_option = None, default_brand_option = "自有品牌"):
attributes = []
if attributes_resp.get("attributes"):
for ele in attributes_resp.get("attributes"):
if ele.get("is_mandatory") and ele.get("attribute_name")=='品牌':
attributes.append(
{
"attributes_id": ele.get("attribute_id"),
"value": brand_option if brand_option else default_brand_option
})
elif ele.get("is_mandatory"):
attributes.append(
{
"attributes_id": ele.get("attribute_id"),
"value": ele.get("options")[0] if len(ele.get("options")) > 0 else " ",
})
else:
pass
else:
return None
return attributes
def _builder_logistics(**params):
logistics = list()
resp = shopee.logistic.get_logistics()
logistics_resp = resp.get("logistics")
for logis in logistics_resp:
if logis.get('enabled'):
if logis.get('fee_type') == 'SIZE_SELECTION':
logis['sizes'] = logis['sizes'][0]['size_id']
else:
logistics.append(logis)
return logistics
def _builder_images(single, **params):
images_container = []
images_container.extend( single.get("images").split(",") )
images = []
for img in images_container:
images.append(
{
"url": "https://cfshopeetw-a.akamaihd.net/file/" + str(img)
}
)
return images
def _builder_variations(data, **params):
multi = len(data) if len(data) > 1 else None
variations_container = []
if multi:
for ele in data:
variations = {}
if ele["modelid"] == 0 or ele["model_status"] == 0:
pass
else:
variations.setdefault("name",ele["model_name"].strip())
variations.setdefault("stock",1)
variations.setdefault("price",ele["model_price"])
if ele.get("variation_sku"):
variations.setdefault("variation_sku",ele.get("variation_sku"))
variations_container.append(variations)
return variations_container
else:
return None
def _builder_weight(single, default_weight=0.1, **params):
if single.get("item_weight"):
weight = single.get("item_weight")/100000
else:
weight = default_weight
return float(weight)
def _cleaning_hashtag(description, **params):
hashtag_pattern = re.compile(r"#(.*)[\s]{0,1}", flags=re.UNICODE)
cleaned_description = hashtag_pattern.sub(r' ', description)
return cleaned_description
if __name__ == '__main__':
shopee = pyshopee.Client( shop_id= your_shopid,
partner_id=your_partner_id,
secret_key=your_secret_key )
single = {
"category_id":category_id,
"item_name":item_name,
"descriptio":descriptio,
"item_price":item_price,
"item_weight":item_weight,
"category_id":category_id,
"images":images
}
product_data = {
"category_id": single.get("category_id"),
"name": single.get("item_name").strip(),
"description": _cleaning_hashtag(description =single.get("description") ),
"price": single.get("item_price") if single.get("item_price") > 0 else data[1].get("item_price"),
"stock": 1,
"weight": _builder_weight(single=single, default_weight=0.1),
"images": _builder_images(single=single),
"logistics": _builder_logistics(),
}
attributes = _builder_attributes( attributes_resp = shopee.item.get_attributes(category_id=int(single["category_id"])),
brand_option = single.get("value"),
default_brand_option = "自有品牌" )
if attributes:
product_data.setdefault("attributes",attributes)
variations = _builder_variations(data=data)
if variations:
product_data.setdefault("variations",variations)
response = shopee.item.add(product_data=product_data)
pprint(response) | true | true |
f71d035297f489215598c4103d58e28bf84e12d5 | 7,753 | py | Python | transformer/third_party/feat_convert/kaldi_io/batchmk.py | wxy1988/ASR | 8ef3ef347523044c89c46c263ecc7b8e9b2c06d1 | [
"Apache-2.0"
] | 60 | 2018-08-21T08:07:31.000Z | 2021-11-08T10:40:17.000Z | transformer/third_party/feat_convert/kaldi_io/batchmk.py | wxy1988/ASR | 8ef3ef347523044c89c46c263ecc7b8e9b2c06d1 | [
"Apache-2.0"
] | 7 | 2018-10-23T08:50:15.000Z | 2021-11-15T09:32:29.000Z | transformer/third_party/feat_convert/kaldi_io/batchmk.py | wxy1988/ASR | 8ef3ef347523044c89c46c263ecc7b8e9b2c06d1 | [
"Apache-2.0"
] | 29 | 2018-09-21T06:11:03.000Z | 2022-02-18T07:12:58.000Z | #!/usr/bin/python
# coding=utf-8
"""
@version:
@author: Dong Linhao
@license: Apache Licence
@contact: donglinhao2015@ia.ac.cn
@site:
@software: PyCharm Community Edition
@file: batchmk.py
@time: 09/04/17 21:10
"""
import src.io.fea as fea
import tensorflow as tf
import numpy as np
import time
LONGEST_FRMS = 2000
class lstm_batch(object):
def __init__(self, num_streams, num_steps, input_dim):
self.sample_feat_list = [np.zeros([LONGEST_FRMS, input_dim]) for _ in range(num_streams)]
self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)]
self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)]
self.curt = np.zeros(num_streams, dtype=int)
self.lent = np.zeros(num_streams, dtype=int)
self.reset_flag = np.zeros(num_streams, dtype=bool)
self.num_streams = num_streams
self.num_steps = num_steps
self.input_dim = input_dim
self.handled_utt_num = 0
self.handled_frm_num = 0
self.cur_epoch_finish = False
def set_stream_num(self, num_streams):
self.num_streams = num_streams
self.sample_feat_list = [np.zeros([LONGEST_FRMS, self.input_dim]) for _ in range(num_streams)]
self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)]
self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)]
self.curt = np.zeros(num_streams, dtype=int)
self.lent = np.zeros(num_streams, dtype=int)
self.reset_flag = np.zeros(num_streams, dtype=bool)
def reset(self):
self.sample_feat_list = [np.zeros([LONGEST_FRMS, self.input_dim]) for _ in range(self.num_streams)]
self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(self.num_streams)]
self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(self.num_streams)]
self.curt = np.zeros(self.num_streams, dtype=int)
self.lent = np.zeros(self.num_streams, dtype=int)
self.reset_flag = np.zeros(self.num_streams, dtype=bool)
self.handled_utt_num = 0
self.handled_frm_num = 0
self.cur_epoch_finish = False
def make_batch(self, sess, sample, run_device, total_utt_num):
with tf.device(run_device):
multistream_feat_batch = [np.zeros([self.num_steps, self.input_dim]) for _ in range(self.num_streams)]
multistream_label_batch = [np.zeros([self.num_steps]) for _ in range(self.num_streams)]
multistream_mask_batch = [np.zeros([self.num_steps]) for _ in range(self.num_streams)]
reset_flag = np.zeros(self.num_streams, dtype=bool)
for s in range(self.num_streams):
if self.curt[s] < self.lent[s]:
reset_flag[s] = False
continue
if self.handled_utt_num < total_utt_num:
sample_feats, sample_labels, sample_masks = sess.run(sample)
self.handled_utt_num += 1
self.sample_feat_list[s] = sample_feats
self.sample_label_list[s] = sample_labels
self.sample_mask_list[s] = sample_masks
self.lent[s] = np.shape(sample_feats)[0]
self.curt[s] = 0
reset_flag[s] = True
for s in range(self.num_streams):
if self.curt[s] < self.lent[s]:
multistream_feat_batch[s] = self.sample_feat_list[s][self.curt[s]:self.curt[s]+self.num_steps, :]
multistream_label_batch[s] = self.sample_label_list[s][self.curt[s]:self.curt[s]+self.num_steps]
multistream_mask_batch[s] = self.sample_mask_list[s][self.curt[s]:self.curt[s]+self.num_steps]
self.curt[s] += self.num_steps
self.handled_frm_num += np.sum(multistream_mask_batch[s])
else:
multistream_mask_batch[s] = np.zeros([self.num_steps])
final_feat_batch = np.stack(multistream_feat_batch, axis=1)
final_label_batch = np.stack(multistream_label_batch, axis=1)
final_mask_batch = np.stack(multistream_mask_batch, axis=1)
done = True
for s in range(self.num_streams):
if self.curt[s] < self.lent[s]:
done = False
if done:
self.cur_epoch_finish = True
return final_feat_batch, final_label_batch, final_mask_batch, reset_flag
def getfilelst(scp_file_path):
# get tf list
tf_list = []
with open(scp_file_path) as list_file:
for line in list_file:
tf_list.append(line.strip())
return tf_list
def process_my_feature(feature, label, flags):
# Add delta
if flags.add_delta:
feature = fea.tf_fea_add_delt(feature)
# CMVN
feature = fea.tf_fea_cmvn_global(feature, flags.feat_mean, flags.feat_var)
# Splice
feature = fea.tf_fea_splice(feature, flags.l_splice, flags.r_splice)
feature = tf.reshape(feature, [-1, flags.input_dim])
return feature[:], label[:]
def read_my_file_format(filename_queue, org_feat_dim):
# build reader
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
raw_example = tf.parse_single_example(
serialized_example,
# Defaults are not specified since both keys are required.
features={
'feat': tf.FixedLenFeature([], tf.string),
'label': tf.FixedLenFeature([], tf.string),
})
example = tf.decode_raw(raw_example['feat'], tf.float32)
example = tf.reshape(example, [-1, org_feat_dim])
label = tf.decode_raw(raw_example['label'], tf.int32)
return example, label
def lstm_input_pipeline(flags, is_training, num_epochs=None, shuffle_state = True):
with tf.device(flags.default_device):
if is_training:
filenames = getfilelst(flags.trn_data_dir + '/tf.lst')
else:
filenames = getfilelst(flags.cv_data_dir + '/tf.lst')
# generate file queue
filename_queue = tf.train.string_input_producer(
filenames, num_epochs = num_epochs, shuffle = shuffle_state)
# read from file queue
sample = read_my_file_format(filename_queue, flags.org_feat_dim)
# handle sample
sample_feats, sample_labels = process_my_feature(sample[0], sample[1], flags)
sample_length = tf.shape(sample_feats)[0]
sample_masks = tf.ones([sample_length], dtype=tf.float32)
# add target delay
if flags.target_delay > 0:
feats_part1 = tf.slice(sample_feats, [flags.target_delay, 0], [sample_length-flags.target_delay, -1])
last_frm_feats = tf.slice(sample_feats, [sample_length-1, 0], [1, -1])
feats_part2 = tf.concat([last_frm_feats for _ in range(flags.target_delay)], axis=0)
sample_feats = tf.concat([feats_part1, feats_part2], axis=0)
padding_length = flags.num_steps - sample_length % flags.num_steps
padding_feats = tf.zeros([padding_length, flags.input_dim], dtype=tf.float32)
feats = tf.concat(axis=0, values=[sample_feats, padding_feats])
padding_labels = tf.zeros([padding_length], dtype=tf.int32)
labels = tf.concat(axis=0, values=[sample_labels, padding_labels])
padding_masks = tf.zeros([padding_length], dtype=tf.float32)
frame_masks = tf.concat(axis=0, values=[sample_masks, padding_masks])
return feats, labels, frame_masks
| 41.682796 | 118 | 0.632013 |
import src.io.fea as fea
import tensorflow as tf
import numpy as np
import time
LONGEST_FRMS = 2000
class lstm_batch(object):
def __init__(self, num_streams, num_steps, input_dim):
self.sample_feat_list = [np.zeros([LONGEST_FRMS, input_dim]) for _ in range(num_streams)]
self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)]
self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)]
self.curt = np.zeros(num_streams, dtype=int)
self.lent = np.zeros(num_streams, dtype=int)
self.reset_flag = np.zeros(num_streams, dtype=bool)
self.num_streams = num_streams
self.num_steps = num_steps
self.input_dim = input_dim
self.handled_utt_num = 0
self.handled_frm_num = 0
self.cur_epoch_finish = False
def set_stream_num(self, num_streams):
self.num_streams = num_streams
self.sample_feat_list = [np.zeros([LONGEST_FRMS, self.input_dim]) for _ in range(num_streams)]
self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)]
self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)]
self.curt = np.zeros(num_streams, dtype=int)
self.lent = np.zeros(num_streams, dtype=int)
self.reset_flag = np.zeros(num_streams, dtype=bool)
def reset(self):
self.sample_feat_list = [np.zeros([LONGEST_FRMS, self.input_dim]) for _ in range(self.num_streams)]
self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(self.num_streams)]
self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(self.num_streams)]
self.curt = np.zeros(self.num_streams, dtype=int)
self.lent = np.zeros(self.num_streams, dtype=int)
self.reset_flag = np.zeros(self.num_streams, dtype=bool)
self.handled_utt_num = 0
self.handled_frm_num = 0
self.cur_epoch_finish = False
def make_batch(self, sess, sample, run_device, total_utt_num):
with tf.device(run_device):
multistream_feat_batch = [np.zeros([self.num_steps, self.input_dim]) for _ in range(self.num_streams)]
multistream_label_batch = [np.zeros([self.num_steps]) for _ in range(self.num_streams)]
multistream_mask_batch = [np.zeros([self.num_steps]) for _ in range(self.num_streams)]
reset_flag = np.zeros(self.num_streams, dtype=bool)
for s in range(self.num_streams):
if self.curt[s] < self.lent[s]:
reset_flag[s] = False
continue
if self.handled_utt_num < total_utt_num:
sample_feats, sample_labels, sample_masks = sess.run(sample)
self.handled_utt_num += 1
self.sample_feat_list[s] = sample_feats
self.sample_label_list[s] = sample_labels
self.sample_mask_list[s] = sample_masks
self.lent[s] = np.shape(sample_feats)[0]
self.curt[s] = 0
reset_flag[s] = True
for s in range(self.num_streams):
if self.curt[s] < self.lent[s]:
multistream_feat_batch[s] = self.sample_feat_list[s][self.curt[s]:self.curt[s]+self.num_steps, :]
multistream_label_batch[s] = self.sample_label_list[s][self.curt[s]:self.curt[s]+self.num_steps]
multistream_mask_batch[s] = self.sample_mask_list[s][self.curt[s]:self.curt[s]+self.num_steps]
self.curt[s] += self.num_steps
self.handled_frm_num += np.sum(multistream_mask_batch[s])
else:
multistream_mask_batch[s] = np.zeros([self.num_steps])
final_feat_batch = np.stack(multistream_feat_batch, axis=1)
final_label_batch = np.stack(multistream_label_batch, axis=1)
final_mask_batch = np.stack(multistream_mask_batch, axis=1)
done = True
for s in range(self.num_streams):
if self.curt[s] < self.lent[s]:
done = False
if done:
self.cur_epoch_finish = True
return final_feat_batch, final_label_batch, final_mask_batch, reset_flag
def getfilelst(scp_file_path):
tf_list = []
with open(scp_file_path) as list_file:
for line in list_file:
tf_list.append(line.strip())
return tf_list
def process_my_feature(feature, label, flags):
if flags.add_delta:
feature = fea.tf_fea_add_delt(feature)
feature = fea.tf_fea_cmvn_global(feature, flags.feat_mean, flags.feat_var)
feature = fea.tf_fea_splice(feature, flags.l_splice, flags.r_splice)
feature = tf.reshape(feature, [-1, flags.input_dim])
return feature[:], label[:]
def read_my_file_format(filename_queue, org_feat_dim):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
raw_example = tf.parse_single_example(
serialized_example,
features={
'feat': tf.FixedLenFeature([], tf.string),
'label': tf.FixedLenFeature([], tf.string),
})
example = tf.decode_raw(raw_example['feat'], tf.float32)
example = tf.reshape(example, [-1, org_feat_dim])
label = tf.decode_raw(raw_example['label'], tf.int32)
return example, label
def lstm_input_pipeline(flags, is_training, num_epochs=None, shuffle_state = True):
with tf.device(flags.default_device):
if is_training:
filenames = getfilelst(flags.trn_data_dir + '/tf.lst')
else:
filenames = getfilelst(flags.cv_data_dir + '/tf.lst')
filename_queue = tf.train.string_input_producer(
filenames, num_epochs = num_epochs, shuffle = shuffle_state)
sample = read_my_file_format(filename_queue, flags.org_feat_dim)
sample_feats, sample_labels = process_my_feature(sample[0], sample[1], flags)
sample_length = tf.shape(sample_feats)[0]
sample_masks = tf.ones([sample_length], dtype=tf.float32)
if flags.target_delay > 0:
feats_part1 = tf.slice(sample_feats, [flags.target_delay, 0], [sample_length-flags.target_delay, -1])
last_frm_feats = tf.slice(sample_feats, [sample_length-1, 0], [1, -1])
feats_part2 = tf.concat([last_frm_feats for _ in range(flags.target_delay)], axis=0)
sample_feats = tf.concat([feats_part1, feats_part2], axis=0)
padding_length = flags.num_steps - sample_length % flags.num_steps
padding_feats = tf.zeros([padding_length, flags.input_dim], dtype=tf.float32)
feats = tf.concat(axis=0, values=[sample_feats, padding_feats])
padding_labels = tf.zeros([padding_length], dtype=tf.int32)
labels = tf.concat(axis=0, values=[sample_labels, padding_labels])
padding_masks = tf.zeros([padding_length], dtype=tf.float32)
frame_masks = tf.concat(axis=0, values=[sample_masks, padding_masks])
return feats, labels, frame_masks
| true | true |
f71d05be152905647cb539a75d42c89a113e0993 | 556 | py | Python | src/operations/equality.py | iamantony/PythonNotes | 4ed740378bd5e031e2c21675f33ae8e199a48bbb | [
"MIT"
] | null | null | null | src/operations/equality.py | iamantony/PythonNotes | 4ed740378bd5e031e2c21675f33ae8e199a48bbb | [
"MIT"
] | null | null | null | src/operations/equality.py | iamantony/PythonNotes | 4ed740378bd5e031e2c21675f33ae8e199a48bbb | [
"MIT"
] | null | null | null | __author__ = 'Antony Cherepanov'
def diff_ways_to_equality_check():
    """Demonstrate `==` (value equality) versus `is` (object identity) on lists."""
    print("\ndiff_ways_to_equality_check()")
    # Two names bound to one and the same list object.
    same_a = same_b = [1, 2, 3]
    print(f"Our lists: {same_a}, {same_b}. They reference to the same object")
    print("l1 == l2 ? : ", same_a == same_b)
    print("l1 is l2 ? : ", same_a is same_b)
    # Two equal but distinct list objects.
    other_a = [1, 2]
    other_b = [1, 2]
    print(f"Our lists: {other_a}, {other_b}. They reference to different objects")
    print("l3 == l4 ? : ", other_a == other_b)
    print("l3 is l4 ? : ", other_a is other_b)
diff_ways_to_equality_check() | 29.263158 | 94 | 0.532374 | __author__ = 'Antony Cherepanov'
def diff_ways_to_equality_check():
print("\ndiff_ways_to_equality_check()")
l1 = l2 = [1, 2, 3]
print("Our lists: " + str(l1) + ", " + str(l2) + ". They reference to the same object")
print("l1 == l2 ? : ", l1 == l2)
print("l1 is l2 ? : ", l1 is l2)
l3 = [1, 2]
l4 = [1, 2]
print("Our lists: " + str(l3) + ", " + str(l4) + ". They reference to different objects")
print("l3 == l4 ? : ", l3 == l4)
print("l3 is l4 ? : ", l3 is l4)
diff_ways_to_equality_check() | true | true |
f71d05e03c289edc6d71ce6c053ebcd1ba27ca78 | 3,562 | py | Python | cfgov/login/tests/test_auth_forms.py | Colin-Seifer/consumerfinance.gov | a1a943f7170b498707d642d6be97b9a97a2b52e3 | [
"CC0-1.0"
] | 156 | 2015-01-16T15:16:46.000Z | 2020-08-04T04:48:01.000Z | cfgov/login/tests/test_auth_forms.py | Colin-Seifer/consumerfinance.gov | a1a943f7170b498707d642d6be97b9a97a2b52e3 | [
"CC0-1.0"
] | 3,604 | 2015-01-05T22:09:12.000Z | 2020-08-14T17:09:19.000Z | cfgov/login/tests/test_auth_forms.py | Colin-Seifer/consumerfinance.gov | a1a943f7170b498707d642d6be97b9a97a2b52e3 | [
"CC0-1.0"
] | 102 | 2015-01-28T14:51:18.000Z | 2020-08-10T00:00:39.000Z | from unittest.mock import patch
from django.contrib.auth.models import User
from django.test import TestCase
from login.forms import CFGOVPasswordChangeForm, UserCreationForm, UserEditForm
from login.tests.test_password_policy import TestWithUser
@patch("login.forms.send_password_reset_email")
class UserCreationFormTestCase(TestCase):
    """Tests for UserCreationForm: reset-email dispatch and email uniqueness.

    The class-level patch replaces ``login.forms.send_password_reset_email``,
    so every test method receives the mock as its ``send_email`` argument.
    """
    def setUp(self):
        # Valid form payload reused by each test.
        self.username = self.__class__.__name__
        self.email = "george@example.com"
        self.userdata = {
            "email": self.email,
            "username": self.username,
            "first_name": "George",
            "last_name": "Washington",
            "password1": "cherrytree",
            "password2": "cherrytree",
        }
    def tearDown(self):
        # Remove the user created by tests that commit the form.
        User.objects.filter(username=self.username).delete()
    def test_save_sends_email(self, send_email):
        """Saving with commit=True sends exactly one password-reset email."""
        form = UserCreationForm(self.userdata)
        self.assertTrue(form.is_valid())
        form.save(commit=True)
        send_email.assert_called_once_with(self.email)
    def test_save_without_commit_doesnt_send_email(self, send_email):
        """Saving with commit=False must not send any email."""
        form = UserCreationForm(self.userdata)
        self.assertTrue(form.is_valid())
        form.save(commit=False)
        send_email.assert_not_called()
    def test_duplicate_email_fails_validation(self, send_email):
        """A second account reusing an existing email is rejected with an email error."""
        User.objects.create(username="foo", email=self.email)
        form = UserCreationForm(self.userdata)
        self.assertFalse(form.is_valid())
        self.assertTrue(form.errors["email"])
class UserEditFormTestCase(TestCase):
    """Tests for UserEditForm, focusing on email-uniqueness validation."""
    def setUp(self):
        # Baseline user attributes; tests copy this dict before mutating it.
        self.userdata = {
            "username": "george",
            "email": "george@washington.com",
            "first_name": "george",
            "last_name": "washington",
        }
    def test_no_edits_valid(self):
        """Submitting a user's current data unchanged passes validation."""
        user = User.objects.create(**self.userdata)
        form = UserEditForm(data=self.userdata, instance=user)
        self.assertTrue(form.is_valid())
    def test_edit_first_name(self):
        """Changing only the first name validates and saves the new value."""
        user = User.objects.create(**self.userdata)
        userdata2 = dict(self.userdata)
        userdata2["first_name"] = "joe"
        form = UserEditForm(data=userdata2, instance=user)
        self.assertTrue(form.is_valid())
        user = form.save()
        self.assertEqual(user.first_name, "joe")
        self.assertEqual(user.username, "george")
    def test_duplicate_email_fails_validation(self):
        """A different username with an already-used email is rejected by the form."""
        User.objects.create(**self.userdata)
        userdata2 = dict(self.userdata)
        userdata2["username"] = "patrick"
        form = UserEditForm(data=userdata2)
        self.assertFalse(form.is_valid())
        self.assertTrue(form.errors["email"])
    def test_duplicate_emails_allowed_on_user_model(self):
        """The User model itself permits duplicate emails; only the forms forbid them."""
        User.objects.create(**self.userdata)
        userdata2 = dict(self.userdata)
        userdata2["username"] = "patrick"
        try:
            User.objects.create(**userdata2)
        except Exception:
            # Message was previously truncated ("via for "); completed here.
            self.fail(
                "users with duplicate emails are allowed, "
                "just not when creating or editing via forms"
            )
class PasswordValidationMixinTestCase(TestWithUser):
    """Tests for password-change validation on CFGOVPasswordChangeForm."""
    def test_edit_password(self):
        """A correct old password plus a matching, policy-compliant new password validates."""
        user = self.get_user(last_password="testing")
        form = CFGOVPasswordChangeForm(
            data={
                "old_password": "testing",
                "new_password1": "Testing12345!",
                "new_password2": "Testing12345!",
            },
            user=user,
        )
        # is_valid() is idempotent; the previous redundant bare call was removed.
        self.assertTrue(form.is_valid())
| 32.09009 | 79 | 0.635036 | from unittest.mock import patch
from django.contrib.auth.models import User
from django.test import TestCase
from login.forms import CFGOVPasswordChangeForm, UserCreationForm, UserEditForm
from login.tests.test_password_policy import TestWithUser
@patch("login.forms.send_password_reset_email")
class UserCreationFormTestCase(TestCase):
def setUp(self):
self.username = self.__class__.__name__
self.email = "george@example.com"
self.userdata = {
"email": self.email,
"username": self.username,
"first_name": "George",
"last_name": "Washington",
"password1": "cherrytree",
"password2": "cherrytree",
}
def tearDown(self):
User.objects.filter(username=self.username).delete()
def test_save_sends_email(self, send_email):
form = UserCreationForm(self.userdata)
self.assertTrue(form.is_valid())
form.save(commit=True)
send_email.assert_called_once_with(self.email)
def test_save_without_commit_doesnt_send_email(self, send_email):
form = UserCreationForm(self.userdata)
self.assertTrue(form.is_valid())
form.save(commit=False)
send_email.assert_not_called()
def test_duplicate_email_fails_validation(self, send_email):
User.objects.create(username="foo", email=self.email)
form = UserCreationForm(self.userdata)
self.assertFalse(form.is_valid())
self.assertTrue(form.errors["email"])
class UserEditFormTestCase(TestCase):
def setUp(self):
self.userdata = {
"username": "george",
"email": "george@washington.com",
"first_name": "george",
"last_name": "washington",
}
def test_no_edits_valid(self):
user = User.objects.create(**self.userdata)
form = UserEditForm(data=self.userdata, instance=user)
self.assertTrue(form.is_valid())
def test_edit_first_name(self):
user = User.objects.create(**self.userdata)
userdata2 = dict(self.userdata)
userdata2["first_name"] = "joe"
form = UserEditForm(data=userdata2, instance=user)
self.assertTrue(form.is_valid())
user = form.save()
self.assertEqual(user.first_name, "joe")
self.assertEqual(user.username, "george")
def test_duplicate_email_fails_validation(self):
User.objects.create(**self.userdata)
userdata2 = dict(self.userdata)
userdata2["username"] = "patrick"
form = UserEditForm(data=userdata2)
self.assertFalse(form.is_valid())
self.assertTrue(form.errors["email"])
def test_duplicate_emails_allowed_on_user_model(self):
User.objects.create(**self.userdata)
userdata2 = dict(self.userdata)
userdata2["username"] = "patrick"
try:
User.objects.create(**userdata2)
except Exception:
self.fail(
"users with duplicate emails are allowed, "
"just not when creating or editing via for "
)
class PasswordValidationMixinTestCase(TestWithUser):
def test_edit_password(self):
user = self.get_user(last_password="testing")
form = CFGOVPasswordChangeForm(
data={
"old_password": "testing",
"new_password1": "Testing12345!",
"new_password2": "Testing12345!",
},
user=user,
)
form.is_valid()
self.assertTrue(form.is_valid())
| true | true |
f71d07441c3958aedc9d9d95b22f02745383912c | 1,054 | py | Python | spylon_kernel/__init__.py | Gr4vi7y/spylon-kernel | 2d0ddf2aca1b91738f938b72a500c20293e3156c | [
"BSD-3-Clause"
] | 111 | 2018-03-12T09:22:07.000Z | 2021-04-06T07:33:52.000Z | spylon_kernel/__init__.py | mariusvniekerk/metakernel-scala-spark | 2d0ddf2aca1b91738f938b72a500c20293e3156c | [
"BSD-3-Clause"
] | 34 | 2017-01-30T19:12:26.000Z | 2017-09-08T14:14:07.000Z | spylon_kernel/__init__.py | mariusvniekerk/metakernel-scala-spark | 2d0ddf2aca1b91738f938b72a500c20293e3156c | [
"BSD-3-Clause"
] | 16 | 2018-04-11T19:25:14.000Z | 2020-10-20T10:56:27.000Z | from __future__ import absolute_import, print_function, division
from .scala_kernel import SpylonKernel
from .scala_magic import ScalaMagic
from .init_spark_magic import InitSparkMagic
from .scala_interpreter import get_scala_interpreter
def register_ipython_magics():
    """For usage within ipykernel.

    Instantiates and registers the ``%scala``/``%%scala`` and ``%%init_spark``
    magics for IPython.
    """
    from metakernel import IPythonKernel
    from IPython.core.magic import register_cell_magic, register_line_cell_magic

    ipy_kernel = IPythonKernel()
    scala_handler = ScalaMagic(ipy_kernel)
    spark_handler = InitSparkMagic(ipy_kernel)

    @register_line_cell_magic
    def scala(line, cell):
        # Line form evaluates the line directly; cell form runs the whole body.
        if line:
            return scala_handler.line_scala(line)
        scala_handler.code = cell
        return scala_handler.cell_scala()

    @register_cell_magic
    def init_spark(line, cell):
        spark_handler.code = cell
        return spark_handler.cell_init_spark()
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| 29.277778 | 80 | 0.743833 | from __future__ import absolute_import, print_function, division
from .scala_kernel import SpylonKernel
from .scala_magic import ScalaMagic
from .init_spark_magic import InitSparkMagic
from .scala_interpreter import get_scala_interpreter
def register_ipython_magics():
from metakernel import IPythonKernel
from IPython.core.magic import register_cell_magic, register_line_cell_magic
kernel = IPythonKernel()
scala_magic = ScalaMagic(kernel)
init_spark_magic = InitSparkMagic(kernel)
@register_line_cell_magic
def scala(line, cell):
if line:
return scala_magic.line_scala(line)
else:
scala_magic.code = cell
return scala_magic.cell_scala()
@register_cell_magic
def init_spark(line, cell):
init_spark_magic.code = cell
return init_spark_magic.cell_init_spark()
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| true | true |
f71d08c432f7544e984ac784fbbc3c5d98bf9349 | 755 | py | Python | wemake_python_styleguide/presets/types/file_tokens.py | cdhiraj40/wemake-python-styleguide | 7cef9be081d594c30045b7a98cae77a9be46e1aa | [
"MIT"
] | 1,931 | 2018-03-17T13:52:45.000Z | 2022-03-27T09:39:17.000Z | wemake_python_styleguide/presets/types/file_tokens.py | amansr02/wemake-python-styleguide | 681035ed21fbe28ebfb32b8807b98e8de76b64aa | [
"MIT"
] | 2,231 | 2018-03-09T21:19:05.000Z | 2022-03-31T08:35:37.000Z | wemake_python_styleguide/presets/types/file_tokens.py | amansr02/wemake-python-styleguide | 681035ed21fbe28ebfb32b8807b98e8de76b64aa | [
"MIT"
] | 492 | 2018-05-18T21:20:28.000Z | 2022-03-20T14:11:50.000Z | from typing_extensions import Final
from wemake_python_styleguide.visitors.tokenize import (
comments,
conditions,
primitives,
statements,
syntax,
)
#: Used to store all token related visitors to be later passed to checker:
PRESET: Final = (
    # Comment tokens: content rules, shebang lines, ``noqa`` usage, empty comments.
    comments.WrongCommentVisitor,
    comments.ShebangVisitor,
    comments.NoqaVisitor,
    comments.EmptyCommentVisitor,
    # Keyword token placement.
    syntax.WrongKeywordTokenVisitor,
    # Number and string literal tokens.
    primitives.WrongNumberTokenVisitor,
    primitives.WrongStringTokenVisitor,
    primitives.WrongStringConcatenationVisitor,
    # Statement layout: indentation, brackets, multiline strings, comprehensions.
    statements.ExtraIndentationVisitor,
    statements.BracketLocationVisitor,
    statements.MultilineStringVisitor,
    statements.InconsistentComprehensionVisitor,
    # Conditional structure.
    conditions.IfElseVisitor,
)
| 24.354839 | 74 | 0.780132 | from typing_extensions import Final
from wemake_python_styleguide.visitors.tokenize import (
comments,
conditions,
primitives,
statements,
syntax,
)
PRESET: Final = (
comments.WrongCommentVisitor,
comments.ShebangVisitor,
comments.NoqaVisitor,
comments.EmptyCommentVisitor,
syntax.WrongKeywordTokenVisitor,
primitives.WrongNumberTokenVisitor,
primitives.WrongStringTokenVisitor,
primitives.WrongStringConcatenationVisitor,
statements.ExtraIndentationVisitor,
statements.BracketLocationVisitor,
statements.MultilineStringVisitor,
statements.InconsistentComprehensionVisitor,
conditions.IfElseVisitor,
)
| true | true |
f71d0905abbc7a17b2098e10bd4351b995725485 | 5,026 | py | Python | server/src/army.py | matteli/histemul | 61f1ea8e1263b92fd2bead0c808f67940faad802 | [
"BSD-2-Clause"
] | 1 | 2019-07-05T09:40:50.000Z | 2019-07-05T09:40:50.000Z | server/src/army.py | matteli/histemul | 61f1ea8e1263b92fd2bead0c808f67940faad802 | [
"BSD-2-Clause"
] | null | null | null | server/src/army.py | matteli/histemul | 61f1ea8e1263b92fd2bead0c808f67940faad802 | [
"BSD-2-Clause"
] | null | null | null | '''
Copyright (c) 2012-2015, Matthieu Nué
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.
'''
from mongoengine import Document, ReferenceField, IntField, ListField, BooleanField, StringField
from battle import Battle
#TODO: make origin the pk
class Army(Document):
    """A mobile body of knights raised from a province.

    Armies walk between provinces along a precomputed ``way`` (a stack of
    provinces whose last element is the next hop) and start or join battles
    when they meet hostile armies.
    """
    for_the = ReferenceField('Person')  # owner the army fights for
    battle = ReferenceField('Battle')  # battle currently engaged in, if any
    attitude = StringField()  # 'normal' or 'retreat'
    location = ReferenceField('Province')  # province the army is currently in
    origin = ReferenceField('Province')  # province whose manpower raised the army
    way = ListField(ReferenceField('Province'))  # route stack; way[-1] is the next hop
    next_province = ReferenceField('Province')  # cached way[-1] while marching
    knights = IntField()  # troop count
    morale = IntField()  # 0-100
    time_walking = IntField()  # progress toward crossing the current province
    @classmethod
    def new(cls, province):
        """Raise a new army from ``province``, draining its manpower."""
        army = cls.objects.create(for_the=province.domain_of.holder, attitude='normal', location=province, origin=province, knights = province.manpower, morale=100, time_walking=0)
        province.manpower = 0
        province.save()
        return army
    def move(self, way):
        """Assign (and persist) a new route; way[-1] becomes the next hop on update."""
        self.way = way
        self.save()
        return
    def dismiss(self):
        """Disband the army, returning its knights to the origin province."""
        self.origin.manpower += self.knights
        self.origin.save()
        #army.knights = 0
        #army.save()
        self.delete()
        return
    def stop(self):
        """Halt in place: clear destination, route and walking progress (not persisted)."""
        self.next_province = None
        self.time_walking = 0
        self.way = []
        #self.save()
        return
    def retreat(self):
        """Flee into a random walkable adjacent province.

        Returns True when a retreat destination exists, False otherwise
        (the caller must handle a cornered army).
        """
        province = self.location.get_random_walkable_adjacent()
        if province:
            self.battle = None
            self.attitude = 'retreat'
            self.next_province = province
            self.way.append(province)
            self.time_walking = 0
            #self.save()
            return True
        else:
            return False
    def update(self, date):
        """Advance the army one tick: march, resolve arrivals/battles, regain morale.

        NOTE(review): the ``date`` parameter is currently unused in the body.
        """
        if self.way and self.next_province != self.way[-1]: #change way since last update
            self.next_province = self.way[-1]
            self.time_walking = 0
        if self.time_walking >= self.location.size: #enter a new province
            # Carry over surplus progress into the new province.
            self.time_walking -= self.location.size
            province = self.next_province
            self.location = province
            self.way.pop()
            if self.way:
                self.next_province = self.way[-1]
            else:
                self.next_province = None
                self.attitude = 'normal'
            #when enter a new province, look if there is enemy or already a battle
            person = self.for_the
            battle = province.battle
            if not battle:
                war = None
                enemies = []
                for army_in_province in province.armies:
                    if not war:
                        war = person.in_war_against(army_in_province.for_the)['war']
                        enemies.append(army_in_province)
                    else:
                        # NOTE(review): this branch indexes with [0]['war'] while the
                        # first lookup above uses ['war'] directly — one of the two is
                        # likely wrong; confirm the return shape of in_war_against().
                        w = person.in_war_against(army_in_province.for_the)[0]['war']
                        if w == war:
                            enemies.append(army_in_province)
                if enemies: #enemy so battle
                    self.stop()
                    Battle.new(war, province, [self], enemies)
            else:
                war = battle.war
                if person in war.aggressors:
                    self.stop()
                    battle.add_aggressor(self)
                if person in war.defenders:
                    self.stop()
                    battle.add_defender(self)
        if self.next_province:
            # Walking speed scales with the terrain's walkability factor.
            self.time_walking += 500 * self.location.land.walkable
        else:
            self.time_walking = 0
        #morale
        if self.attitude == 'normal':
            if self.morale < 95:
                self.morale += 5
            else:
                self.morale = 100
        self.save()
| 35.146853 | 180 | 0.607839 |
from mongoengine import Document, ReferenceField, IntField, ListField, BooleanField, StringField
from battle import Battle
class Army(Document):
for_the = ReferenceField('Person')
battle = ReferenceField('Battle')
attitude = StringField()
location = ReferenceField('Province')
origin = ReferenceField('Province')
way = ListField(ReferenceField('Province'))
next_province = ReferenceField('Province')
knights = IntField()
morale = IntField()
time_walking = IntField()
@classmethod
def new(cls, province):
army = cls.objects.create(for_the=province.domain_of.holder, attitude='normal', location=province, origin=province, knights = province.manpower, morale=100, time_walking=0)
province.manpower = 0
province.save()
return army
def move(self, way):
self.way = way
self.save()
return
def dismiss(self):
self.origin.manpower += self.knights
self.origin.save()
self.delete()
return
def stop(self):
self.next_province = None
self.time_walking = 0
self.way = []
return
def retreat(self):
province = self.location.get_random_walkable_adjacent()
if province:
self.battle = None
self.attitude = 'retreat'
self.next_province = province
self.way.append(province)
self.time_walking = 0
return True
else:
return False
def update(self, date):
if self.way and self.next_province != self.way[-1]:
self.next_province = self.way[-1]
self.time_walking = 0
if self.time_walking >= self.location.size:
self.time_walking -= self.location.size
province = self.next_province
self.location = province
self.way.pop()
if self.way:
self.next_province = self.way[-1]
else:
self.next_province = None
self.attitude = 'normal'
person = self.for_the
battle = province.battle
if not battle:
war = None
enemies = []
for army_in_province in province.armies:
if not war:
war = person.in_war_against(army_in_province.for_the)['war']
enemies.append(army_in_province)
else:
w = person.in_war_against(army_in_province.for_the)[0]['war']
if w == war:
enemies.append(army_in_province)
if enemies:
self.stop()
Battle.new(war, province, [self], enemies)
else:
war = battle.war
if person in war.aggressors:
self.stop()
battle.add_aggressor(self)
if person in war.defenders:
self.stop()
battle.add_defender(self)
if self.next_province:
self.time_walking += 500 * self.location.land.walkable
else:
self.time_walking = 0
if self.attitude == 'normal':
if self.morale < 95:
self.morale += 5
else:
self.morale = 100
self.save()
| true | true |
f71d0918a14b24074948327cd78a0618dd6eff25 | 5,508 | py | Python | setup.py | openedx/bok-choy | b2f82ebea4c24c84361170063d8cad0314405a4a | [
"Apache-2.0"
] | 2 | 2022-01-22T22:22:53.000Z | 2022-02-28T03:13:57.000Z | setup.py | openedx/bok-choy | b2f82ebea4c24c84361170063d8cad0314405a4a | [
"Apache-2.0"
] | 16 | 2022-01-11T04:11:33.000Z | 2022-03-29T12:30:45.000Z | setup.py | openedx/bok-choy | b2f82ebea4c24c84361170063d8cad0314405a4a | [
"Apache-2.0"
] | 1 | 2022-03-16T14:43:57.000Z | 2022-03-16T14:43:57.000Z | #!/usr/bin/env python
import codecs
import os
import re
import sys
from setuptools import setup
DESCRIPTION = 'UI-level acceptance test framework'
def load_requirements(*requirements_paths):
    """
    Load all requirements from the specified requirements files.
    Requirements will include any constraints from files specified
    with -c in the requirements files.
    Returns a list of requirement strings.
    """
    # UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why.
    # minor update to allow brackets in library names
    requirements = {}
    constraint_files = set()
    # groups "my-package-name<=x.y.z,..." into ("my-package-name", "<=x.y.z,...")
    requirement_line_regex = re.compile(r"([a-zA-Z0-9-_.\[\]]+)([<>=][^#\s]+)?")
    def add_version_constraint_or_raise(current_line, current_requirements, add_if_not_present):
        # Record the constraint for a package, rejecting conflicting definitions.
        regex_match = requirement_line_regex.match(current_line)
        if regex_match:
            package = regex_match.group(1)
            version_constraints = regex_match.group(2)
            existing_version_constraints = current_requirements.get(package, None)
            # it's fine to add constraints to an unconstrained package, but raise an error if there are already
            # constraints in place
            # NOTE(review): raising BaseException (rather than Exception) escapes
            # ordinary except-handlers; kept as-is per the semgrep-managed template.
            if existing_version_constraints and existing_version_constraints != version_constraints:
                raise BaseException(f'Multiple constraint definitions found for {package}:'
                                    f' "{existing_version_constraints}" and "{version_constraints}".'
                                    f'Combine constraints into one location with {package}'
                                    f'{existing_version_constraints},{version_constraints}.')
            if add_if_not_present or package in current_requirements:
                current_requirements[package] = version_constraints
    # process .in files and store the path to any constraint files that are pulled in
    for path in requirements_paths:
        with open(path) as reqs:
            for line in reqs:
                if is_requirement(line):
                    add_version_constraint_or_raise(line, requirements, True)
                if line and line.startswith('-c') and not line.startswith('-c http'):
                    constraint_files.add(os.path.dirname(path) + '/' + line.split('#')[0].replace('-c', '').strip())
    # process constraint files and add any new constraints found to existing requirements
    for constraint_file in constraint_files:
        with open(constraint_file) as reader:
            for line in reader:
                if is_requirement(line):
                    add_version_constraint_or_raise(line, requirements, False)
    # process back into list of pkg><=constraints strings
    constrained_requirements = [f'{pkg}{version or ""}' for (pkg, version) in sorted(requirements.items())]
    return constrained_requirements
def is_requirement(line):
    """
    Return a truthy value when ``line`` is an actual package requirement.

    A line is *not* a requirement when it is blank or whitespace-only, a
    comment (#), an included requirements/constraints file (-r / -c), an
    editable install (-e), or a direct VCS reference (git+).
    """
    # UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why
    non_requirement_prefixes = ('-r', '#', '-e', 'git+', '-c')
    return line and line.strip() and not line.startswith(non_requirement_prefixes)
# NOTE(review): this block runs before VERSION is assigned further down the
# file, so invoking ``python setup.py tag`` would raise NameError. Move it
# below the VERSION definition (or call get_version() here) to fix.
if sys.argv[-1] == 'tag':
    print("Tagging the version on github:")
    os.system("git tag -a v%s -m 'v%s'" % (VERSION, VERSION))
    os.system("git push --tags")
    sys.exit()
# Long description for PyPI comes straight from the README.
with codecs.open('README.rst', 'r', 'utf-8') as f:
    LONG_DESCRIPTION = f.read()
def get_version(*file_paths):
    """
    Extract the version string from the file at the given relative path fragments.
    """
    target = os.path.join(os.path.dirname(__file__), *file_paths)
    with open(target, encoding='utf-8') as source:
        contents = source.read()
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M)
    if match is None:
        raise RuntimeError('Unable to find version string.')
    return match.group(1)
# Single source of truth for the package version: bok_choy/__init__.py.
VERSION = get_version("bok_choy", "__init__.py")
setup(
    name='bok_choy',
    version=VERSION,
    author='edX',
    author_email='oscm@edx.org',
    url='http://github.com/edx/bok-choy',
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    license='Apache 2.0',
    classifiers=['Development Status :: 5 - Production/Stable',
                 'Environment :: Console',
                 'Intended Audience :: Developers',
                 'License :: OSI Approved :: Apache Software License',
                 'Operating System :: OS Independent',
                 'Programming Language :: Python',
                 'Programming Language :: Python :: 3',
                 'Programming Language :: Python :: 3.8',
                 'Programming Language :: Python :: Implementation :: CPython',
                 'Programming Language :: Python :: Implementation :: PyPy',
                 'Topic :: Software Development :: Testing',
                 'Topic :: Software Development :: Quality Assurance'],
    packages=['bok_choy', 'bok_choy/a11y'],
    # Bundled JS vendored assets shipped inside the wheel.
    package_data={'bok_choy': ['vendor/google/*.*', 'vendor/axe-core/*.*']},
    install_requires=load_requirements('requirements/base.in'),
    extras_require={
        'visual_diff': ['needle']
    }
)
| 40.5 | 119 | 0.634713 |
import codecs
import os
import re
import sys
from setuptools import setup
DESCRIPTION = 'UI-level acceptance test framework'
def load_requirements(*requirements_paths):
requirements = {}
constraint_files = set()
requirement_line_regex = re.compile(r"([a-zA-Z0-9-_.\[\]]+)([<>=][^#\s]+)?")
def add_version_constraint_or_raise(current_line, current_requirements, add_if_not_present):
regex_match = requirement_line_regex.match(current_line)
if regex_match:
package = regex_match.group(1)
version_constraints = regex_match.group(2)
existing_version_constraints = current_requirements.get(package, None)
# constraints in place
if existing_version_constraints and existing_version_constraints != version_constraints:
raise BaseException(f'Multiple constraint definitions found for {package}:'
f' "{existing_version_constraints}" and "{version_constraints}".'
f'Combine constraints into one location with {package}'
f'{existing_version_constraints},{version_constraints}.')
if add_if_not_present or package in current_requirements:
current_requirements[package] = version_constraints
# process .in files and store the path to any constraint files that are pulled in
for path in requirements_paths:
with open(path) as reqs:
for line in reqs:
if is_requirement(line):
add_version_constraint_or_raise(line, requirements, True)
if line and line.startswith('-c') and not line.startswith('-c http'):
constraint_files.add(os.path.dirname(path) + '/' + line.split('
# process constraint files and add any new constraints found to existing requirements
for constraint_file in constraint_files:
with open(constraint_file) as reader:
for line in reader:
if is_requirement(line):
add_version_constraint_or_raise(line, requirements, False)
# process back into list of pkg><=constraints strings
constrained_requirements = [f'{pkg}{version or ""}' for (pkg, version) in sorted(requirements.items())]
return constrained_requirements
def is_requirement(line):
# UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why
return line and line.strip() and not line.startswith(('-r', '
if sys.argv[-1] == 'tag':
print("Tagging the version on github:")
os.system("git tag -a v%s -m 'v%s'" % (VERSION, VERSION))
os.system("git push --tags")
sys.exit()
with codecs.open('README.rst', 'r', 'utf-8') as f:
LONG_DESCRIPTION = f.read()
def get_version(*file_paths):
filename = os.path.join(os.path.dirname(__file__), *file_paths)
with open(filename, encoding='utf-8') as opened_file:
version_file = opened_file.read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
VERSION = get_version("bok_choy", "__init__.py")
setup(
name='bok_choy',
version=VERSION,
author='edX',
author_email='oscm@edx.org',
url='http://github.com/edx/bok-choy',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
license='Apache 2.0',
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance'],
packages=['bok_choy', 'bok_choy/a11y'],
package_data={'bok_choy': ['vendor/google/*.*', 'vendor/axe-core/*.*']},
install_requires=load_requirements('requirements/base.in'),
extras_require={
'visual_diff': ['needle']
}
)
| true | true |
f71d0934a87f934d765eb310af08af588938c234 | 7,835 | py | Python | pyschism/outputs/combine.py | SorooshMani-NOAA/pyschism | df803edb53184625b12399f38a8bd26a022abbc1 | [
"Apache-2.0"
] | null | null | null | pyschism/outputs/combine.py | SorooshMani-NOAA/pyschism | df803edb53184625b12399f38a8bd26a022abbc1 | [
"Apache-2.0"
] | null | null | null | pyschism/outputs/combine.py | SorooshMani-NOAA/pyschism | df803edb53184625b12399f38a8bd26a022abbc1 | [
"Apache-2.0"
] | null | null | null | import os
from time import time
import pathlib
from typing import Dict, Union
import glob
import numpy as np
import xarray as xr
from pyschism.mesh.base import Gr3
def combine(var, shape, l2g, name):
    """Scatter per-rank arrays into one global array.

    Parameters
    ----------
    var : sequence of array-like
        One data array per MPI rank, ordered by rank number; rank ``i``
        corresponds to the zero-padded key ``str(i).zfill(6)`` in ``l2g``.
    shape : sequence of int
        Shape of the assembled global array.
    l2g : dict
        Maps 6-digit rank id -> {local 1-based id (str): global 1-based id (str)}.
    name : str
        Unused; retained for interface compatibility with existing callers.

    Returns
    -------
    numpy.ndarray
        Global array; positions with no contributing rank remain NaN.
    """
    values = np.full(tuple(shape), np.nan)
    for rank, data in enumerate(var):
        local_to_global = l2g[str(rank).zfill(6)]
        # Local ordering of `data` matches the insertion order of the mapping,
        # so iterating values() aligns each row with its global destination.
        # Global ids are 1-based strings; convert to 0-based integer indices.
        global_idxs = [int(gid) - 1 for gid in local_to_global.values()]
        values[global_idxs] = data
    return values
def combine_(dst, var, shape, l2g, name):
    """Gather variable ``var`` from every per-rank dataset in ``dst`` and
    scatter it into one global :class:`xarray.DataArray` via :func:`combine`."""
    per_rank = [rank_ds[var] for rank_ds in dst]
    merged = combine(per_rank, shape, l2g, name)
    return xr.DataArray(merged, dims=list(dst[0][var].dims), name=var)
class CombineOutputs:
    """Combine per-MPI-rank SCHISM outputs into global, whole-grid arrays.

    On construction, parses every ``local_to_global_XXXXXX`` file written by
    the ranks to rebuild the global horizontal grid (exposed as ``self.hgrid``)
    and the per-rank element/node/side local->global index maps used later to
    scatter per-rank data into global arrays.
    """
    def __init__(self, path: Union[str, os.PathLike]):
        self.path = pathlib.Path(path)
        if not self.path.exists():
            raise ValueError(f'Directory {self.path} does not exist.')
        nodes = {}
        elements = {}
        # One mapping file per MPI rank, e.g. local_to_global_000000.
        for ifile in sorted(self.path.glob(r'local_to_global_[0-9][0-9][0-9][0-9][0-9][0-9]')):
            with open(ifile) as f:
                # First line: global side/element/node counts, nvrt, nproc,
                # ntracers, then one on/off flag per tracer module.
                ns_global, ne_global, np_global, nvrt, nproc, ntracers, \
                    T, S, GEN, AGE, SED3D, EcoSim, ICM, CoSINE, Feco, \
                    TIMOR, FARM, DVD = f.readline().split()
                f.readline()
                # elements
                ne_local = int(f.readline())
                e_local_to_global = {}
                for i in range(ne_local):
                    local_element_id, global_element_id = f.readline().split()
                    e_local_to_global[local_element_id] = global_element_id
                # points
                np_local = int(f.readline())
                n_local_to_global = {}
                for i in range(np_local):
                    local_node_id, global_node_id = f.readline().split()
                    n_local_to_global[local_node_id] = global_node_id
                # sides
                ns_local = int(f.readline())
                s_local_to_global = {}
                for i in range(ns_local):
                    local_side_id, global_side_id = f.readline().split()
                    s_local_to_global[local_side_id] = global_side_id
                f.readline()  # Header:
                line = f.readline().split()
                #old schism print to multiple lines not just one line
                if len(line) != 5:
                    line.extend(f.readline().split())
                self.start_year, self.start_month, self.start_day, \
                    self.start_hour, self.utc_start = line
                nrec, dtout, nspool, nvrt, kz, h0, h_s, h_c, theta_b, \
                    theta_f, ics = f.readline().split()
                #In the old version of schism, ztot was written to nvrt lines
                for i in np.arange(int(nvrt)):
                    f.readline()  # (ztot(k),k=1,kz-1),(sigma(k),k=1,nvrt-kz+1)
                f.readline()  # (ztot(k),k=1,kz-1),(sigma(k),k=1,nvrt-kz+1)
                #_ne_local = None
                #_np_local = None
                #while _ne_local != ne_local and _np_local != np_local:
                #    line = f.readline().split()
                #    _np_local = int(float(line[0])
                #    _ne_local = int(float(line[1])
                # Local node table: x, y, depth (sign-flipped to elevation), flag.
                for i in range(np_local):
                    x, y, z, flag = map(float, f.readline().split())
                    nodes.setdefault(
                        n_local_to_global[str(i+1)], ((x, y), -z))
                # Local connectivity: first token is the node count per element,
                # the rest are local node ids remapped to global ids.
                for i in range(ne_local):
                    eids = f.readline().split()[1:]
                    elements.setdefault(
                        e_local_to_global[str(i+1)],
                        list(map(lambda x: n_local_to_global[x], eids)))
            nproc_id = ifile.name.split('local_to_global_')[-1]
            self.e_local_to_global.setdefault(nproc_id, e_local_to_global)
            self.n_local_to_global.setdefault(nproc_id, n_local_to_global)
            self.s_local_to_global.setdefault(nproc_id, s_local_to_global)
        #nodes = {str(i+1): nodes[str(i+1)] for i in range(len(nodes))}
        #elements = {str(i+1): elements[str(i+1)] for i in range(len(elements))}
        # NOTE(review): crs is hard-coded to epsg:4326 — confirm this is valid
        # for runs using projected coordinates (ics=2 vs ics=1).
        self.hgrid = Gr3(nodes=nodes, elements=elements, crs='epsg:4326')
    def hotstart(self, it=None):
        """Combine per-rank ``hotstart_*_{it}.nc`` files into ``hotstart_it={it}.nc``.

        Variables are scattered to global element/node/side dimensions based on
        which per-rank resident dimension they carry; anything else is copied
        verbatim from rank 0.
        """
        self.filenames = sorted(
            self.path.glob(r'hotstart_*_{}.nc'.format(it)))
        dst = []
        for i in range(len(self.filenames)):
            dst.append(xr.open_dataset(self.filenames[i]))
        #create dataset
        side = []
        node = []
        elem = []
        one = []
        #variables = ['eta2', 'su2', 'sv2']
        #variables = ['eta2', 'we', 'su2', 'tr_el', 'time', 'it', 'ifile', 'nsteps_from_cold']
        #for var in variables:
        for var in dst[0].variables:
            t0 = time()
            # Global shape: first axis becomes the global entity count; trailing
            # axes (levels, tracers, ...) keep their per-rank sizes.
            shape = []
            if 'nResident_elem' in dst[0][var].dims:
                shape.append(self.hgrid.elements.array.shape[0])
                if len(dst[0][var].shape) > 1:
                    for i in range(len(dst[0][var].shape)):
                        if i == 0:
                            continue
                        else:
                            shape.append(dst[0][var].shape[i])
                r = combine_(dst, var, shape, self.e_local_to_global, 'nResident_elem')
                elem.append(r)
            elif 'nResident_node' in dst[0][var].dims:
                shape.append(self.hgrid.nodes.values.shape[0])
                if len(dst[0][var].shape) > 1:
                    for i in range(len(dst[0][var].shape)):
                        if i == 0:
                            continue
                        else:
                            shape.append(dst[0][var].shape[i])
                r = combine_(dst, var, shape, self.n_local_to_global, 'nResident_node')
                node.append(r)
            elif 'nResident_side' in dst[0][var].dims:
                shape.append(self.hgrid.elements.sides.shape[0])
                if len(dst[0][var].shape) > 1:
                    for i in range(len(dst[0][var].shape)):
                        if i == 0:
                            continue
                        else:
                            shape.append(dst[0][var].shape[i])
                r = combine_(dst, var, shape, self.s_local_to_global, 'nResident_side')
                side.append(r)
            else:
                # Scalar/bookkeeping variables: take rank 0's copy as-is.
                one.append(dst[0][var])
            # NOTE(review): `i` here is a leftover index from the shape loops (or
            # the file-open loop), not the current variable — message may mislead.
            print(f'It took {time()-t0} seconds to combine var {var} in file[{i}]')
        side = xr.merge(side).rename({'nResident_side': 'side'})
        elem = xr.merge(elem).rename({'nResident_elem': 'elem'})
        node = xr.merge(node).rename({'nResident_node': 'node'})
        one = xr.merge(one).rename({'one': 'one_new', 'it': 'iths'})
        xdat = xr.merge([side, elem, node, one])
        #xdat = xr.merge([node, one])
        hfile = 'hotstart_it={}.nc'.format(it)
        xdat.to_netcdf(f'./{hfile}')
    @property
    def n_local_to_global(self):
        """Lazy dict of per-rank node maps: {rank id: {local id: global id}}."""
        if not hasattr(self, '_n_local_to_global'):
            self._n_local_to_global = {}
        return self._n_local_to_global
    @property
    def s_local_to_global(self):
        """Lazy dict of per-rank side maps: {rank id: {local id: global id}}."""
        if not hasattr(self, '_s_local_to_global'):
            self._s_local_to_global = {}
        return self._s_local_to_global
    @property
    def e_local_to_global(self):
        """Lazy dict of per-rank element maps: {rank id: {local id: global id}}."""
        if not hasattr(self, '_e_local_to_global'):
            self._e_local_to_global = {}
        return self._e_local_to_global
| 41.675532 | 95 | 0.515763 | import os
from time import time
import pathlib
from typing import Dict, Union
import glob
import numpy as np
import xarray as xr
from pyschism.mesh.base import Gr3
def combine(var, shape, l2g, name):
    """Scatter per-rank arrays into a single global array.

    Parameters
    ----------
    var : sequence of array-like
        One data array per MPI rank, ordered by rank number; entry ``i``
        belongs to the rank whose key in *l2g* is ``str(i).zfill(6)``.
    shape : sequence of int
        Shape of the combined global array.
    l2g : dict
        Maps a zero-padded rank id (e.g. ``'000003'``) to that rank's
        local-to-global index table (1-based ids stored as strings).
    name : str
        Dimension name; unused here, kept for interface compatibility
        with ``combine_``.

    Returns
    -------
    numpy.ndarray
        Global array with each rank's values placed at their global
        positions; slots not owned by any rank remain NaN.
    """
    values = np.full(tuple(shape), np.nan)
    for i, data in enumerate(var):
        # Rank ids are zero-padded to six digits in the l2g tables.
        local_to_global = l2g[str(i).zfill(6)]
        # Stored ids are 1-based strings; convert to 0-based int indices.
        global_idxs = [int(gid) - 1 for gid in local_to_global.values()]
        values[global_idxs] = data
    return values
def combine_(dst, var, shape, l2g, name):
    """Combine variable *var* across per-rank datasets into a DataArray.

    Pulls ``dst[i][var]`` from every rank's dataset, scatters them into a
    global array via :func:`combine`, and wraps the result in an
    ``xr.DataArray`` reusing rank 0's dimension names.
    """
    out = []
    for i in range(len(dst)):
        out.append(dst[i][var])
    r = combine(out, shape, l2g, name)
    return (xr.DataArray(r, dims = list(dst[0][var].dims), name = var))
class CombineOutputs:
def __init__(self, path: Union[str, os.PathLike]):
self.path = pathlib.Path(path)
if not self.path.exists():
raise ValueError(f'Directory {self.path} does not exist.')
nodes = {}
elements = {}
for ifile in sorted(self.path.glob(r'local_to_global_[0-9][0-9][0-9][0-9][0-9][0-9]')):
with open(ifile) as f:
ns_global, ne_global, np_global, nvrt, nproc, ntracers, \
T, S, GEN, AGE, SED3D, EcoSim, ICM, CoSINE, Feco, \
TIMOR, FARM, DVD = f.readline().split()
f.readline()
ne_local = int(f.readline())
e_local_to_global = {}
for i in range(ne_local):
local_element_id, global_element_id = f.readline().split()
e_local_to_global[local_element_id] = global_element_id
np_local = int(f.readline())
n_local_to_global = {}
for i in range(np_local):
local_node_id, global_node_id = f.readline().split()
n_local_to_global[local_node_id] = global_node_id
ns_local = int(f.readline())
s_local_to_global = {}
for i in range(ns_local):
local_side_id, global_side_id = f.readline().split()
s_local_to_global[local_side_id] = global_side_id
f.readline()
line = f.readline().split()
if len(line) != 5:
line.extend(f.readline().split())
self.start_year, self.start_month, self.start_day, \
self.start_hour, self.utc_start = line
nrec, dtout, nspool, nvrt, kz, h0, h_s, h_c, theta_b, \
theta_f, ics = f.readline().split()
for i in np.arange(int(nvrt)):
f.readline()
f.readline()
for i in range(np_local):
x, y, z, flag = map(float, f.readline().split())
nodes.setdefault(
n_local_to_global[str(i+1)], ((x, y), -z))
for i in range(ne_local):
eids = f.readline().split()[1:]
elements.setdefault(
e_local_to_global[str(i+1)],
list(map(lambda x: n_local_to_global[x], eids)))
nproc_id = ifile.name.split('local_to_global_')[-1]
self.e_local_to_global.setdefault(nproc_id, e_local_to_global)
self.n_local_to_global.setdefault(nproc_id, n_local_to_global)
self.s_local_to_global.setdefault(nproc_id, s_local_to_global)
self.hgrid = Gr3(nodes=nodes, elements=elements, crs='epsg:4326')
def hotstart(self, it=None):
self.filenames = sorted(
self.path.glob(r'hotstart_*_{}.nc'.format(it)))
dst = []
for i in range(len(self.filenames)):
dst.append(xr.open_dataset(self.filenames[i]))
side = []
node = []
elem = []
one = []
for var in dst[0].variables:
t0 = time()
shape = []
if 'nResident_elem' in dst[0][var].dims:
shape.append(self.hgrid.elements.array.shape[0])
if len(dst[0][var].shape) > 1:
for i in range(len(dst[0][var].shape)):
if i == 0:
continue
else:
shape.append(dst[0][var].shape[i])
r = combine_(dst, var, shape, self.e_local_to_global, 'nResident_elem')
elem.append(r)
elif 'nResident_node' in dst[0][var].dims:
shape.append(self.hgrid.nodes.values.shape[0])
if len(dst[0][var].shape) > 1:
for i in range(len(dst[0][var].shape)):
if i == 0:
continue
else:
shape.append(dst[0][var].shape[i])
r = combine_(dst, var, shape, self.n_local_to_global, 'nResident_node')
node.append(r)
elif 'nResident_side' in dst[0][var].dims:
shape.append(self.hgrid.elements.sides.shape[0])
if len(dst[0][var].shape) > 1:
for i in range(len(dst[0][var].shape)):
if i == 0:
continue
else:
shape.append(dst[0][var].shape[i])
r = combine_(dst, var, shape, self.s_local_to_global, 'nResident_side')
side.append(r)
else:
one.append(dst[0][var])
print(f'It took {time()-t0} seconds to combine var {var} in file[{i}]')
side = xr.merge(side).rename({'nResident_side': 'side'})
elem = xr.merge(elem).rename({'nResident_elem': 'elem'})
node = xr.merge(node).rename({'nResident_node': 'node'})
one = xr.merge(one).rename({'one': 'one_new', 'it': 'iths'})
xdat = xr.merge([side, elem, node, one])
hfile = 'hotstart_it={}.nc'.format(it)
xdat.to_netcdf(f'./{hfile}')
@property
def n_local_to_global(self):
if not hasattr(self, '_n_local_to_global'):
self._n_local_to_global = {}
return self._n_local_to_global
@property
def s_local_to_global(self):
if not hasattr(self, '_s_local_to_global'):
self._s_local_to_global = {}
return self._s_local_to_global
@property
def e_local_to_global(self):
if not hasattr(self, '_e_local_to_global'):
self._e_local_to_global = {}
return self._e_local_to_global
| true | true |
f71d0974fd90c6e55313e89b3f21b9cf56c88f29 | 4,930 | py | Python | lib/silfont/scripts/psftuneraliases.py | simoncozens/pysilfont | bb8a9fc58a83e074bbcc466ba058841845b9107e | [
"MIT"
] | 41 | 2015-05-21T21:12:26.000Z | 2022-02-17T17:23:14.000Z | lib/silfont/scripts/psftuneraliases.py | simoncozens/pysilfont | bb8a9fc58a83e074bbcc466ba058841845b9107e | [
"MIT"
] | 63 | 2015-05-15T10:25:55.000Z | 2021-02-23T04:51:17.000Z | lib/silfont/scripts/psftuneraliases.py | simoncozens/pysilfont | bb8a9fc58a83e074bbcc466ba058841845b9107e | [
"MIT"
] | 12 | 2015-06-12T11:52:08.000Z | 2020-09-23T10:40:59.000Z | #!/usr/bin/env python
__doc__ = '''Merge lookup and feature aliases into TypeTuner feature file'''
__url__ = 'http://github.com/silnrsi/pysilfont'
__copyright__ = 'Copyright (c) 2019 SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Bob Hallissy'
from silfont.core import execute
from xml.etree import ElementTree as ET
from fontTools import ttLib
import csv
import struct
# Command-line argument specification consumed by silfont.core.execute:
# (name/flags, argparse kwargs, silfont-specific kwargs).
argspec = [
    ('input', {'help': 'Input TypeTuner feature file'}, {'type': 'infile'}),
    ('output', {'help': 'Output TypeTuner feature file'}, {}),
    ('-m','--mapping', {'help': 'Input csv mapping file'}, {'type': 'incsv'}),
    ('-f','--ttf', {'help': 'Compiled TTF file'}, {}),
    ('-l','--log',{'help': 'Optional log file'}, {'type': 'outfile', 'def': '_tuneraliases.log', 'optlog': True}),
    ]
def doit(args) :
    """Merge lookup/feature aliases into a TypeTuner feature file.

    Aliases come from two optional sources (at least one required):
    a csv mapping produced by psfbuildfea (-m) and/or a compiled TTF's
    GSUB/GPOS/Graphite feature tables (-f).  Writes the updated XML to
    args.output.  Logging at severity "S" aborts the program.
    """
    logger = args.logger
    if args.mapping is None and args.ttf is None:
        logger.log("One or both of -m and -f must be provided", "S")
    featdoc = ET.parse(args.input)
    root = featdoc.getroot()
    if root.tag != 'all_features':
        logger.log("Invalid TypeTuner feature file: missing root element", "S")
    # Whitespace to add after each new alias:
    tail = '\n\t\t'
    # Find or add the aliases element
    aliases = root.find('aliases')
    if aliases is None:
        aliases = ET.SubElement(root,'aliases')
        aliases.tail = '\n'
    added = set()
    duplicates = set()
    def setalias(name, value):
        # Record duplicate alias names so we can abort at the end.
        if name in added:
            duplicates.add(name)
        else:
            added.add(name)
        # Modify existing alias in place, or append a new one.
        alias = aliases.find('alias[@name="{}"]'.format(name))
        if alias is None:
            alias = ET.SubElement(aliases, 'alias', {'name': name, 'value': value})
            alias.tail = tail
        else:
            alias.set('value', value)
    # Process mapping file if present:
    if args.mapping:
        # Mapping file is assumed to come from psfbuildfea, and should look like:
        #   lookupname,table,index
        # e.g. DigitAlternates,GSUB,51
        for (name,table,value) in args.mapping:
            setalias(name, value)
    # Process the ttf file if present
    if args.ttf:
        # Generate aliases for features.
        # In this code featureID means the key used in FontUtils for finding the feature, e.g., "calt _2"
        def dotable(t): # Common routine for GPOS and GSUB
            currtag = None
            currtagindex = None
            flist = [] # list, in order, of (featureTag, featureID), per Font::TTF
            for i in range(0,t.FeatureList.FeatureCount):
                newtag = str(t.FeatureList.FeatureRecord[i].FeatureTag)
                if currtag is None or currtag != newtag:
                    flist.append((newtag, newtag))
                    currtag = newtag
                    currtagindex = 0
                else:
                    # Repeated tag: disambiguate with " _<n>" suffix.
                    flist.append( (currtag, '{} _{}'.format(currtag, currtagindex)))
                    currtagindex += 1
            fslList = {} # dictionary keyed by feature_script_lang values returning featureID
            for s in t.ScriptList.ScriptRecord:
                currtag = str(s.ScriptTag)
                # At present only looking at the dflt lang entries
                for findex in s.Script.DefaultLangSys.FeatureIndex:
                    fslList['{}_{}_dflt'.format(flist[findex][0],currtag)] = flist[findex][1]
            # Now that we have them all, add them in sorted order.
            for name, value in sorted(fslList.items()):
                setalias(name,value)
        # Open the TTF for processing
        try:
            f = ttLib.TTFont(args.ttf)
        except Exception as e:
            logger.log("Couldn't open font '{}' for reading : {}".format(args.ttf, str(e)),"S")
        # Grab features from GSUB and GPOS
        for tag in ('GSUB', 'GPOS'):
            try:
                dotable(f[tag].table)
            except Exception as e:
                logger.log("Failed to process {} table: {}".format(tag, str(e)), "W")
        # Grab features from Graphite: alias value is the feature tag packed
        # into a big-endian 32-bit integer, as Graphite identifies features.
        try:
            for tag in sorted(f['Feat'].features.keys()):
                if tag == '1':
                    continue
                name = 'gr_' + tag
                value = str(struct.unpack('>L', tag.encode())[0])
                setalias(name,value)
        except Exception as e:
            logger.log("Failed to process Feat table: {}".format(str(e)), "W")
    if len(duplicates):
        logger.log("The following aliases defined more than once in input: {}".format(", ".join(sorted(duplicates))), "S")
    # Success. Write the result
    featdoc.write(args.output, encoding='UTF-8', xml_declaration=True)
def cmd():
    """Command-line entry point; delegates to silfont's execute framework."""
    execute(None, doit, argspec)


if __name__ == "__main__":
    cmd()
| 40.409836 | 122 | 0.577079 |
__doc__ = '''Merge lookup and feature aliases into TypeTuner feature file'''
__url__ = 'http://github.com/silnrsi/pysilfont'
__copyright__ = 'Copyright (c) 2019 SIL International (http://www.sil.org)'
__license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)'
__author__ = 'Bob Hallissy'
from silfont.core import execute
from xml.etree import ElementTree as ET
from fontTools import ttLib
import csv
import struct
argspec = [
('input', {'help': 'Input TypeTuner feature file'}, {'type': 'infile'}),
('output', {'help': 'Output TypeTuner feature file'}, {}),
('-m','--mapping', {'help': 'Input csv mapping file'}, {'type': 'incsv'}),
('-f','--ttf', {'help': 'Compiled TTF file'}, {}),
('-l','--log',{'help': 'Optional log file'}, {'type': 'outfile', 'def': '_tuneraliases.log', 'optlog': True}),
]
def doit(args) :
logger = args.logger
if args.mapping is None and args.ttf is None:
logger.log("One or both of -m and -f must be provided", "S")
featdoc = ET.parse(args.input)
root = featdoc.getroot()
if root.tag != 'all_features':
logger.log("Invalid TypeTuner feature file: missing root element", "S")
tail = '\n\t\t'
aliases = root.find('aliases')
if aliases is None:
aliases = ET.SubElement(root,'aliases')
aliases.tail = '\n'
added = set()
duplicates = set()
def setalias(name, value):
if name in added:
duplicates.add(name)
else:
added.add(name)
alias = aliases.find('alias[@name="{}"]'.format(name))
if alias is None:
alias = ET.SubElement(aliases, 'alias', {'name': name, 'value': value})
alias.tail = tail
else:
alias.set('value', value)
if args.mapping:
for (name,table,value) in args.mapping:
setalias(name, value)
if args.ttf:
def dotable(t):
currtag = None
currtagindex = None
flist = []
for i in range(0,t.FeatureList.FeatureCount):
newtag = str(t.FeatureList.FeatureRecord[i].FeatureTag)
if currtag is None or currtag != newtag:
flist.append((newtag, newtag))
currtag = newtag
currtagindex = 0
else:
flist.append( (currtag, '{} _{}'.format(currtag, currtagindex)))
currtagindex += 1
fslList = {}
for s in t.ScriptList.ScriptRecord:
currtag = str(s.ScriptTag)
for findex in s.Script.DefaultLangSys.FeatureIndex:
fslList['{}_{}_dflt'.format(flist[findex][0],currtag)] = flist[findex][1]
for name, value in sorted(fslList.items()):
setalias(name,value)
try:
f = ttLib.TTFont(args.ttf)
except Exception as e:
logger.log("Couldn't open font '{}' for reading : {}".format(args.ttf, str(e)),"S")
# Grab features from GSUB and GPOS
for tag in ('GSUB', 'GPOS'):
try:
dotable(f[tag].table)
except Exception as e:
logger.log("Failed to process {} table: {}".format(tag, str(e)), "W")
# Grab features from Graphite:
try:
for tag in sorted(f['Feat'].features.keys()):
if tag == '1':
continue
name = 'gr_' + tag
value = str(struct.unpack('>L', tag.encode())[0])
setalias(name,value)
except Exception as e:
logger.log("Failed to process Feat table: {}".format(str(e)), "W")
if len(duplicates):
logger.log("The following aliases defined more than once in input: {}".format(", ".join(sorted(duplicates))), "S")
# Success. Write the result
featdoc.write(args.output, encoding='UTF-8', xml_declaration=True)
def cmd() : execute(None,doit,argspec)
if __name__ == "__main__": cmd()
| true | true |
f71d0cdd41ebaf5f19271d970cbfc6c054ba49fb | 4,068 | py | Python | relbert_cl/train.py | asahi417/relbert | cb718e40fb452e88ccae1c271ccdea25013791b1 | [
"MIT"
] | 17 | 2021-09-10T14:49:41.000Z | 2022-01-26T13:18:02.000Z | relbert_cl/train.py | asahi417/relbert | cb718e40fb452e88ccae1c271ccdea25013791b1 | [
"MIT"
] | 2 | 2021-11-14T07:47:36.000Z | 2021-11-22T17:34:06.000Z | relbert_cl/train.py | asahi417/relbert | cb718e40fb452e88ccae1c271ccdea25013791b1 | [
"MIT"
] | 1 | 2021-12-14T01:35:05.000Z | 2021-12-14T01:35:05.000Z | """ Train RelBERT model. """
import argparse
import logging
import relbert
def config(parser):
    """Attach all RelBERT training options to *parser* and return it."""
    add = parser.add_argument
    # optimization
    add('-s', '--softmax-loss', help='softmax loss', action='store_true')
    add('-n', '--in-batch-negative', help='in batch negative', action='store_true')
    add('-p', '--parent-contrast', help='hierarchical contrastive loss', action='store_true')
    add('-e', '--epoch', help='training epochs', default=1, type=int)
    add('--mse-margin', help='contrastive loss margin', default=1, type=int)
    add('-b', '--batch', help='batch size', default=64, type=int)
    add('--lr', help='learning rate', default=0.00002, type=float)
    add('--random-seed', help='random seed', default=0, type=int)
    add('--lr-decay', help='linear decay of learning rate after warmup', action='store_true')
    add('--lr-warmup', help='linear warmup of lr', default=10, type=int)
    add('--weight-decay', help='l2 penalty for weight decay', default=0, type=float)
    add('--optimizer', help='optimizer `adam`/`adamax`/`adam`', default='adam', type=str)
    add('--momentum', help='sgd momentum', default=0.9, type=float)
    # training environment
    add('--cache-dir', help='cache directory to store dataset', default=None, type=str)
    add('--num-workers', help='workers for dataloder', default=5, type=int)
    add('--fp16', help='fp16 for training', action='store_true')
    add('--epoch-save', help='interval to save model weight', default=5, type=int)
    add('--debug', help='log level', action='store_true')
    add('--export', help='directory to export model weight file', required=True, type=str)
    # language model
    add('-m', '--model', help='language model', default='roberta-large', type=str)
    add('-l', '--max-length', help='length', default=64, type=int)
    add('--mode', help='lm mode', default='average_no_mask', type=str)
    # data
    add('--data', help='dataset', default='semeval2012', type=str)
    add('--n-sample', help='sample size', default=10, type=int)
    add('-t', '--template-type', help='template type or path to generated prompt file',
        default='a', type=str)
    return parser
def main():
    """Parse command-line options and run RelBERT training."""
    argument_parser = argparse.ArgumentParser(description='Train RelBERT.')
    argument_parser = config(argument_parser)
    opt = argument_parser.parse_args()
    # Console logging; verbosity controlled by --debug.
    level = logging.DEBUG if opt.debug else logging.INFO
    logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', level=level, datefmt='%Y-%m-%d %H:%M:%S')
    trainer = relbert.Trainer(
        model=opt.model,
        max_length=opt.max_length,
        mode=opt.mode,
        data=opt.data,
        n_sample=opt.n_sample,
        template_type=opt.template_type,
        softmax_loss=opt.softmax_loss,
        in_batch_negative=opt.in_batch_negative,
        parent_contrast=opt.parent_contrast,
        mse_margin=opt.mse_margin,
        epoch=opt.epoch,
        export=opt.export,
        batch=opt.batch,
        lr=opt.lr,
        lr_decay=opt.lr_decay,
        lr_warmup=opt.lr_warmup,
        weight_decay=opt.weight_decay,
        optimizer=opt.optimizer,
        momentum=opt.momentum,
        fp16=opt.fp16,
        random_seed=opt.random_seed,
        cache_dir=opt.cache_dir)
    # Mirror all log output to a file inside the trainer's checkpoint dir.
    logger = logging.getLogger()
    file_handler = logging.FileHandler('{}/training.log'.format(trainer.checkpoint_dir))
    file_handler.setLevel(level)
    file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s'))
    logger.addHandler(file_handler)
    trainer.train(num_workers=opt.num_workers, epoch_save=opt.epoch_save)
| 47.302326 | 115 | 0.684366 | import argparse
import logging
import relbert
def config(parser):
parser.add_argument('-s', '--softmax-loss', help='softmax loss', action='store_true')
parser.add_argument('-n', '--in-batch-negative', help='in batch negative', action='store_true')
parser.add_argument('-p', '--parent-contrast', help='hierarchical contrastive loss', action='store_true')
parser.add_argument('-e', '--epoch', help='training epochs', default=1, type=int)
parser.add_argument('--mse-margin', help='contrastive loss margin', default=1, type=int)
parser.add_argument('-b', '--batch', help='batch size', default=64, type=int)
parser.add_argument('--lr', help='learning rate', default=0.00002, type=float)
parser.add_argument('--random-seed', help='random seed', default=0, type=int)
parser.add_argument('--lr-decay', help='linear decay of learning rate after warmup', action='store_true')
parser.add_argument("--lr-warmup", help="linear warmup of lr", default=10, type=int)
parser.add_argument("--weight-decay", help="l2 penalty for weight decay", default=0, type=float)
parser.add_argument('--optimizer', help='optimizer `adam`/`adamax`/`adam`', default='adam', type=str)
parser.add_argument("--momentum", help="sgd momentum", default=0.9, type=float)
parser.add_argument('--cache-dir', help='cache directory to store dataset', default=None, type=str)
parser.add_argument('--num-workers', help='workers for dataloder', default=5, type=int)
parser.add_argument('--fp16', help='fp16 for training', action='store_true')
parser.add_argument('--epoch-save', help='interval to save model weight', default=5, type=int)
parser.add_argument('--debug', help='log level', action='store_true')
parser.add_argument('--export', help='directory to export model weight file', required=True, type=str)
parser.add_argument('-m', '--model', help='language model', default='roberta-large', type=str)
parser.add_argument('-l', '--max-length', help='length', default=64, type=int)
parser.add_argument('--mode', help='lm mode', default='average_no_mask', type=str)
parser.add_argument('--data', help='dataset', default='semeval2012', type=str)
parser.add_argument('--n-sample', help='sample size', default=10, type=int)
parser.add_argument('-t', '--template-type', help='template type or path to generated prompt file',
default='a', type=str)
return parser
def main():
argument_parser = argparse.ArgumentParser(description='Train RelBERT.')
argument_parser = config(argument_parser)
opt = argument_parser.parse_args()
level = logging.DEBUG if opt.debug else logging.INFO
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', level=level, datefmt='%Y-%m-%d %H:%M:%S')
trainer = relbert.Trainer(
model=opt.model,
max_length=opt.max_length,
mode=opt.mode,
data=opt.data,
n_sample=opt.n_sample,
template_type=opt.template_type,
softmax_loss=opt.softmax_loss,
in_batch_negative=opt.in_batch_negative,
parent_contrast=opt.parent_contrast,
mse_margin=opt.mse_margin,
epoch=opt.epoch,
export=opt.export,
batch=opt.batch,
lr=opt.lr,
lr_decay=opt.lr_decay,
lr_warmup=opt.lr_warmup,
weight_decay=opt.weight_decay,
optimizer=opt.optimizer,
momentum=opt.momentum,
fp16=opt.fp16,
random_seed=opt.random_seed,
cache_dir=opt.cache_dir)
logger = logging.getLogger()
file_handler = logging.FileHandler('{}/training.log'.format(trainer.checkpoint_dir))
file_handler.setLevel(level)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s'))
logger.addHandler(file_handler)
trainer.train(num_workers=opt.num_workers, epoch_save=opt.epoch_save)
if __name__ == '__main__':
main()
| true | true |
f71d0f6a0de31c3f3b2a50177251c5875a195dcd | 16,997 | py | Python | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/Pmw/Pmw_1_3/lib/PmwPanedWidget.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | 14 | 2015-10-16T11:35:30.000Z | 2021-05-12T15:31:16.000Z | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/Pmw/Pmw_1_3/lib/PmwPanedWidget.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | 1 | 2018-07-28T20:07:04.000Z | 2018-07-30T18:28:34.000Z | resources/mgltools_x86_64Linux2_1.5.6/lib/python2.5/site-packages/Pmw/Pmw_1_3/lib/PmwPanedWidget.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | 5 | 2015-10-16T12:44:41.000Z | 2019-10-02T02:45:38.000Z | # PanedWidget
# a frame which may contain several resizable sub-frames
import string
import sys
import types
import Tkinter
import Pmw
class PanedWidget(Pmw.MegaWidget):
    def __init__(self, parent = None, **kw):
        # Define the megawidget options.
        INITOPT = Pmw.INITOPT
        optiondefs = (
            ('command', None, None),
            ('orient', 'vertical', INITOPT),
            ('separatorrelief', 'sunken', INITOPT),
            ('separatorthickness', 2, INITOPT),
            ('handlesize', 8, INITOPT),
            ('hull_width', 400, None),
            ('hull_height', 400, None),
        )
        self.defineoptions(kw, optiondefs,
                dynamicGroups = ('Frame', 'Separator', 'Handle'))
        # Initialise the base class (after defining the options).
        Pmw.MegaWidget.__init__(self, parent)
        # Re-layout the panes whenever the hull is resized.
        self.bind('<Configure>', self._handleConfigure)
        if self['orient'] not in ('horizontal', 'vertical'):
            raise ValueError, 'bad orient option ' + repr(self['orient']) + \
                ': must be either \'horizontal\' or \'vertical\''
        self._separatorThickness = self['separatorthickness']
        self._handleSize = self['handlesize']
        self._paneNames = [] # List of pane names
        self._paneAttrs = {} # Map from pane name to pane info
        self._timerId = None
        self._frame = {}       # pane name -> Tkinter.Frame component
        self._separator = []   # per-pane separator widgets (index 0 is None)
        self._button = []      # per-pane drag handles (index 0 is None)
        self._totalSize = 0
        self._movePending = 0
        # Per-pane geometry constraints; *rel* variants hold fractions of
        # the major dimension, the plain variants hold pixel values.
        self._relsize = {}
        self._relmin = {}
        self._relmax = {}
        self._size = {}
        self._min = {}
        self._max = {}
        # State used while the user drags a separator handle.
        self._rootp = None
        self._curSize = None
        self._beforeLimit = None
        self._afterLimit = None
        self._buttonIsDown = 0
        self._majorSize = 100
        self._minorSize = 100
        # Check keywords and initialise options.
        self.initialiseoptions()
    def insert(self, name, before = 0, **kw):
        """Create a new pane *name* before pane *before* and return its frame.

        Keyword options: size, min, max -- ints are pixels, floats are
        fractions of the widget's major dimension.
        """
        # Parse <kw> for options.
        self._initPaneOptions(name)
        self._parsePaneOptions(name, kw)
        insertPos = self._nameToIndex(before)
        atEnd = (insertPos == len(self._paneNames))
        # Add the frame.
        self._paneNames[insertPos:insertPos] = [name]
        self._frame[name] = self.createcomponent(name,
                (), 'Frame',
                Tkinter.Frame, (self.interior(),))
        # Add separator, if necessary.
        if len(self._paneNames) > 1:
            self._addSeparator()
        else:
            # First pane has no separator; keep the lists index-aligned.
            self._separator.append(None)
            self._button.append(None)
        # Add the new frame and adjust the PanedWidget
        if atEnd:
            size = self._size[name]
            if size > 0 or self._relsize[name] is not None:
                if self['orient'] == 'vertical':
                    self._frame[name].place(x=0, relwidth=1,
                                            height=size, y=self._totalSize)
                else:
                    self._frame[name].place(y=0, relheight=1,
                                            width=size, x=self._totalSize)
            else:
                if self['orient'] == 'vertical':
                    self._frame[name].place(x=0, relwidth=1,
                                            y=self._totalSize)
                else:
                    self._frame[name].place(y=0, relheight=1,
                                            x=self._totalSize)
        else:
            # Inserting in the middle shifts later panes; re-place them all.
            self._updateSizes()
        self._totalSize = self._totalSize + self._size[name]
        return self._frame[name]
def add(self, name, **kw):
return apply(self.insert, (name, len(self._paneNames)), kw)
    def delete(self, name):
        """Destroy pane *name* (or index) and its trailing separator/handle."""
        deletePos = self._nameToIndex(name)
        name = self._paneNames[deletePos]
        self.destroycomponent(name)
        del self._paneNames[deletePos]
        del self._frame[name]
        del self._size[name]
        del self._min[name]
        del self._max[name]
        del self._relsize[name]
        del self._relmin[name]
        del self._relmax[name]
        # One fewer pane means one fewer separator/handle pair; drop the
        # last slot and destroy its components (unless no panes remain).
        last = len(self._paneNames)
        del self._separator[last]
        del self._button[last]
        if last > 0:
            self.destroycomponent(self._sepName(last))
            self.destroycomponent(self._buttonName(last))
        self._plotHandles()
    def setnaturalsize(self):
        """Resize the hull so every pane gets its natural requested size."""
        self.update_idletasks()
        totalWidth = 0
        totalHeight = 0
        maxWidth = 0
        maxHeight = 0
        for name in self._paneNames:
            frame = self._frame[name]
            w = frame.winfo_reqwidth()
            h = frame.winfo_reqheight()
            totalWidth = totalWidth + w
            totalHeight = totalHeight + h
            if maxWidth < w:
                maxWidth = w
            if maxHeight < h:
                maxHeight = h
        # Note that, since the hull is a frame, the width and height
        # options specify the geometry *outside* the borderwidth and
        # highlightthickness.
        bw = string.atoi(str(self.cget('hull_borderwidth')))
        hl = string.atoi(str(self.cget('hull_highlightthickness')))
        extra = (bw + hl) * 2
        if str(self.cget('orient')) == 'horizontal':
            totalWidth = totalWidth + extra
            maxHeight = maxHeight + extra
            self.configure(hull_width = totalWidth, hull_height = maxHeight)
        else:
            # Vertical stacking: account for the separators between panes.
            totalHeight = (totalHeight + extra +
                    (len(self._paneNames) - 1) * self._separatorThickness)
            maxWidth = maxWidth + extra
            self.configure(hull_width = maxWidth, hull_height = totalHeight)
    def move(self, name, newPos, newPosOffset = 0):
        """Move pane *name* to position *newPos* (name or index) + offset."""
        # see if we can spare ourselves some work
        numPanes = len(self._paneNames)
        if numPanes < 2:
            return
        newPos = self._nameToIndex(newPos) + newPosOffset
        if newPos < 0 or newPos >=numPanes:
            return
        deletePos = self._nameToIndex(name)
        if deletePos == newPos:
            # inserting over ourself is a no-op
            return
        # delete name from old position in list
        name = self._paneNames[deletePos]
        del self._paneNames[deletePos]
        # place in new position
        self._paneNames[newPos:newPos] = [name]
        # force everything to redraw
        self._plotHandles()
        self._updateSizes()
def _nameToIndex(self, nameOrIndex):
try:
pos = self._paneNames.index(nameOrIndex)
except ValueError:
pos = nameOrIndex
return pos
def _initPaneOptions(self, name):
# Set defaults.
self._size[name] = 0
self._relsize[name] = None
self._min[name] = 0
self._relmin[name] = None
self._max[name] = 100000
self._relmax[name] = None
    def _parsePaneOptions(self, name, args):
        """Store size/min/max options for pane *name*.

        Float values are fractions of the major dimension (kept as the
        *rel* variant and converted to pixels); ints are pixels directly.
        """
        # Parse <args> for options.
        for arg, value in args.items():
            if type(value) == types.FloatType:
                relvalue = value
                value = self._absSize(relvalue)
            else:
                relvalue = None
            if arg == 'size':
                self._size[name], self._relsize[name] = value, relvalue
            elif arg == 'min':
                self._min[name], self._relmin[name] = value, relvalue
            elif arg == 'max':
                self._max[name], self._relmax[name] = value, relvalue
            else:
                raise ValueError, 'keyword must be "size", "min", or "max"'
def _absSize(self, relvalue):
return int(round(relvalue * self._majorSize))
    def _sepName(self, n):
        # Component name of the separator preceding pane n.
        return 'separator-%d' % n
    def _buttonName(self, n):
        # Component name of the drag handle on separator n.
        return 'handle-%d' % n
    def _addSeparator(self):
        """Create the separator line and drag handle before the newest pane."""
        n = len(self._paneNames) - 1
        # Bind n by default argument so each separator drags its own index.
        downFunc = lambda event, s = self, num=n: s._btnDown(event, num)
        upFunc = lambda event, s = self, num=n: s._btnUp(event, num)
        moveFunc = lambda event, s = self, num=n: s._btnMove(event, num)
        # Create the line dividing the panes.
        sep = self.createcomponent(self._sepName(n),
                (), 'Separator',
                Tkinter.Frame, (self.interior(),),
                borderwidth = 1,
                relief = self['separatorrelief'])
        self._separator.append(sep)
        sep.bind('<ButtonPress-1>', downFunc)
        sep.bind('<Any-ButtonRelease-1>', upFunc)
        sep.bind('<B1-Motion>', moveFunc)
        # Oversize the minor dimension (10000) so the separator always
        # spans the full width/height of the hull.
        if self['orient'] == 'vertical':
            cursor = 'sb_v_double_arrow'
            sep.configure(height = self._separatorThickness,
                    width = 10000, cursor = cursor)
        else:
            cursor = 'sb_h_double_arrow'
            sep.configure(width = self._separatorThickness,
                    height = 10000, cursor = cursor)
        self._totalSize = self._totalSize + self._separatorThickness
        # Create the handle on the dividing line.
        handle = self.createcomponent(self._buttonName(n),
                (), 'Handle',
                Tkinter.Frame, (self.interior(),),
                relief = 'raised',
                borderwidth = 1,
                width = self._handleSize,
                height = self._handleSize,
                cursor = cursor,
                )
        self._button.append(handle)
        handle.bind('<ButtonPress-1>', downFunc)
        handle.bind('<Any-ButtonRelease-1>', upFunc)
        handle.bind('<B1-Motion>', moveFunc)
        self._plotHandles()
        # Raise separators and handles above the pane frames.
        for i in range(1, len(self._paneNames)):
            self._separator[i].tkraise()
        for i in range(1, len(self._paneNames)):
            self._button[i].tkraise()
    def _btnUp(self, event, item):
        # End of a separator drag: commit the new sizes.
        self._buttonIsDown = 0
        self._updateSizes()
        try:
            # The handle may already have been destroyed mid-drag.
            # NOTE(review): the bare except also hides unrelated errors
            # (e.g. a bad *item* index); consider narrowing to
            # Tkinter.TclError.
            self._button[item].configure(relief='raised')
        except:
            pass
    def _btnDown(self, event, item):
        # Start of a separator drag: press the handle in and compute how
        # far this separator may travel in either direction.
        self._button[item].configure(relief='sunken')
        self._getMotionLimit(item)
        self._buttonIsDown = 1
        self._movePending = 0
    def _handleConfigure(self, event = None):
        """<Configure> handler: redistribute pane sizes to fit the hull."""
        self._getNaturalSizes()
        if self._totalSize == 0:
            return
        # Distribute the size difference starting from the last pane.
        iterRange = list(self._paneNames)
        iterRange.reverse()
        if self._majorSize > self._totalSize:
            n = self._majorSize - self._totalSize
            self._iterate(iterRange, self._grow, n)
        elif self._majorSize < self._totalSize:
            n = self._totalSize - self._majorSize
            self._iterate(iterRange, self._shrink, n)
        self._plotHandles()
        self._updateSizes()
    def _getNaturalSizes(self):
        """Recompute pixel sizes/limits for every pane and the total size."""
        # Must call this in order to get correct winfo_width, winfo_height
        self.update_idletasks()
        self._totalSize = 0
        # The "major" dimension is the one panes are stacked along.
        if self['orient'] == 'vertical':
            self._majorSize = self.winfo_height()
            self._minorSize = self.winfo_width()
            majorspec = Tkinter.Frame.winfo_reqheight
        else:
            self._majorSize = self.winfo_width()
            self._minorSize = self.winfo_height()
            majorspec = Tkinter.Frame.winfo_reqwidth
        # Exclude border and highlight ring from the usable area.
        bw = string.atoi(str(self.cget('hull_borderwidth')))
        hl = string.atoi(str(self.cget('hull_highlightthickness')))
        extra = (bw + hl) * 2
        self._majorSize = self._majorSize - extra
        self._minorSize = self._minorSize - extra
        if self._majorSize < 0:
            self._majorSize = 0
        if self._minorSize < 0:
            self._minorSize = 0
        for name in self._paneNames:
            # adjust the absolute sizes first...
            if self._relsize[name] is None:
                # special case: no size given yet, use the frame's natural size
                if self._size[name] == 0:
                    self._size[name] = apply(majorspec, (self._frame[name],))
                    self._setrel(name)
            else:
                self._size[name] = self._absSize(self._relsize[name])
            if self._relmin[name] is not None:
                self._min[name] = self._absSize(self._relmin[name])
            if self._relmax[name] is not None:
                self._max[name] = self._absSize(self._relmax[name])
            # now clamp sizes into [min, max]
            if self._size[name] < self._min[name]:
                self._size[name] = self._min[name]
                self._setrel(name)
            if self._size[name] > self._max[name]:
                self._size[name] = self._max[name]
                self._setrel(name)
            self._totalSize = self._totalSize + self._size[name]
        # adjust for separators
        self._totalSize = (self._totalSize +
                (len(self._paneNames) - 1) * self._separatorThickness)
def _setrel(self, name):
    # Keep the stored relative size in sync with the pixel size so that
    # proportions survive subsequent resizes.  Only panes configured
    # with a relative (float) size carry a _relsize entry.
    if self._relsize[name] is not None:
        if self._majorSize != 0:
            self._relsize[name] = round(self._size[name]) / self._majorSize
def _iterate(self, names, proc, n):
    # Thread the amount *n* through proc (one of _grow/_shrink) for each
    # pane name in *names*, stopping as soon as the amount has been
    # fully absorbed (proc returned 0).
    for name in names:
        # Direct call instead of the apply() builtin, which has been
        # deprecated since Python 2.3 and removed in Python 3; the
        # behavior is identical.
        n = proc(name, n)
        if n == 0:
            break
def _grow(self, name, n):
    """Enlarge pane *name* by up to *n* pixels, respecting its maximum.

    Returns the number of pixels that could not be absorbed by this
    pane (0 if the whole amount fit).
    """
    headroom = self._max[name] - self._size[name]
    if headroom > n:
        # Plenty of room: absorb the whole amount here.
        self._size[name] = self._size[name] + n
        self._setrel(name)
        return 0
    if headroom > 0:
        # Partial fit: grow to the maximum and pass the remainder on.
        self._size[name] = self._max[name]
        self._setrel(name)
        n = n - headroom
    return n
def _shrink(self, name, n):
    """Reduce pane *name* by up to *n* pixels, respecting its minimum.

    Returns the number of pixels still to be taken from other panes
    (0 if this pane absorbed everything).
    """
    slack = self._size[name] - self._min[name]
    if slack > n:
        # This pane alone can give up the whole amount.
        self._size[name] = self._size[name] - n
        self._setrel(name)
        return 0
    if slack > 0:
        # Shrink down to the minimum and pass the remainder on.
        self._size[name] = self._min[name]
        self._setrel(name)
        n = n - slack
    return n
def _updateSizes(self):
    # Place each pane frame at its computed offset and size along the
    # major axis; panes always fill the minor axis (relwidth/relheight
    # of 1).
    totalSize = 0
    for name in self._paneNames:
        size = self._size[name]
        if self['orient'] == 'vertical':
            self._frame[name].place(x = 0, relwidth = 1,
                    y = totalSize,
                    height = size)
        else:
            self._frame[name].place(y = 0, relheight = 1,
                    x = totalSize,
                    width = size)
        totalSize = totalSize + size + self._separatorThickness
    # Invoke the callback command
    # with the list of current pane sizes.  NOTE(review): map() returns
    # a list under Python 2 but an iterator under Python 3 -- confirm
    # callers' expectations before porting.
    cmd = self['command']
    if callable(cmd):
        cmd(map(lambda x, s = self: s._size[x], self._paneNames))
def _plotHandles(self):
    # Position the separator lines and the small drag handles between
    # consecutive panes.
    if len(self._paneNames) == 0:
        return
    # btnp: position of the handle along the minor axis.
    if self['orient'] == 'vertical':
        btnp = self._minorSize - 13
    else:
        h = self._minorSize
        if h > 18:
            btnp = 9
        else:
            btnp = h - 9
    firstPane = self._paneNames[0]
    totalSize = self._size[firstPane]
    first = 1
    last = len(self._paneNames) - 1
    # loop from first to last, inclusive
    for i in range(1, last + 1):
        handlepos = totalSize - 3
        prevSize = self._size[self._paneNames[i - 1]]
        nextSize = self._size[self._paneNames[i]]
        # Shift/shrink the handle when an adjacent pane is very small,
        # so it does not spill over the widget edge.
        # NOTE(review): the "/ 2" below is integer division under
        # Python 2; under Python 3 it would yield a float. Confirm
        # before porting.
        offset1 = 0
        if i == first:
            if prevSize < 4:
                offset1 = 4 - prevSize
        else:
            if prevSize < 8:
                offset1 = (8 - prevSize) / 2
        offset2 = 0
        if i == last:
            if nextSize < 4:
                offset2 = nextSize - 4
        else:
            if nextSize < 8:
                offset2 = (nextSize - 8) / 2
        handlepos = handlepos + offset1
        if self['orient'] == 'vertical':
            height = 8 - offset1 + offset2
            if height > 1:
                self._button[i].configure(height = height)
                self._button[i].place(x = btnp, y = handlepos)
            else:
                # Too small to be usable: hide the handle entirely.
                self._button[i].place_forget()
            self._separator[i].place(x = 0, y = totalSize,
                    relwidth = 1)
        else:
            width = 8 - offset1 + offset2
            if width > 1:
                self._button[i].configure(width = width)
                self._button[i].place(y = btnp, x = handlepos)
            else:
                self._button[i].place_forget()
            self._separator[i].place(y = 0, x = totalSize,
                    relheight = 1)
        totalSize = totalSize + nextSize + self._separatorThickness
def pane(self, name):
    """Return the frame widget of the pane identified by *name*.

    *name* may be either a pane name or a numeric index (resolved by
    _nameToIndex).
    """
    position = self._nameToIndex(name)
    return self._frame[self._paneNames[position]]
def panes(self):
    """Return the names of all panes, in order, as a new list."""
    return self._paneNames[:]
def configurepane(self, name, **kw):
    # Public API: change the size/min/max options of an existing pane
    # (identified by name or index) and re-layout the widget.
    name = self._paneNames[self._nameToIndex(name)]
    self._parsePaneOptions(name, kw)
    self._handleConfigure()
def updatelayout(self):
    # Public API: recompute pane sizes and redraw, e.g. after the pane
    # contents have changed their requested sizes.
    self._handleConfigure()
def _getMotionLimit(self, item):
    # Compute how far separator *item* may travel in either direction,
    # honouring the min/max constraints of every pane on both sides.
    # Current/minimum/maximum extent of everything before the separator
    # (separator thicknesses included).
    curBefore = (item - 1) * self._separatorThickness
    minBefore, maxBefore = curBefore, curBefore
    for name in self._paneNames[:item]:
        curBefore = curBefore + self._size[name]
        minBefore = minBefore + self._min[name]
        maxBefore = maxBefore + self._max[name]
    # Same totals for everything after the separator.
    curAfter = (len(self._paneNames) - item) * self._separatorThickness
    minAfter, maxAfter = curAfter, curAfter
    for name in self._paneNames[item:]:
        curAfter = curAfter + self._size[name]
        minAfter = minAfter + self._min[name]
        maxAfter = maxAfter + self._max[name]
    # The separator can move backwards until the preceding panes reach
    # their minima or the following panes their maxima -- and vice versa.
    beforeToGo = min(curBefore - minBefore, maxAfter - curAfter)
    afterToGo = min(curAfter - minAfter, maxBefore - curBefore)
    self._beforeLimit = curBefore - beforeToGo
    self._afterLimit = curBefore + afterToGo
    self._curSize = curBefore
    self._plotHandles()
# Compress the motion so that update is quick even on slow machines:
# only the most recent pointer event is kept, and the actual layout
# work is deferred to a single after_idle callback.
def _btnMove(self, event, item):
    # Record the latest pointer event; the root coordinates (x_root /
    # y_root) are read later by _btnMoveCompressed.
    self._rootp = event
    if self._movePending == 0:
        # Schedule exactly one deferred update; further motion events
        # merely overwrite self._rootp until it runs.
        self._timerId = self.after_idle(
            lambda s = self, i = item: s._btnMoveCompressed(i))
        self._movePending = 1
def destroy(self):
    # Cancel any pending after_idle callback before tearing the widget
    # down, so Tk cannot invoke it on a destroyed widget.
    if self._timerId is not None:
        self.after_cancel(self._timerId)
        self._timerId = None
    Pmw.MegaWidget.destroy(self)
def _btnMoveCompressed(self, item):
    # Deferred (after_idle) part of a drag: translate the last recorded
    # pointer position into a new separator position and apply it.
    if not self._buttonIsDown:
        return
    # Convert the pointer's root coordinate into a widget-local offset
    # along the major axis.
    if self['orient'] == 'vertical':
        p = self._rootp.y_root - self.winfo_rooty()
    else:
        p = self._rootp.x_root - self.winfo_rootx()
    if p == self._curSize:
        # No effective movement -- nothing to do.
        self._movePending = 0
        return
    # Clamp to the travel limits computed by _getMotionLimit().
    if p < self._beforeLimit:
        p = self._beforeLimit
    if p >= self._afterLimit:
        p = self._afterLimit
    self._calculateChange(item, p)
    self.update_idletasks()
    self._movePending = 0
# Calculate the change in response to mouse motions
def _calculateChange(self, item, p):
    # Move separator *item* to position p (already clamped by the
    # caller) by shrinking the panes on one side and growing those on
    # the other, then redraw the handles.
    if p < self._curSize:
        self._moveBefore(item, p)
    elif p > self._curSize:
        self._moveAfter(item, p)
    self._plotHandles()
def _moveBefore(self, item, p):
    # Separator moved towards the start: take n pixels from the panes
    # before it (nearest pane first, hence the reverse) and give the
    # same amount to the panes after it.
    n = self._curSize - p
    # Shrink the frames before
    iterRange = list(self._paneNames[:item])
    iterRange.reverse()
    self._iterate(iterRange, self._shrink, n)
    # Adjust the frames after
    iterRange = self._paneNames[item:]
    self._iterate(iterRange, self._grow, n)
    self._curSize = p
def _moveAfter(self, item, p):
    # Separator moved towards the end: mirror image of _moveBefore --
    # shrink the panes after the separator and grow those before it.
    n = p - self._curSize
    # Shrink the frames after
    iterRange = self._paneNames[item:]
    self._iterate(iterRange, self._shrink, n)
    # Adjust the frames before
    iterRange = list(self._paneNames[:item])
    iterRange.reverse()
    self._iterate(iterRange, self._grow, n)
    self._curSize = p
| 27.065287 | 76 | 0.644702 |
import string
import sys
import types
import Tkinter
import Pmw
class PanedWidget(Pmw.MegaWidget):
def __init__(self, parent = None, **kw):
INITOPT = Pmw.INITOPT
optiondefs = (
('command', None, None),
('orient', 'vertical', INITOPT),
('separatorrelief', 'sunken', INITOPT),
('separatorthickness', 2, INITOPT),
('handlesize', 8, INITOPT),
('hull_width', 400, None),
('hull_height', 400, None),
)
self.defineoptions(kw, optiondefs,
dynamicGroups = ('Frame', 'Separator', 'Handle'))
Pmw.MegaWidget.__init__(self, parent)
self.bind('<Configure>', self._handleConfigure)
if self['orient'] not in ('horizontal', 'vertical'):
raise ValueError, 'bad orient option ' + repr(self['orient']) + \
': must be either \'horizontal\' or \'vertical\''
self._separatorThickness = self['separatorthickness']
self._handleSize = self['handlesize']
self._paneNames = []
self._paneAttrs = {}
self._timerId = None
self._frame = {}
self._separator = []
self._button = []
self._totalSize = 0
self._movePending = 0
self._relsize = {}
self._relmin = {}
self._relmax = {}
self._size = {}
self._min = {}
self._max = {}
self._rootp = None
self._curSize = None
self._beforeLimit = None
self._afterLimit = None
self._buttonIsDown = 0
self._majorSize = 100
self._minorSize = 100
self.initialiseoptions()
def insert(self, name, before = 0, **kw):
self._initPaneOptions(name)
self._parsePaneOptions(name, kw)
insertPos = self._nameToIndex(before)
atEnd = (insertPos == len(self._paneNames))
self._paneNames[insertPos:insertPos] = [name]
self._frame[name] = self.createcomponent(name,
(), 'Frame',
Tkinter.Frame, (self.interior(),))
if len(self._paneNames) > 1:
self._addSeparator()
else:
self._separator.append(None)
self._button.append(None)
if atEnd:
size = self._size[name]
if size > 0 or self._relsize[name] is not None:
if self['orient'] == 'vertical':
self._frame[name].place(x=0, relwidth=1,
height=size, y=self._totalSize)
else:
self._frame[name].place(y=0, relheight=1,
width=size, x=self._totalSize)
else:
if self['orient'] == 'vertical':
self._frame[name].place(x=0, relwidth=1,
y=self._totalSize)
else:
self._frame[name].place(y=0, relheight=1,
x=self._totalSize)
else:
self._updateSizes()
self._totalSize = self._totalSize + self._size[name]
return self._frame[name]
def add(self, name, **kw):
return apply(self.insert, (name, len(self._paneNames)), kw)
def delete(self, name):
deletePos = self._nameToIndex(name)
name = self._paneNames[deletePos]
self.destroycomponent(name)
del self._paneNames[deletePos]
del self._frame[name]
del self._size[name]
del self._min[name]
del self._max[name]
del self._relsize[name]
del self._relmin[name]
del self._relmax[name]
last = len(self._paneNames)
del self._separator[last]
del self._button[last]
if last > 0:
self.destroycomponent(self._sepName(last))
self.destroycomponent(self._buttonName(last))
self._plotHandles()
def setnaturalsize(self):
self.update_idletasks()
totalWidth = 0
totalHeight = 0
maxWidth = 0
maxHeight = 0
for name in self._paneNames:
frame = self._frame[name]
w = frame.winfo_reqwidth()
h = frame.winfo_reqheight()
totalWidth = totalWidth + w
totalHeight = totalHeight + h
if maxWidth < w:
maxWidth = w
if maxHeight < h:
maxHeight = h
bw = string.atoi(str(self.cget('hull_borderwidth')))
hl = string.atoi(str(self.cget('hull_highlightthickness')))
extra = (bw + hl) * 2
if str(self.cget('orient')) == 'horizontal':
totalWidth = totalWidth + extra
maxHeight = maxHeight + extra
self.configure(hull_width = totalWidth, hull_height = maxHeight)
else:
totalHeight = (totalHeight + extra +
(len(self._paneNames) - 1) * self._separatorThickness)
maxWidth = maxWidth + extra
self.configure(hull_width = maxWidth, hull_height = totalHeight)
def move(self, name, newPos, newPosOffset = 0):
numPanes = len(self._paneNames)
if numPanes < 2:
return
newPos = self._nameToIndex(newPos) + newPosOffset
if newPos < 0 or newPos >=numPanes:
return
deletePos = self._nameToIndex(name)
if deletePos == newPos:
return
name = self._paneNames[deletePos]
del self._paneNames[deletePos]
self._paneNames[newPos:newPos] = [name]
self._plotHandles()
self._updateSizes()
def _nameToIndex(self, nameOrIndex):
try:
pos = self._paneNames.index(nameOrIndex)
except ValueError:
pos = nameOrIndex
return pos
def _initPaneOptions(self, name):
self._size[name] = 0
self._relsize[name] = None
self._min[name] = 0
self._relmin[name] = None
self._max[name] = 100000
self._relmax[name] = None
def _parsePaneOptions(self, name, args):
for arg, value in args.items():
if type(value) == types.FloatType:
relvalue = value
value = self._absSize(relvalue)
else:
relvalue = None
if arg == 'size':
self._size[name], self._relsize[name] = value, relvalue
elif arg == 'min':
self._min[name], self._relmin[name] = value, relvalue
elif arg == 'max':
self._max[name], self._relmax[name] = value, relvalue
else:
raise ValueError, 'keyword must be "size", "min", or "max"'
def _absSize(self, relvalue):
return int(round(relvalue * self._majorSize))
def _sepName(self, n):
return 'separator-%d' % n
def _buttonName(self, n):
return 'handle-%d' % n
def _addSeparator(self):
n = len(self._paneNames) - 1
downFunc = lambda event, s = self, num=n: s._btnDown(event, num)
upFunc = lambda event, s = self, num=n: s._btnUp(event, num)
moveFunc = lambda event, s = self, num=n: s._btnMove(event, num)
sep = self.createcomponent(self._sepName(n),
(), 'Separator',
Tkinter.Frame, (self.interior(),),
borderwidth = 1,
relief = self['separatorrelief'])
self._separator.append(sep)
sep.bind('<ButtonPress-1>', downFunc)
sep.bind('<Any-ButtonRelease-1>', upFunc)
sep.bind('<B1-Motion>', moveFunc)
if self['orient'] == 'vertical':
cursor = 'sb_v_double_arrow'
sep.configure(height = self._separatorThickness,
width = 10000, cursor = cursor)
else:
cursor = 'sb_h_double_arrow'
sep.configure(width = self._separatorThickness,
height = 10000, cursor = cursor)
self._totalSize = self._totalSize + self._separatorThickness
handle = self.createcomponent(self._buttonName(n),
(), 'Handle',
Tkinter.Frame, (self.interior(),),
relief = 'raised',
borderwidth = 1,
width = self._handleSize,
height = self._handleSize,
cursor = cursor,
)
self._button.append(handle)
handle.bind('<ButtonPress-1>', downFunc)
handle.bind('<Any-ButtonRelease-1>', upFunc)
handle.bind('<B1-Motion>', moveFunc)
self._plotHandles()
for i in range(1, len(self._paneNames)):
self._separator[i].tkraise()
for i in range(1, len(self._paneNames)):
self._button[i].tkraise()
def _btnUp(self, event, item):
self._buttonIsDown = 0
self._updateSizes()
try:
self._button[item].configure(relief='raised')
except:
pass
def _btnDown(self, event, item):
self._button[item].configure(relief='sunken')
self._getMotionLimit(item)
self._buttonIsDown = 1
self._movePending = 0
def _handleConfigure(self, event = None):
self._getNaturalSizes()
if self._totalSize == 0:
return
iterRange = list(self._paneNames)
iterRange.reverse()
if self._majorSize > self._totalSize:
n = self._majorSize - self._totalSize
self._iterate(iterRange, self._grow, n)
elif self._majorSize < self._totalSize:
n = self._totalSize - self._majorSize
self._iterate(iterRange, self._shrink, n)
self._plotHandles()
self._updateSizes()
def _getNaturalSizes(self):
self.update_idletasks()
self._totalSize = 0
if self['orient'] == 'vertical':
self._majorSize = self.winfo_height()
self._minorSize = self.winfo_width()
majorspec = Tkinter.Frame.winfo_reqheight
else:
self._majorSize = self.winfo_width()
self._minorSize = self.winfo_height()
majorspec = Tkinter.Frame.winfo_reqwidth
bw = string.atoi(str(self.cget('hull_borderwidth')))
hl = string.atoi(str(self.cget('hull_highlightthickness')))
extra = (bw + hl) * 2
self._majorSize = self._majorSize - extra
self._minorSize = self._minorSize - extra
if self._majorSize < 0:
self._majorSize = 0
if self._minorSize < 0:
self._minorSize = 0
for name in self._paneNames:
if self._relsize[name] is None:
if self._size[name] == 0:
self._size[name] = apply(majorspec, (self._frame[name],))
self._setrel(name)
else:
self._size[name] = self._absSize(self._relsize[name])
if self._relmin[name] is not None:
self._min[name] = self._absSize(self._relmin[name])
if self._relmax[name] is not None:
self._max[name] = self._absSize(self._relmax[name])
if self._size[name] < self._min[name]:
self._size[name] = self._min[name]
self._setrel(name)
if self._size[name] > self._max[name]:
self._size[name] = self._max[name]
self._setrel(name)
self._totalSize = self._totalSize + self._size[name]
self._totalSize = (self._totalSize +
(len(self._paneNames) - 1) * self._separatorThickness)
def _setrel(self, name):
if self._relsize[name] is not None:
if self._majorSize != 0:
self._relsize[name] = round(self._size[name]) / self._majorSize
def _iterate(self, names, proc, n):
for i in names:
n = apply(proc, (i, n))
if n == 0:
break
def _grow(self, name, n):
canGrow = self._max[name] - self._size[name]
if canGrow > n:
self._size[name] = self._size[name] + n
self._setrel(name)
return 0
elif canGrow > 0:
self._size[name] = self._max[name]
self._setrel(name)
n = n - canGrow
return n
def _shrink(self, name, n):
canShrink = self._size[name] - self._min[name]
if canShrink > n:
self._size[name] = self._size[name] - n
self._setrel(name)
return 0
elif canShrink > 0:
self._size[name] = self._min[name]
self._setrel(name)
n = n - canShrink
return n
def _updateSizes(self):
totalSize = 0
for name in self._paneNames:
size = self._size[name]
if self['orient'] == 'vertical':
self._frame[name].place(x = 0, relwidth = 1,
y = totalSize,
height = size)
else:
self._frame[name].place(y = 0, relheight = 1,
x = totalSize,
width = size)
totalSize = totalSize + size + self._separatorThickness
cmd = self['command']
if callable(cmd):
cmd(map(lambda x, s = self: s._size[x], self._paneNames))
def _plotHandles(self):
if len(self._paneNames) == 0:
return
if self['orient'] == 'vertical':
btnp = self._minorSize - 13
else:
h = self._minorSize
if h > 18:
btnp = 9
else:
btnp = h - 9
firstPane = self._paneNames[0]
totalSize = self._size[firstPane]
first = 1
last = len(self._paneNames) - 1
for i in range(1, last + 1):
handlepos = totalSize - 3
prevSize = self._size[self._paneNames[i - 1]]
nextSize = self._size[self._paneNames[i]]
offset1 = 0
if i == first:
if prevSize < 4:
offset1 = 4 - prevSize
else:
if prevSize < 8:
offset1 = (8 - prevSize) / 2
offset2 = 0
if i == last:
if nextSize < 4:
offset2 = nextSize - 4
else:
if nextSize < 8:
offset2 = (nextSize - 8) / 2
handlepos = handlepos + offset1
if self['orient'] == 'vertical':
height = 8 - offset1 + offset2
if height > 1:
self._button[i].configure(height = height)
self._button[i].place(x = btnp, y = handlepos)
else:
self._button[i].place_forget()
self._separator[i].place(x = 0, y = totalSize,
relwidth = 1)
else:
width = 8 - offset1 + offset2
if width > 1:
self._button[i].configure(width = width)
self._button[i].place(y = btnp, x = handlepos)
else:
self._button[i].place_forget()
self._separator[i].place(y = 0, x = totalSize,
relheight = 1)
totalSize = totalSize + nextSize + self._separatorThickness
def pane(self, name):
return self._frame[self._paneNames[self._nameToIndex(name)]]
def panes(self):
return list(self._paneNames)
def configurepane(self, name, **kw):
name = self._paneNames[self._nameToIndex(name)]
self._parsePaneOptions(name, kw)
self._handleConfigure()
def updatelayout(self):
self._handleConfigure()
def _getMotionLimit(self, item):
curBefore = (item - 1) * self._separatorThickness
minBefore, maxBefore = curBefore, curBefore
for name in self._paneNames[:item]:
curBefore = curBefore + self._size[name]
minBefore = minBefore + self._min[name]
maxBefore = maxBefore + self._max[name]
curAfter = (len(self._paneNames) - item) * self._separatorThickness
minAfter, maxAfter = curAfter, curAfter
for name in self._paneNames[item:]:
curAfter = curAfter + self._size[name]
minAfter = minAfter + self._min[name]
maxAfter = maxAfter + self._max[name]
beforeToGo = min(curBefore - minBefore, maxAfter - curAfter)
afterToGo = min(curAfter - minAfter, maxBefore - curBefore)
self._beforeLimit = curBefore - beforeToGo
self._afterLimit = curBefore + afterToGo
self._curSize = curBefore
self._plotHandles()
def _btnMove(self, event, item):
self._rootp = event
if self._movePending == 0:
self._timerId = self.after_idle(
lambda s = self, i = item: s._btnMoveCompressed(i))
self._movePending = 1
def destroy(self):
if self._timerId is not None:
self.after_cancel(self._timerId)
self._timerId = None
Pmw.MegaWidget.destroy(self)
def _btnMoveCompressed(self, item):
if not self._buttonIsDown:
return
if self['orient'] == 'vertical':
p = self._rootp.y_root - self.winfo_rooty()
else:
p = self._rootp.x_root - self.winfo_rootx()
if p == self._curSize:
self._movePending = 0
return
if p < self._beforeLimit:
p = self._beforeLimit
if p >= self._afterLimit:
p = self._afterLimit
self._calculateChange(item, p)
self.update_idletasks()
self._movePending = 0
def _calculateChange(self, item, p):
if p < self._curSize:
self._moveBefore(item, p)
elif p > self._curSize:
self._moveAfter(item, p)
self._plotHandles()
def _moveBefore(self, item, p):
n = self._curSize - p
iterRange = list(self._paneNames[:item])
iterRange.reverse()
self._iterate(iterRange, self._shrink, n)
iterRange = self._paneNames[item:]
self._iterate(iterRange, self._grow, n)
self._curSize = p
def _moveAfter(self, item, p):
n = p - self._curSize
iterRange = self._paneNames[item:]
self._iterate(iterRange, self._shrink, n)
iterRange = list(self._paneNames[:item])
iterRange.reverse()
self._iterate(iterRange, self._grow, n)
self._curSize = p
| false | true |
f71d0fb7e6da71f240857ee7196759e47de60c68 | 11,547 | py | Python | src/virtual-wan/azext_vwan/vendored_sdks/v2021_08_01/v2021_08_01/operations/__init__.py | Caoxuyang/azure-cli-extensions | d2011261f29033cb31a1064256727d87049ab423 | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | src/virtual-wan/azext_vwan/vendored_sdks/v2021_08_01/v2021_08_01/operations/__init__.py | Caoxuyang/azure-cli-extensions | d2011261f29033cb31a1064256727d87049ab423 | [
"MIT"
] | 9 | 2022-03-25T19:35:49.000Z | 2022-03-31T06:09:47.000Z | src/virtual-wan/azext_vwan/vendored_sdks/v2021_08_01/v2021_08_01/operations/__init__.py | Caoxuyang/azure-cli-extensions | d2011261f29033cb31a1064256727d87049ab423 | [
"MIT"
] | 1 | 2022-02-14T21:43:29.000Z | 2022-02-14T21:43:29.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._operations import ApplicationGatewaysOperations
from ._operations import ApplicationGatewayPrivateLinkResourcesOperations
from ._operations import ApplicationGatewayPrivateEndpointConnectionsOperations
from ._operations import ApplicationSecurityGroupsOperations
from ._operations import AvailableDelegationsOperations
from ._operations import AvailableResourceGroupDelegationsOperations
from ._operations import AvailableServiceAliasesOperations
from ._operations import AzureFirewallsOperations
from ._operations import AzureFirewallFqdnTagsOperations
from ._operations import WebCategoriesOperations
from ._operations import BastionHostsOperations
from ._operations import NetworkManagementClientOperationsMixin
from ._operations import NetworkInterfacesOperations
from ._operations import PublicIPAddressesOperations
from ._operations import CustomIPPrefixesOperations
from ._operations import DdosCustomPoliciesOperations
from ._operations import DdosProtectionPlansOperations
from ._operations import DscpConfigurationOperations
from ._operations import AvailableEndpointServicesOperations
from ._operations import ExpressRouteCircuitAuthorizationsOperations
from ._operations import ExpressRouteCircuitPeeringsOperations
from ._operations import ExpressRouteCircuitConnectionsOperations
from ._operations import PeerExpressRouteCircuitConnectionsOperations
from ._operations import ExpressRouteCircuitsOperations
from ._operations import ExpressRouteServiceProvidersOperations
from ._operations import ExpressRouteCrossConnectionsOperations
from ._operations import ExpressRouteCrossConnectionPeeringsOperations
from ._operations import ExpressRoutePortsLocationsOperations
from ._operations import ExpressRoutePortsOperations
from ._operations import ExpressRouteLinksOperations
from ._operations import ExpressRoutePortAuthorizationsOperations
from ._operations import FirewallPoliciesOperations
from ._operations import FirewallPolicyRuleCollectionGroupsOperations
from ._operations import FirewallPolicyIdpsSignaturesOperations
from ._operations import FirewallPolicyIdpsSignaturesOverridesOperations
from ._operations import FirewallPolicyIdpsSignaturesFilterValuesOperations
from ._operations import IpAllocationsOperations
from ._operations import IpGroupsOperations
from ._operations import LoadBalancersOperations
from ._operations import LoadBalancerBackendAddressPoolsOperations
from ._operations import LoadBalancerFrontendIPConfigurationsOperations
from ._operations import InboundNatRulesOperations
from ._operations import LoadBalancerLoadBalancingRulesOperations
from ._operations import LoadBalancerOutboundRulesOperations
from ._operations import LoadBalancerNetworkInterfacesOperations
from ._operations import LoadBalancerProbesOperations
from ._operations import NatGatewaysOperations
from ._operations import NetworkInterfaceIPConfigurationsOperations
from ._operations import NetworkInterfaceLoadBalancersOperations
from ._operations import NetworkInterfaceTapConfigurationsOperations
from ._operations import NetworkProfilesOperations
from ._operations import NetworkSecurityGroupsOperations
from ._operations import SecurityRulesOperations
from ._operations import DefaultSecurityRulesOperations
from ._operations import NetworkVirtualAppliancesOperations
from ._operations import VirtualApplianceSitesOperations
from ._operations import VirtualApplianceSkusOperations
from ._operations import InboundSecurityRuleOperations
from ._operations import NetworkWatchersOperations
from ._operations import PacketCapturesOperations
from ._operations import ConnectionMonitorsOperations
from ._operations import FlowLogsOperations
from ._operations import Operations
from ._operations import PrivateEndpointsOperations
from ._operations import AvailablePrivateEndpointTypesOperations
from ._operations import PrivateDnsZoneGroupsOperations
from ._operations import PrivateLinkServicesOperations
from ._operations import PublicIPPrefixesOperations
from ._operations import RouteFiltersOperations
from ._operations import RouteFilterRulesOperations
from ._operations import RouteTablesOperations
from ._operations import RoutesOperations
from ._operations import SecurityPartnerProvidersOperations
from ._operations import BgpServiceCommunitiesOperations
from ._operations import ServiceEndpointPoliciesOperations
from ._operations import ServiceEndpointPolicyDefinitionsOperations
from ._operations import ServiceTagsOperations
from ._operations import ServiceTagInformationOperations
from ._operations import UsagesOperations
from ._operations import VirtualNetworksOperations
from ._operations import SubnetsOperations
from ._operations import ResourceNavigationLinksOperations
from ._operations import ServiceAssociationLinksOperations
from ._operations import VirtualNetworkPeeringsOperations
from ._operations import VirtualNetworkGatewaysOperations
from ._operations import VirtualNetworkGatewayConnectionsOperations
from ._operations import LocalNetworkGatewaysOperations
from ._operations import VirtualNetworkGatewayNatRulesOperations
from ._operations import VirtualNetworkTapsOperations
from ._operations import VirtualRoutersOperations
from ._operations import VirtualRouterPeeringsOperations
from ._operations import VirtualWansOperations
from ._operations import VpnSitesOperations
from ._operations import VpnSiteLinksOperations
from ._operations import VpnSitesConfigurationOperations
from ._operations import VpnServerConfigurationsOperations
from ._operations import ConfigurationPolicyGroupsOperations
from ._operations import VirtualHubsOperations
from ._operations import HubVirtualNetworkConnectionsOperations
from ._operations import VpnGatewaysOperations
from ._operations import VpnLinkConnectionsOperations
from ._operations import VpnConnectionsOperations
from ._operations import VpnSiteLinkConnectionsOperations
from ._operations import NatRulesOperations
from ._operations import P2SVpnGatewaysOperations
from ._operations import VpnServerConfigurationsAssociatedWithVirtualWanOperations
from ._operations import VirtualHubRouteTableV2SOperations
from ._operations import ExpressRouteGatewaysOperations
from ._operations import ExpressRouteConnectionsOperations
from ._operations import VirtualHubBgpConnectionOperations
from ._operations import VirtualHubBgpConnectionsOperations
from ._operations import VirtualHubIpConfigurationOperations
from ._operations import HubRouteTablesOperations
from ._operations import RoutingIntentOperations
from ._operations import WebApplicationFirewallPoliciesOperations
__all__ = [
'ApplicationGatewaysOperations',
'ApplicationGatewayPrivateLinkResourcesOperations',
'ApplicationGatewayPrivateEndpointConnectionsOperations',
'ApplicationSecurityGroupsOperations',
'AvailableDelegationsOperations',
'AvailableResourceGroupDelegationsOperations',
'AvailableServiceAliasesOperations',
'AzureFirewallsOperations',
'AzureFirewallFqdnTagsOperations',
'WebCategoriesOperations',
'BastionHostsOperations',
'NetworkManagementClientOperationsMixin',
'NetworkInterfacesOperations',
'PublicIPAddressesOperations',
'CustomIPPrefixesOperations',
'DdosCustomPoliciesOperations',
'DdosProtectionPlansOperations',
'DscpConfigurationOperations',
'AvailableEndpointServicesOperations',
'ExpressRouteCircuitAuthorizationsOperations',
'ExpressRouteCircuitPeeringsOperations',
'ExpressRouteCircuitConnectionsOperations',
'PeerExpressRouteCircuitConnectionsOperations',
'ExpressRouteCircuitsOperations',
'ExpressRouteServiceProvidersOperations',
'ExpressRouteCrossConnectionsOperations',
'ExpressRouteCrossConnectionPeeringsOperations',
'ExpressRoutePortsLocationsOperations',
'ExpressRoutePortsOperations',
'ExpressRouteLinksOperations',
'ExpressRoutePortAuthorizationsOperations',
'FirewallPoliciesOperations',
'FirewallPolicyRuleCollectionGroupsOperations',
'FirewallPolicyIdpsSignaturesOperations',
'FirewallPolicyIdpsSignaturesOverridesOperations',
'FirewallPolicyIdpsSignaturesFilterValuesOperations',
'IpAllocationsOperations',
'IpGroupsOperations',
'LoadBalancersOperations',
'LoadBalancerBackendAddressPoolsOperations',
'LoadBalancerFrontendIPConfigurationsOperations',
'InboundNatRulesOperations',
'LoadBalancerLoadBalancingRulesOperations',
'LoadBalancerOutboundRulesOperations',
'LoadBalancerNetworkInterfacesOperations',
'LoadBalancerProbesOperations',
'NatGatewaysOperations',
'NetworkInterfaceIPConfigurationsOperations',
'NetworkInterfaceLoadBalancersOperations',
'NetworkInterfaceTapConfigurationsOperations',
'NetworkProfilesOperations',
'NetworkSecurityGroupsOperations',
'SecurityRulesOperations',
'DefaultSecurityRulesOperations',
'NetworkVirtualAppliancesOperations',
'VirtualApplianceSitesOperations',
'VirtualApplianceSkusOperations',
'InboundSecurityRuleOperations',
'NetworkWatchersOperations',
'PacketCapturesOperations',
'ConnectionMonitorsOperations',
'FlowLogsOperations',
'Operations',
'PrivateEndpointsOperations',
'AvailablePrivateEndpointTypesOperations',
'PrivateDnsZoneGroupsOperations',
'PrivateLinkServicesOperations',
'PublicIPPrefixesOperations',
'RouteFiltersOperations',
'RouteFilterRulesOperations',
'RouteTablesOperations',
'RoutesOperations',
'SecurityPartnerProvidersOperations',
'BgpServiceCommunitiesOperations',
'ServiceEndpointPoliciesOperations',
'ServiceEndpointPolicyDefinitionsOperations',
'ServiceTagsOperations',
'ServiceTagInformationOperations',
'UsagesOperations',
'VirtualNetworksOperations',
'SubnetsOperations',
'ResourceNavigationLinksOperations',
'ServiceAssociationLinksOperations',
'VirtualNetworkPeeringsOperations',
'VirtualNetworkGatewaysOperations',
'VirtualNetworkGatewayConnectionsOperations',
'LocalNetworkGatewaysOperations',
'VirtualNetworkGatewayNatRulesOperations',
'VirtualNetworkTapsOperations',
'VirtualRoutersOperations',
'VirtualRouterPeeringsOperations',
'VirtualWansOperations',
'VpnSitesOperations',
'VpnSiteLinksOperations',
'VpnSitesConfigurationOperations',
'VpnServerConfigurationsOperations',
'ConfigurationPolicyGroupsOperations',
'VirtualHubsOperations',
'HubVirtualNetworkConnectionsOperations',
'VpnGatewaysOperations',
'VpnLinkConnectionsOperations',
'VpnConnectionsOperations',
'VpnSiteLinkConnectionsOperations',
'NatRulesOperations',
'P2SVpnGatewaysOperations',
'VpnServerConfigurationsAssociatedWithVirtualWanOperations',
'VirtualHubRouteTableV2SOperations',
'ExpressRouteGatewaysOperations',
'ExpressRouteConnectionsOperations',
'VirtualHubBgpConnectionOperations',
'VirtualHubBgpConnectionsOperations',
'VirtualHubIpConfigurationOperations',
'HubRouteTablesOperations',
'RoutingIntentOperations',
'WebApplicationFirewallPoliciesOperations',
]
| 47.714876 | 94 | 0.850611 |
from ._operations import ApplicationGatewaysOperations
from ._operations import ApplicationGatewayPrivateLinkResourcesOperations
from ._operations import ApplicationGatewayPrivateEndpointConnectionsOperations
from ._operations import ApplicationSecurityGroupsOperations
from ._operations import AvailableDelegationsOperations
from ._operations import AvailableResourceGroupDelegationsOperations
from ._operations import AvailableServiceAliasesOperations
from ._operations import AzureFirewallsOperations
from ._operations import AzureFirewallFqdnTagsOperations
from ._operations import WebCategoriesOperations
from ._operations import BastionHostsOperations
from ._operations import NetworkManagementClientOperationsMixin
from ._operations import NetworkInterfacesOperations
from ._operations import PublicIPAddressesOperations
from ._operations import CustomIPPrefixesOperations
from ._operations import DdosCustomPoliciesOperations
from ._operations import DdosProtectionPlansOperations
from ._operations import DscpConfigurationOperations
from ._operations import AvailableEndpointServicesOperations
from ._operations import ExpressRouteCircuitAuthorizationsOperations
from ._operations import ExpressRouteCircuitPeeringsOperations
from ._operations import ExpressRouteCircuitConnectionsOperations
from ._operations import PeerExpressRouteCircuitConnectionsOperations
from ._operations import ExpressRouteCircuitsOperations
from ._operations import ExpressRouteServiceProvidersOperations
from ._operations import ExpressRouteCrossConnectionsOperations
from ._operations import ExpressRouteCrossConnectionPeeringsOperations
from ._operations import ExpressRoutePortsLocationsOperations
from ._operations import ExpressRoutePortsOperations
from ._operations import ExpressRouteLinksOperations
from ._operations import ExpressRoutePortAuthorizationsOperations
from ._operations import FirewallPoliciesOperations
from ._operations import FirewallPolicyRuleCollectionGroupsOperations
from ._operations import FirewallPolicyIdpsSignaturesOperations
from ._operations import FirewallPolicyIdpsSignaturesOverridesOperations
from ._operations import FirewallPolicyIdpsSignaturesFilterValuesOperations
from ._operations import IpAllocationsOperations
from ._operations import IpGroupsOperations
from ._operations import LoadBalancersOperations
from ._operations import LoadBalancerBackendAddressPoolsOperations
from ._operations import LoadBalancerFrontendIPConfigurationsOperations
from ._operations import InboundNatRulesOperations
from ._operations import LoadBalancerLoadBalancingRulesOperations
from ._operations import LoadBalancerOutboundRulesOperations
from ._operations import LoadBalancerNetworkInterfacesOperations
from ._operations import LoadBalancerProbesOperations
from ._operations import NatGatewaysOperations
from ._operations import NetworkInterfaceIPConfigurationsOperations
from ._operations import NetworkInterfaceLoadBalancersOperations
from ._operations import NetworkInterfaceTapConfigurationsOperations
from ._operations import NetworkProfilesOperations
from ._operations import NetworkSecurityGroupsOperations
from ._operations import SecurityRulesOperations
from ._operations import DefaultSecurityRulesOperations
from ._operations import NetworkVirtualAppliancesOperations
from ._operations import VirtualApplianceSitesOperations
from ._operations import VirtualApplianceSkusOperations
from ._operations import InboundSecurityRuleOperations
from ._operations import NetworkWatchersOperations
from ._operations import PacketCapturesOperations
from ._operations import ConnectionMonitorsOperations
from ._operations import FlowLogsOperations
from ._operations import Operations
from ._operations import PrivateEndpointsOperations
from ._operations import AvailablePrivateEndpointTypesOperations
from ._operations import PrivateDnsZoneGroupsOperations
from ._operations import PrivateLinkServicesOperations
from ._operations import PublicIPPrefixesOperations
from ._operations import RouteFiltersOperations
from ._operations import RouteFilterRulesOperations
from ._operations import RouteTablesOperations
from ._operations import RoutesOperations
from ._operations import SecurityPartnerProvidersOperations
from ._operations import BgpServiceCommunitiesOperations
from ._operations import ServiceEndpointPoliciesOperations
from ._operations import ServiceEndpointPolicyDefinitionsOperations
from ._operations import ServiceTagsOperations
from ._operations import ServiceTagInformationOperations
from ._operations import UsagesOperations
from ._operations import VirtualNetworksOperations
from ._operations import SubnetsOperations
from ._operations import ResourceNavigationLinksOperations
from ._operations import ServiceAssociationLinksOperations
from ._operations import VirtualNetworkPeeringsOperations
from ._operations import VirtualNetworkGatewaysOperations
from ._operations import VirtualNetworkGatewayConnectionsOperations
from ._operations import LocalNetworkGatewaysOperations
from ._operations import VirtualNetworkGatewayNatRulesOperations
from ._operations import VirtualNetworkTapsOperations
from ._operations import VirtualRoutersOperations
from ._operations import VirtualRouterPeeringsOperations
from ._operations import VirtualWansOperations
from ._operations import VpnSitesOperations
from ._operations import VpnSiteLinksOperations
from ._operations import VpnSitesConfigurationOperations
from ._operations import VpnServerConfigurationsOperations
from ._operations import ConfigurationPolicyGroupsOperations
from ._operations import VirtualHubsOperations
from ._operations import HubVirtualNetworkConnectionsOperations
from ._operations import VpnGatewaysOperations
from ._operations import VpnLinkConnectionsOperations
from ._operations import VpnConnectionsOperations
from ._operations import VpnSiteLinkConnectionsOperations
from ._operations import NatRulesOperations
from ._operations import P2SVpnGatewaysOperations
from ._operations import VpnServerConfigurationsAssociatedWithVirtualWanOperations
from ._operations import VirtualHubRouteTableV2SOperations
from ._operations import ExpressRouteGatewaysOperations
from ._operations import ExpressRouteConnectionsOperations
from ._operations import VirtualHubBgpConnectionOperations
from ._operations import VirtualHubBgpConnectionsOperations
from ._operations import VirtualHubIpConfigurationOperations
from ._operations import HubRouteTablesOperations
from ._operations import RoutingIntentOperations
from ._operations import WebApplicationFirewallPoliciesOperations
# Public re-exports of the operation-group classes defined in the private
# ``._operations`` module; kept in the same order as the imports above so the
# two lists stay easy to diff.  This module looks auto-generated (Azure SDK
# style) -- regenerate rather than hand-edit where possible.
__all__ = [
    'ApplicationGatewaysOperations',
    'ApplicationGatewayPrivateLinkResourcesOperations',
    'ApplicationGatewayPrivateEndpointConnectionsOperations',
    'ApplicationSecurityGroupsOperations',
    'AvailableDelegationsOperations',
    'AvailableResourceGroupDelegationsOperations',
    'AvailableServiceAliasesOperations',
    'AzureFirewallsOperations',
    'AzureFirewallFqdnTagsOperations',
    'WebCategoriesOperations',
    'BastionHostsOperations',
    'NetworkManagementClientOperationsMixin',
    'NetworkInterfacesOperations',
    'PublicIPAddressesOperations',
    'CustomIPPrefixesOperations',
    'DdosCustomPoliciesOperations',
    'DdosProtectionPlansOperations',
    'DscpConfigurationOperations',
    'AvailableEndpointServicesOperations',
    'ExpressRouteCircuitAuthorizationsOperations',
    'ExpressRouteCircuitPeeringsOperations',
    'ExpressRouteCircuitConnectionsOperations',
    'PeerExpressRouteCircuitConnectionsOperations',
    'ExpressRouteCircuitsOperations',
    'ExpressRouteServiceProvidersOperations',
    'ExpressRouteCrossConnectionsOperations',
    'ExpressRouteCrossConnectionPeeringsOperations',
    'ExpressRoutePortsLocationsOperations',
    'ExpressRoutePortsOperations',
    'ExpressRouteLinksOperations',
    'ExpressRoutePortAuthorizationsOperations',
    'FirewallPoliciesOperations',
    'FirewallPolicyRuleCollectionGroupsOperations',
    'FirewallPolicyIdpsSignaturesOperations',
    'FirewallPolicyIdpsSignaturesOverridesOperations',
    'FirewallPolicyIdpsSignaturesFilterValuesOperations',
    'IpAllocationsOperations',
    'IpGroupsOperations',
    'LoadBalancersOperations',
    'LoadBalancerBackendAddressPoolsOperations',
    'LoadBalancerFrontendIPConfigurationsOperations',
    'InboundNatRulesOperations',
    'LoadBalancerLoadBalancingRulesOperations',
    'LoadBalancerOutboundRulesOperations',
    'LoadBalancerNetworkInterfacesOperations',
    'LoadBalancerProbesOperations',
    'NatGatewaysOperations',
    'NetworkInterfaceIPConfigurationsOperations',
    'NetworkInterfaceLoadBalancersOperations',
    'NetworkInterfaceTapConfigurationsOperations',
    'NetworkProfilesOperations',
    'NetworkSecurityGroupsOperations',
    'SecurityRulesOperations',
    'DefaultSecurityRulesOperations',
    'NetworkVirtualAppliancesOperations',
    'VirtualApplianceSitesOperations',
    'VirtualApplianceSkusOperations',
    'InboundSecurityRuleOperations',
    'NetworkWatchersOperations',
    'PacketCapturesOperations',
    'ConnectionMonitorsOperations',
    'FlowLogsOperations',
    'Operations',
    'PrivateEndpointsOperations',
    'AvailablePrivateEndpointTypesOperations',
    'PrivateDnsZoneGroupsOperations',
    'PrivateLinkServicesOperations',
    'PublicIPPrefixesOperations',
    'RouteFiltersOperations',
    'RouteFilterRulesOperations',
    'RouteTablesOperations',
    'RoutesOperations',
    'SecurityPartnerProvidersOperations',
    'BgpServiceCommunitiesOperations',
    'ServiceEndpointPoliciesOperations',
    'ServiceEndpointPolicyDefinitionsOperations',
    'ServiceTagsOperations',
    'ServiceTagInformationOperations',
    'UsagesOperations',
    'VirtualNetworksOperations',
    'SubnetsOperations',
    'ResourceNavigationLinksOperations',
    'ServiceAssociationLinksOperations',
    'VirtualNetworkPeeringsOperations',
    'VirtualNetworkGatewaysOperations',
    'VirtualNetworkGatewayConnectionsOperations',
    'LocalNetworkGatewaysOperations',
    'VirtualNetworkGatewayNatRulesOperations',
    'VirtualNetworkTapsOperations',
    'VirtualRoutersOperations',
    'VirtualRouterPeeringsOperations',
    'VirtualWansOperations',
    'VpnSitesOperations',
    'VpnSiteLinksOperations',
    'VpnSitesConfigurationOperations',
    'VpnServerConfigurationsOperations',
    'ConfigurationPolicyGroupsOperations',
    'VirtualHubsOperations',
    'HubVirtualNetworkConnectionsOperations',
    'VpnGatewaysOperations',
    'VpnLinkConnectionsOperations',
    'VpnConnectionsOperations',
    'VpnSiteLinkConnectionsOperations',
    'NatRulesOperations',
    'P2SVpnGatewaysOperations',
    'VpnServerConfigurationsAssociatedWithVirtualWanOperations',
    'VirtualHubRouteTableV2SOperations',
    'ExpressRouteGatewaysOperations',
    'ExpressRouteConnectionsOperations',
    'VirtualHubBgpConnectionOperations',
    'VirtualHubBgpConnectionsOperations',
    'VirtualHubIpConfigurationOperations',
    'HubRouteTablesOperations',
    'RoutingIntentOperations',
    'WebApplicationFirewallPoliciesOperations',
]
| true | true |
f71d0fd842f2064dbce2597cdedbee45810477a0 | 192 | py | Python | ticket_universe/charsets.py | lotify/ticket_universe | 1947cc9d6a555a68af5b39d252cac3ecef06400c | [
"MIT"
] | 4 | 2019-01-24T12:38:37.000Z | 2019-03-26T12:36:18.000Z | ticket_universe/charsets.py | lotify/ticket_universe | 1947cc9d6a555a68af5b39d252cac3ecef06400c | [
"MIT"
] | null | null | null | ticket_universe/charsets.py | lotify/ticket_universe | 1947cc9d6a555a68af5b39d252cac3ecef06400c | [
"MIT"
] | null | null | null | def latin() -> [str]:
"""[A-Z]"""
return list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
# NOTE: annotation fixed -- the original ``-> [str]`` is a *list literal*
# containing the ``str`` type, not a valid type expression.  The quoted form
# stays importable on interpreters older than Python 3.9.
def safe_latin() -> "list[str]":
    """Return the Latin uppercase alphabet [A-Z] excluding O, I and L.

    23 characters; presumably the three letters are dropped because they are
    easily confused with digits/each other -- TODO confirm with maintainers.
    """
    return list("ABCDEFGHJKMNPQRSTUVWXYZ")
| 21.333333 | 45 | 0.583333 | def latin() -> [str]:
return list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
def safe_latin() -> "list[str]":
    # "Safe" variant of latin(): uppercase A-Z minus O, I and L (23 letters).
    # Annotation fixed from the invalid ``[str]`` (a list literal, not a type);
    # quoted so it remains valid before Python 3.9.
    return list("ABCDEFGHJKMNPQRSTUVWXYZ")
| true | true |
f71d10310130fbb663c3c6a549057fdf4e5e6935 | 56 | py | Python | app/config_combos/schema.py | rafiq10/rrhh_tdd_backend | fecbe4f3dd249b31f71e4b63904c565e207e45f9 | [
"bzip2-1.0.6"
] | null | null | null | app/config_combos/schema.py | rafiq10/rrhh_tdd_backend | fecbe4f3dd249b31f71e4b63904c565e207e45f9 | [
"bzip2-1.0.6"
] | 2 | 2021-03-25T22:50:12.000Z | 2021-04-30T20:53:22.000Z | app/config_combos/schema.py | rafiq10/rrhh_tdd_backend | fecbe4f3dd249b31f71e4b63904c565e207e45f9 | [
"bzip2-1.0.6"
] | null | null | null | from .combos.combos import CombosModel, get_combo_tables | 56 | 56 | 0.875 | from .combos.combos import CombosModel, get_combo_tables | true | true |
f71d123c09ba6000e80271689145557882b00944 | 12,061 | py | Python | content/generate_folder_directory.py | lingcog/2019-CS109A | f1eaa62976fe989c3ad3f3ab4b8dd5d71574a2c3 | [
"MIT"
] | 442 | 2019-06-11T06:47:00.000Z | 2022-03-12T11:19:31.000Z | content/generate_folder_directory.py | lelandroberts97/2019-CS109A | 976da6b65c26fd3c5db285cbf9ec9cde92751a70 | [
"MIT"
] | 3 | 2019-09-23T17:32:51.000Z | 2022-02-09T06:06:00.000Z | content/generate_folder_directory.py | lelandroberts97/2019-CS109A | 976da6b65c26fd3c5db285cbf9ec9cde92751a70 | [
"MIT"
] | 486 | 2019-06-17T05:01:07.000Z | 2022-03-13T20:30:44.000Z | import os
import sys
from datetime import datetime
class GenerateStructure:
    """Scaffold a course content tree on disk.

    Creates numbered content folders (``lecture1``..., ``lab1``..., ``how0``...,
    ``section1``..., ``a-sec1``...) under a target directory.  Every numbered
    folder is populated with the requested sub-folders (each holding an empty
    ``.placeholder`` file so empty directories survive version control) and,
    for most content types, a Pelican-style ``index.md`` metadata stub.
    """

    def __init__(self, number_of_lectures, number_of_labs, number_of_homework, number_of_sections,
                 number_of_advanced_sections, folders,
                 default_directory, default_directory_lectures, default_directory_lecture_playground,
                 default_directory_labs, default_directory_labs_playground, default_directory_homework,
                 default_directory_homework_playground, default_directory_advanced_sections,
                 default_directory_advanced_sections_playground,
                 default_directory_sections_playground):
        # Counts of each content type to generate.
        self.number_of_lectures = number_of_lectures
        self.number_of_labs = number_of_labs
        self.number_of_homework = number_of_homework
        self.number_of_sections = number_of_sections
        self.number_of_advanced_sections = number_of_advanced_sections
        # Sub-folders created inside every numbered content folder.
        self.folders = folders
        # Base directory plus the per-content-type target directories.
        self.default_directory = default_directory
        self.default_directory_lectures = default_directory_lectures
        self.default_directory_lecture_playground = default_directory_lecture_playground
        self.default_directory_labs = default_directory_labs
        self.default_directory_labs_playground = default_directory_labs_playground
        self.default_directory_homework = default_directory_homework
        self.default_directory_homework_playground = default_directory_homework_playground
        self.default_directory_advanced_sections = default_directory_advanced_sections
        # BUG FIX: this parameter was accepted but never stored.
        self.default_directory_advanced_sections_playground = default_directory_advanced_sections_playground
        # BUG FIX: the original read the *undefined* local name
        # ``default_directory_sections`` and only worked because an identically
        # named global happened to exist in the ``__main__`` block.  Derive the
        # path from the base directory instead, matching the value the script
        # builds (``default_directory + "sections/"``).
        self.default_directory_sections = default_directory + "sections/"
        self.default_directory_sections_playground = default_directory_sections_playground

    @staticmethod
    def create_directory(directory, fold):
        """Create ``directory``/``fold`` containing an empty ``.placeholder``."""
        os.makedirs(directory + fold)
        open(directory + fold + '/.placeholder', 'w').close()

    @staticmethod
    def create_index(directory, title, category, slug, i):
        """Append a Pelican-style metadata stub to ``directory``/index.md."""
        with open(directory + "index.md", 'a') as index:
            index.write("Title: " + title + str(i) + ':\n' +
                        "Category: " + category + '\n' +
                        "Date: " + datetime.today().strftime('%Y-%m-%d') + '\n' +
                        "Author: " + '\n' +
                        "Slug: " + slug + str(i) + '\n' +
                        # BUG FIX: the generated field was misspelled "Tages:".
                        "Tags: ADD TAGS HERE" +
                        '\n\n\n' + "## Slides")

    def _create_numbered(self, directory, prefix, count, folders,
                         start=1, index_args=None, display_name=None):
        """Create ``<prefix><i>`` folders under ``directory`` for i in [start, count].

        Each new folder receives the ``folders`` sub-folders.  ``index_args``
        is an optional ``(title, category, slug)`` triple; when given, an
        ``index.md`` stub is written into every newly created folder.
        ``display_name`` is the name printed in the "already exist" message;
        kept separate because the original messages did not always match the
        folder prefix (e.g. "homework" vs "how").
        """
        if display_name is None:
            display_name = prefix
        for i in range(start, count + 1):
            # Ensure the parent content directory exists before populating it.
            if not os.path.exists(directory):
                os.makedirs(directory)
            if not os.path.exists(directory + prefix + str(i)):
                os.makedirs(directory + prefix + str(i))
                subdirectory = directory + prefix + str(i) + '/'
                for fold in folders:
                    self.create_directory(subdirectory, fold)
                if index_args is not None:
                    title, category, slug = index_args
                    self.create_index(subdirectory, title, category, slug, i)
            else:
                print("The directory : '", directory + display_name + str(i), "' already exist.")

    def create_lectures(self, directory, number_of_hw, folders):
        """Create ``lecture1``..``lectureN`` folders with index stubs.

        NOTE: the second parameter is historically (mis)named ``number_of_hw``
        but holds the number of lectures; the name is kept so existing
        keyword callers do not break.
        """
        self._create_numbered(directory, "lecture", number_of_hw, folders,
                              index_args=("Lecture ", "lectures", "lecture"))

    def create_lecture_playground(self, directory, number_of_lectures, folders):
        """Create ``lecture1``..``lectureN`` playground folders (no index stub)."""
        self._create_numbered(directory, "lecture", number_of_lectures, folders)

    def create_labs(self, directory, number_of_labs, folders):
        """Create ``lab1``..``labN`` folders with index stubs."""
        self._create_numbered(directory, "lab", number_of_labs, folders,
                              index_args=("Lab ", "labs", "lab"))

    def create_homework(self, directory, number_of_labs, folders):
        """Create ``how0``..``howN`` homework folders (no index stub).

        Numbering deliberately starts at 0, so count+1 folders are produced.
        The prefix "how" looks like a typo for "hw" but is preserved for
        backward compatibility -- TODO confirm before renaming.
        """
        self._create_numbered(directory, "how", number_of_labs, folders,
                              start=0, display_name="homework")

    def create_section(self, directory, sections, folders):
        """Create ``section1``..``sectionN`` folders with index stubs."""
        self._create_numbered(directory, "section", sections, folders,
                              index_args=("Sections ", "section", "section"),
                              display_name="Sections")

    def create_a_section(self, directory, advanced_sections, folders):
        """Create ``a-sec1``..``a-secN`` advanced-section folders with index stubs."""
        self._create_numbered(directory, "a-sec", advanced_sections, folders,
                              index_args=("Advanced Sections ", "a-sections", "a-sections"),
                              display_name="Advanced Sections")
if __name__ == "__main__":
    # Default course layout: how many of each content type to scaffold, and
    # which sub-folders every numbered content folder receives.
    number_of_lectures = 24
    number_of_labs = 13
    number_of_homework = 8
    number_of_advanced_sections = 6
    number_of_sections = 13
    folders = ["data", "fig", "notes", "presentation"]
    # All output paths are derived from the directory this script lives in.
    default_directory = os.path.dirname(os.path.realpath(__file__)) + '/'
    default_directory_lectures = default_directory + "lectures/"
    default_directory_lecture_playground = default_directory + "lectures_playground/"
    default_directory_labs = default_directory + "labs/"
    default_directory_labs_playground = default_directory + "labs_playground/"
    default_directory_homework = default_directory + "homeworks/"
    default_directory_homework_playground = default_directory + "homeworks_playground/"
    default_directory_advanced_sections = default_directory + "a-sections/"
    default_directory_advanced_sections_playground = default_directory + "a_sections_playground/"
    default_directory_sections = default_directory + "sections/"
    default_directory_sections_playground = default_directory + "sections_playground/"
    try:
        print("The default values are :")
        print("Number of lectures: ", number_of_lectures)
        print("Number of labs: ", number_of_labs)
        print("Number of sections: ", number_of_sections)
        print("Number of a-sections: ", number_of_advanced_sections)
        print("Default directory: ", default_directory, '\n')
        # An empty answer means "keep the defaults"; anything else must be y/n.
        change = input("Do you want to change it ? Please press 'y' if you want or 'n' if you do not change it: ")
        if change == '':
            change = 'n'
        while change not in ('y', 'n'):
            change = input("Do you want to change it ? Please press 'y' if you want or 'n' if you do not change it: ")
        if change == 'y':
            default_directory = input("Please enter the default directory: ")
            print("Default directory: ", default_directory, '\n')
            # BUG FIX: recompute *all* derived paths from the new base
            # directory, reusing the same folder names as the defaults above.
            # Previously the sections/advanced-sections/playground paths kept
            # pointing at the old base directory, and several names drifted
            # (e.g. "lecture_playground/" vs "lectures_playground/").
            default_directory_lectures = default_directory + "lectures/"
            default_directory_lecture_playground = default_directory + "lectures_playground/"
            default_directory_labs = default_directory + "labs/"
            default_directory_labs_playground = default_directory + "labs_playground/"
            default_directory_homework = default_directory + "homeworks/"
            default_directory_homework_playground = default_directory + "homeworks_playground/"
            default_directory_advanced_sections = default_directory + "a-sections/"
            default_directory_advanced_sections_playground = default_directory + "a_sections_playground/"
            default_directory_sections = default_directory + "sections/"
            default_directory_sections_playground = default_directory + "sections_playground/"
            number_of_lectures = int(input("Please enter the number of lectures: "))
            print("Number of lectures: ", number_of_lectures, '\n')
            number_of_labs = int(input("Please enter the number of labs: "))
            print("Number of labs: ", number_of_labs, '\n')
            number_of_homework = int(input("Please enter the number of homework: "))
            print("Number of homework: ", number_of_homework, '\n')
            number_of_advanced_sections = int(input("Please enter the number of advanced sections: "))
            print("Number of advanced sections: ", number_of_advanced_sections, '\n')
        ge = GenerateStructure(number_of_lectures, number_of_labs, number_of_homework, number_of_sections,
                               number_of_advanced_sections, folders,
                               default_directory, default_directory_lectures, default_directory_lecture_playground,
                               default_directory_labs, default_directory_labs_playground, default_directory_homework,
                               default_directory_homework_playground, default_directory_advanced_sections,
                               default_directory_advanced_sections_playground,
                               default_directory_sections_playground)
        # Scaffold every content type plus its matching playground tree.
        ge.create_lectures(default_directory_lectures, number_of_lectures, folders)
        ge.create_lecture_playground(default_directory_lecture_playground, number_of_lectures, folders)
        ge.create_labs(default_directory_labs, number_of_labs, folders)
        ge.create_labs(default_directory_labs_playground, number_of_labs, folders)
        ge.create_homework(default_directory_homework, number_of_homework, folders)
        ge.create_homework(default_directory_homework_playground, number_of_homework, folders)
        ge.create_section(default_directory_sections, number_of_sections, folders)
        ge.create_section(default_directory_sections_playground, number_of_sections, folders)
        ge.create_a_section(default_directory_advanced_sections, number_of_advanced_sections, folders)
        ge.create_a_section(default_directory_advanced_sections_playground, number_of_advanced_sections, folders)
    except OSError as err:
        print("OS error: {0}".format(err))
    except ValueError:
        # int() failed on a non-numeric answer to one of the count prompts.
        print("Could not convert data to an integer.")
    except Exception:
        # BUG FIX: was a bare ``except:``, which also intercepted SystemExit
        # and KeyboardInterrupt before re-raising them.
        print("Unexpected error:", sys.exc_info()[0])
        raise
| 44.341912 | 118 | 0.643313 | import os
import sys
from datetime import datetime
class GenerateStructure:
    """Scaffold a course content tree on disk.

    Creates numbered content folders (``lecture1``..., ``lab1``..., ``how0``...,
    ``section1``..., ``a-sec1``...) under a target directory.  Every numbered
    folder is populated with the requested sub-folders (each holding an empty
    ``.placeholder`` file so empty directories survive version control) and,
    for most content types, a Pelican-style ``index.md`` metadata stub.
    """

    def __init__(self, number_of_lectures, number_of_labs, number_of_homework, number_of_sections,
                 number_of_advanced_sections, folders,
                 default_directory, default_directory_lectures, default_directory_lecture_playground,
                 default_directory_labs, default_directory_labs_playground, default_directory_homework,
                 default_directory_homework_playground, default_directory_advanced_sections,
                 default_directory_advanced_sections_playground,
                 default_directory_sections_playground):
        # Counts of each content type to generate.
        self.number_of_lectures = number_of_lectures
        self.number_of_labs = number_of_labs
        self.number_of_homework = number_of_homework
        self.number_of_sections = number_of_sections
        self.number_of_advanced_sections = number_of_advanced_sections
        # Sub-folders created inside every numbered content folder.
        self.folders = folders
        # Base directory plus the per-content-type target directories.
        self.default_directory = default_directory
        self.default_directory_lectures = default_directory_lectures
        self.default_directory_lecture_playground = default_directory_lecture_playground
        self.default_directory_labs = default_directory_labs
        self.default_directory_labs_playground = default_directory_labs_playground
        self.default_directory_homework = default_directory_homework
        self.default_directory_homework_playground = default_directory_homework_playground
        self.default_directory_advanced_sections = default_directory_advanced_sections
        # BUG FIX: this parameter was accepted but never stored.
        self.default_directory_advanced_sections_playground = default_directory_advanced_sections_playground
        # BUG FIX: the original read the *undefined* local name
        # ``default_directory_sections`` and only worked because an identically
        # named global happened to exist in the ``__main__`` block.  Derive the
        # path from the base directory instead, matching the value the script
        # builds (``default_directory + "sections/"``).
        self.default_directory_sections = default_directory + "sections/"
        self.default_directory_sections_playground = default_directory_sections_playground

    @staticmethod
    def create_directory(directory, fold):
        """Create ``directory``/``fold`` containing an empty ``.placeholder``."""
        os.makedirs(directory + fold)
        open(directory + fold + '/.placeholder', 'w').close()

    @staticmethod
    def create_index(directory, title, category, slug, i):
        """Append a Pelican-style metadata stub to ``directory``/index.md."""
        with open(directory + "index.md", 'a') as index:
            index.write("Title: " + title + str(i) + ':\n' +
                        "Category: " + category + '\n' +
                        "Date: " + datetime.today().strftime('%Y-%m-%d') + '\n' +
                        "Author: " + '\n' +
                        "Slug: " + slug + str(i) + '\n' +
                        # BUG FIX: the generated field was misspelled "Tages:".
                        "Tags: ADD TAGS HERE" +
                        '\n\n\n' + "## Slides")

    def _create_numbered(self, directory, prefix, count, folders,
                         start=1, index_args=None, display_name=None):
        """Create ``<prefix><i>`` folders under ``directory`` for i in [start, count].

        Each new folder receives the ``folders`` sub-folders.  ``index_args``
        is an optional ``(title, category, slug)`` triple; when given, an
        ``index.md`` stub is written into every newly created folder.
        ``display_name`` is the name printed in the "already exist" message;
        kept separate because the original messages did not always match the
        folder prefix (e.g. "homework" vs "how").
        """
        if display_name is None:
            display_name = prefix
        for i in range(start, count + 1):
            # Ensure the parent content directory exists before populating it.
            if not os.path.exists(directory):
                os.makedirs(directory)
            if not os.path.exists(directory + prefix + str(i)):
                os.makedirs(directory + prefix + str(i))
                subdirectory = directory + prefix + str(i) + '/'
                for fold in folders:
                    self.create_directory(subdirectory, fold)
                if index_args is not None:
                    title, category, slug = index_args
                    self.create_index(subdirectory, title, category, slug, i)
            else:
                print("The directory : '", directory + display_name + str(i), "' already exist.")

    def create_lectures(self, directory, number_of_hw, folders):
        """Create ``lecture1``..``lectureN`` folders with index stubs.

        NOTE: the second parameter is historically (mis)named ``number_of_hw``
        but holds the number of lectures; the name is kept so existing
        keyword callers do not break.
        """
        self._create_numbered(directory, "lecture", number_of_hw, folders,
                              index_args=("Lecture ", "lectures", "lecture"))

    def create_lecture_playground(self, directory, number_of_lectures, folders):
        """Create ``lecture1``..``lectureN`` playground folders (no index stub)."""
        self._create_numbered(directory, "lecture", number_of_lectures, folders)

    def create_labs(self, directory, number_of_labs, folders):
        """Create ``lab1``..``labN`` folders with index stubs."""
        self._create_numbered(directory, "lab", number_of_labs, folders,
                              index_args=("Lab ", "labs", "lab"))

    def create_homework(self, directory, number_of_labs, folders):
        """Create ``how0``..``howN`` homework folders (no index stub).

        Numbering deliberately starts at 0, so count+1 folders are produced.
        The prefix "how" looks like a typo for "hw" but is preserved for
        backward compatibility -- TODO confirm before renaming.
        """
        self._create_numbered(directory, "how", number_of_labs, folders,
                              start=0, display_name="homework")

    def create_section(self, directory, sections, folders):
        """Create ``section1``..``sectionN`` folders with index stubs."""
        self._create_numbered(directory, "section", sections, folders,
                              index_args=("Sections ", "section", "section"),
                              display_name="Sections")

    def create_a_section(self, directory, advanced_sections, folders):
        """Create ``a-sec1``..``a-secN`` advanced-section folders with index stubs."""
        self._create_numbered(directory, "a-sec", advanced_sections, folders,
                              index_args=("Advanced Sections ", "a-sections", "a-sections"),
                              display_name="Advanced Sections")
if __name__ == "__main__":
    # Default course layout: how many of each content type to scaffold, and
    # which sub-folders every numbered content folder receives.
    number_of_lectures = 24
    number_of_labs = 13
    number_of_homework = 8
    number_of_advanced_sections = 6
    number_of_sections = 13
    folders = ["data", "fig", "notes", "presentation"]
    # All output paths are derived from the directory this script lives in.
    default_directory = os.path.dirname(os.path.realpath(__file__)) + '/'
    default_directory_lectures = default_directory + "lectures/"
    default_directory_lecture_playground = default_directory + "lectures_playground/"
    default_directory_labs = default_directory + "labs/"
    default_directory_labs_playground = default_directory + "labs_playground/"
    default_directory_homework = default_directory + "homeworks/"
    default_directory_homework_playground = default_directory + "homeworks_playground/"
    default_directory_advanced_sections = default_directory + "a-sections/"
    default_directory_advanced_sections_playground = default_directory + "a_sections_playground/"
    default_directory_sections = default_directory + "sections/"
    default_directory_sections_playground = default_directory + "sections_playground/"
    try:
        print("The default values are :")
        print("Number of lectures: ", number_of_lectures)
        print("Number of labs: ", number_of_labs)
        print("Number of sections: ", number_of_sections)
        print("Number of a-sections: ", number_of_advanced_sections)
        print("Default directory: ", default_directory, '\n')
        # An empty answer means "keep the defaults"; anything else must be y/n.
        change = input("Do you want to change it ? Please press 'y' if you want or 'n' if you do not change it: ")
        if change == '':
            change = 'n'
        while change not in ('y', 'n'):
            change = input("Do you want to change it ? Please press 'y' if you want or 'n' if you do not change it: ")
        if change == 'y':
            default_directory = input("Please enter the default directory: ")
            print("Default directory: ", default_directory, '\n')
            # BUG FIX: recompute *all* derived paths from the new base
            # directory, reusing the same folder names as the defaults above.
            # Previously the sections/advanced-sections/playground paths kept
            # pointing at the old base directory, and several names drifted
            # (e.g. "lecture_playground/" vs "lectures_playground/").
            default_directory_lectures = default_directory + "lectures/"
            default_directory_lecture_playground = default_directory + "lectures_playground/"
            default_directory_labs = default_directory + "labs/"
            default_directory_labs_playground = default_directory + "labs_playground/"
            default_directory_homework = default_directory + "homeworks/"
            default_directory_homework_playground = default_directory + "homeworks_playground/"
            default_directory_advanced_sections = default_directory + "a-sections/"
            default_directory_advanced_sections_playground = default_directory + "a_sections_playground/"
            default_directory_sections = default_directory + "sections/"
            default_directory_sections_playground = default_directory + "sections_playground/"
            number_of_lectures = int(input("Please enter the number of lectures: "))
            print("Number of lectures: ", number_of_lectures, '\n')
            number_of_labs = int(input("Please enter the number of labs: "))
            print("Number of labs: ", number_of_labs, '\n')
            number_of_homework = int(input("Please enter the number of homework: "))
            print("Number of homework: ", number_of_homework, '\n')
            number_of_advanced_sections = int(input("Please enter the number of advanced sections: "))
            print("Number of advanced sections: ", number_of_advanced_sections, '\n')
        ge = GenerateStructure(number_of_lectures, number_of_labs, number_of_homework, number_of_sections,
                               number_of_advanced_sections, folders,
                               default_directory, default_directory_lectures, default_directory_lecture_playground,
                               default_directory_labs, default_directory_labs_playground, default_directory_homework,
                               default_directory_homework_playground, default_directory_advanced_sections,
                               default_directory_advanced_sections_playground,
                               default_directory_sections_playground)
        # Scaffold every content type plus its matching playground tree.
        ge.create_lectures(default_directory_lectures, number_of_lectures, folders)
        ge.create_lecture_playground(default_directory_lecture_playground, number_of_lectures, folders)
        ge.create_labs(default_directory_labs, number_of_labs, folders)
        ge.create_labs(default_directory_labs_playground, number_of_labs, folders)
        ge.create_homework(default_directory_homework, number_of_homework, folders)
        ge.create_homework(default_directory_homework_playground, number_of_homework, folders)
        ge.create_section(default_directory_sections, number_of_sections, folders)
        ge.create_section(default_directory_sections_playground, number_of_sections, folders)
        ge.create_a_section(default_directory_advanced_sections, number_of_advanced_sections, folders)
        ge.create_a_section(default_directory_advanced_sections_playground, number_of_advanced_sections, folders)
    except OSError as err:
        print("OS error: {0}".format(err))
    except ValueError:
        # int() failed on a non-numeric answer to one of the count prompts.
        print("Could not convert data to an integer.")
    except Exception:
        # BUG FIX: was a bare ``except:``, which also intercepted SystemExit
        # and KeyboardInterrupt before re-raising them.
        print("Unexpected error:", sys.exc_info()[0])
        raise
raise
| true | true |
f71d1305afa552bee4f335ebedcfe627045c6a38 | 1,760 | py | Python | src/pugnlp/scripts/bon_lsi.py | totalgood/pugnlp | 290e5761b60257a0ac876d1eaa0f1256da945d9a | [
"MIT"
] | 2 | 2019-06-01T10:08:45.000Z | 2020-11-07T08:13:31.000Z | src/pugnlp/scripts/bon_lsi.py | totalgood/pugnlp | 290e5761b60257a0ac876d1eaa0f1256da945d9a | [
"MIT"
] | null | null | null | src/pugnlp/scripts/bon_lsi.py | totalgood/pugnlp | 290e5761b60257a0ac876d1eaa0f1256da945d9a | [
"MIT"
] | 1 | 2018-08-28T20:12:09.000Z | 2018-08-28T20:12:09.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Bot-or-not tweet LSA/LSI model ."""
from __future__ import division, print_function, absolute_import, unicode_literals
from builtins import ( # noqa
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
import logging
import pandas as pd
from nltk.tokenize.casual import casual_tokenize
from gensim.corpora import Dictionary
from gensim.models import LsiModel, TfidfModel
logger = logging.getLogger(__name__)
np = pd.np
def main(Tweet=None):
qs = Tweet.objects.filter(is_strict__gte=13)
tweets = np.array(qs.values_list('pk', 'text', 'user__screen_name', 'user__is_bot'))
tweets = pd.DataFrame(np.array(tweets), columns='pk text user is_bot'.split())
tweets = tweets.set_index('pk', drop=True)
tweets['tokens'] = tweets.text.apply(casual_tokenize)
vocab = Dictionary(tweets.tokens)
tfidf = TfidfModel(dictionary=vocab, id2word=vocab)
bows = pd.Series(vocab.doc2bow(toks) for toks in tweets.tokens)
lsi = LsiModel(tfidf[bows], num_topics=80, id2word=vocab, extra_samples=100, power_iters=2)
lsi.save('/home/hobs/src/hackor/twote/data/lsi{}x{}x{}.saved'.format(len(tweets), lsi.num_topics, lsi.num_terms))
topics = lsi[tfidf[bows]]
topics = pd.DataFrame([dict(d) for d in topics], index=tweets.index, columns=range(80))
if __name__ == '__main__':
try:
from twote.models import Tweet
except (ImportError, ModuleNotFoundError):
try:
from openchat.models import Tweet
except (ImportError, ModuleNotFoundError):
Tweet = object
logger.warn('Unable to import a Tweet data model (ORM object)')
main(Tweet)
| 36.666667 | 117 | 0.696023 |
from __future__ import division, print_function, absolute_import, unicode_literals
from builtins import (
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, super,
filter, map, zip)
import logging
import pandas as pd
from nltk.tokenize.casual import casual_tokenize
from gensim.corpora import Dictionary
from gensim.models import LsiModel, TfidfModel
logger = logging.getLogger(__name__)
np = pd.np
def main(Tweet=None):
qs = Tweet.objects.filter(is_strict__gte=13)
tweets = np.array(qs.values_list('pk', 'text', 'user__screen_name', 'user__is_bot'))
tweets = pd.DataFrame(np.array(tweets), columns='pk text user is_bot'.split())
tweets = tweets.set_index('pk', drop=True)
tweets['tokens'] = tweets.text.apply(casual_tokenize)
vocab = Dictionary(tweets.tokens)
tfidf = TfidfModel(dictionary=vocab, id2word=vocab)
bows = pd.Series(vocab.doc2bow(toks) for toks in tweets.tokens)
lsi = LsiModel(tfidf[bows], num_topics=80, id2word=vocab, extra_samples=100, power_iters=2)
lsi.save('/home/hobs/src/hackor/twote/data/lsi{}x{}x{}.saved'.format(len(tweets), lsi.num_topics, lsi.num_terms))
topics = lsi[tfidf[bows]]
topics = pd.DataFrame([dict(d) for d in topics], index=tweets.index, columns=range(80))
if __name__ == '__main__':
try:
from twote.models import Tweet
except (ImportError, ModuleNotFoundError):
try:
from openchat.models import Tweet
except (ImportError, ModuleNotFoundError):
Tweet = object
logger.warn('Unable to import a Tweet data model (ORM object)')
main(Tweet)
| true | true |
f71d14d5d94757d06616ea762afce34495659c34 | 6,980 | py | Python | mlcomp/test/run_test.py | lisapm/mlpiper | 74ad5ae343d364682cc2f8aaa007f2e8a1d84929 | [
"Apache-2.0"
] | 7 | 2019-04-08T02:31:55.000Z | 2021-11-15T14:40:49.000Z | mlcomp/test/run_test.py | lisapm/mlpiper | 74ad5ae343d364682cc2f8aaa007f2e8a1d84929 | [
"Apache-2.0"
] | 31 | 2019-02-22T22:23:26.000Z | 2021-08-02T17:17:06.000Z | mlcomp/test/run_test.py | lisapm/mlpiper | 74ad5ae343d364682cc2f8aaa007f2e8a1d84929 | [
"Apache-2.0"
] | 8 | 2019-03-15T23:46:08.000Z | 2020-02-06T09:16:02.000Z | #!/usr/bin/env python3
import argparse
from os.path import expanduser
import glob
import json
import os
import re
import shutil
import subprocess
import tempfile
from termcolor import cprint, colored
class TestRunner:
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
def __init__(self, options):
self._options = options
self._main_egg_filepath = os.path.join(TestRunner.SCRIPT_DIR, '../dist/*.egg')
self._test_root_path = tempfile.mkdtemp(suffix='_mlcomp_it')
self._model_save_path = os.path.join(self._test_root_path, "saved-model")
self._test_pipeline_path = self._get_pipeline_path()
self._tmp_comps_egg_dir = tempfile.mkdtemp(suffix='_mlcomp_it_comps')
self._pipeline_json = None
if os.path.isdir(self._model_save_path):
shutil.rmtree(self._model_save_path)
print("\n*** " + colored("{}".format(os.path.basename(self._test_pipeline_path)), "cyan") + " ***\n")
fmt = "{:<25} {}"
print(fmt.format("Pipeline path:", self._test_pipeline_path))
print(fmt.format("Test dir:", self._test_root_path))
print(fmt.format("Components egg tmp dir:", self._tmp_comps_egg_dir))
print("")
def _get_pipeline_path(self):
if os.path.isabs(self._options.test):
test_pipeline_path = self._options.test
else:
test_pipeline_path = TestRunner.SCRIPT_DIR + "/" + self._options.test
if not test_pipeline_path.endswith('.json'):
test_pipeline_path += '.json'
if not os.path.isfile(test_pipeline_path):
raise Exception("Pipeline test file not found! path: {}".format(test_pipeline_path))
return test_pipeline_path
def go(self):
try:
self._create_main_egg()
self._load_pipeline()
self._create_components_egg()
self._prepare_test_dir()
self._execute_program()
self._cleanup()
cprint("\nTest passed successfully!\n", "green")
except Exception as e:
colored("Test failed!\n", "red")
raise e
def _create_main_egg(self):
cmd = TestRunner.SCRIPT_DIR + '/../bin/create-egg.sh --silent'
subprocess.check_call(cmd, shell=True)
def _create_components_egg(self):
dst_comp_tmp_dir = self._tmp_comps_egg_dir + '/parallelm/code_components'
os.makedirs(dst_comp_tmp_dir)
comp_names = set([e['type'] for e in self._pipeline_json['pipe']])
for comp_name in comp_names:
src_comp_dir = self._options.comps_root + '/' + comp_name
dst_comp_dir = dst_comp_tmp_dir + '/' + comp_name
shutil.copytree(src_comp_dir, dst_comp_dir)
shutil.copy(TestRunner.SCRIPT_DIR + '/setup.py', self._tmp_comps_egg_dir)
open(dst_comp_tmp_dir + '/__init__.py', 'w').close()
with open(dst_comp_tmp_dir + '/../__init__.py', 'w') as f:
f.write("__import__('pkg_resources').declare_namespace(__name__)")
create_egg_cmd = '{}/../bin/create-egg.sh --root={} --silent'.format(TestRunner.SCRIPT_DIR, self._tmp_comps_egg_dir)
subprocess.check_call(create_egg_cmd, shell=True)
for egg_filepath in glob.glob(self._tmp_comps_egg_dir + '/' + 'dist/*.egg'):
shutil.copy(egg_filepath, self._test_root_path)
def _load_pipeline(self):
with open(self._test_pipeline_path, 'r') as f:
content = f.read()
pipeline_dir = os.path.realpath(os.path.dirname(self._test_pipeline_path))
revised_content = re.sub(r'\$__pipeline_dir__\$', pipeline_dir, content, flags=re.M)
self._pipeline_json = json.loads(revised_content)
def _prepare_test_dir(self):
self._dst_test_driver_path = self._test_root_path + '/driver.py'
main_py_path = os.path.join(TestRunner.SCRIPT_DIR, '../__main__.py')
shutil.copyfile(main_py_path, self._dst_test_driver_path)
for egg_filepath in glob.glob(self._main_egg_filepath):
shutil.copy(egg_filepath, self._test_root_path)
self._pipeline_json['systemConfig']['modelFileSinkPath'] = self._model_save_path
self._dst_test_pipeline_path = self._test_root_path + '/' + os.path.basename(self._test_pipeline_path)
with open(self._dst_test_pipeline_path, 'w') as f:
json.dump(self._pipeline_json, f)
def _execute_program(self):
master = 'spark://localhost:7077' if self._options.local_cluster else 'local[*]'
eggs = ','.join(glob.glob(self._test_root_path + '/*.egg'))
spark_submit_tool = os.environ['SPARK_HOME'] + "/bin/spark-submit"
submit_cmd = '{} --master {} --py-files {} {} exec -f {}'.format(spark_submit_tool, master, eggs,
self._dst_test_driver_path,
self._dst_test_pipeline_path)
print("--- Start of Engine Output ---")
with subprocess.Popen(submit_cmd, shell=True, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True) as p:
for line in p.stdout:
print(line)
print("--- End of Engine Output ---")
if p.returncode != 0:
print("Test failed!", "red")
raise subprocess.CalledProcessError(returncode=p.returncode, cmd=p.args)
subprocess.check_call(TestRunner.SCRIPT_DIR + '/../bin/cleanup.sh', shell=True)
print("\n-----------------")
def _cleanup(self):
print("Cleaning up ... " + self._tmp_comps_egg_dir)
shutil.rmtree(self._tmp_comps_egg_dir)
should_clean = 'y'
if self._options.ask_clean:
should_clean = input("\nShould clean up test root path [{}]? [Y|n] "
.format(self._test_root_path)).lower()
if should_clean == 'y':
print("Cleaning up ... " + self._test_root_path)
shutil.rmtree(self._test_root_path)
def parse_args(args):
parser = argparse.ArgumentParser(description='Run full PySpark integration test')
parser.add_argument('--test', default='pi-pipeline-rdd.json',
help='test pipeline json file path')
parser.add_argument('--comps-root',
default=os.path.join(expanduser("~"), "dev/mlops-components/dev/connectable/pyspark/spark-context"),
help='ml components root dir')
parser.add_argument('--local-cluster', action="store_true",
help='Specify whether to run test on local Spark cluster [default: embedded]')
parser.add_argument('--ask-clean', action="store_true", default=False,
help="Wait for user's confirmation before cleanup")
args = parser.parse_args(args)
return args
def main(args=None):
options = parse_args(args)
TestRunner(options).go()
if __name__ == '__main__':
main()
| 40.114943 | 124 | 0.629656 |
import argparse
from os.path import expanduser
import glob
import json
import os
import re
import shutil
import subprocess
import tempfile
from termcolor import cprint, colored
class TestRunner:
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
def __init__(self, options):
self._options = options
self._main_egg_filepath = os.path.join(TestRunner.SCRIPT_DIR, '../dist/*.egg')
self._test_root_path = tempfile.mkdtemp(suffix='_mlcomp_it')
self._model_save_path = os.path.join(self._test_root_path, "saved-model")
self._test_pipeline_path = self._get_pipeline_path()
self._tmp_comps_egg_dir = tempfile.mkdtemp(suffix='_mlcomp_it_comps')
self._pipeline_json = None
if os.path.isdir(self._model_save_path):
shutil.rmtree(self._model_save_path)
print("\n*** " + colored("{}".format(os.path.basename(self._test_pipeline_path)), "cyan") + " ***\n")
fmt = "{:<25} {}"
print(fmt.format("Pipeline path:", self._test_pipeline_path))
print(fmt.format("Test dir:", self._test_root_path))
print(fmt.format("Components egg tmp dir:", self._tmp_comps_egg_dir))
print("")
def _get_pipeline_path(self):
if os.path.isabs(self._options.test):
test_pipeline_path = self._options.test
else:
test_pipeline_path = TestRunner.SCRIPT_DIR + "/" + self._options.test
if not test_pipeline_path.endswith('.json'):
test_pipeline_path += '.json'
if not os.path.isfile(test_pipeline_path):
raise Exception("Pipeline test file not found! path: {}".format(test_pipeline_path))
return test_pipeline_path
def go(self):
try:
self._create_main_egg()
self._load_pipeline()
self._create_components_egg()
self._prepare_test_dir()
self._execute_program()
self._cleanup()
cprint("\nTest passed successfully!\n", "green")
except Exception as e:
colored("Test failed!\n", "red")
raise e
def _create_main_egg(self):
cmd = TestRunner.SCRIPT_DIR + '/../bin/create-egg.sh --silent'
subprocess.check_call(cmd, shell=True)
def _create_components_egg(self):
dst_comp_tmp_dir = self._tmp_comps_egg_dir + '/parallelm/code_components'
os.makedirs(dst_comp_tmp_dir)
comp_names = set([e['type'] for e in self._pipeline_json['pipe']])
for comp_name in comp_names:
src_comp_dir = self._options.comps_root + '/' + comp_name
dst_comp_dir = dst_comp_tmp_dir + '/' + comp_name
shutil.copytree(src_comp_dir, dst_comp_dir)
shutil.copy(TestRunner.SCRIPT_DIR + '/setup.py', self._tmp_comps_egg_dir)
open(dst_comp_tmp_dir + '/__init__.py', 'w').close()
with open(dst_comp_tmp_dir + '/../__init__.py', 'w') as f:
f.write("__import__('pkg_resources').declare_namespace(__name__)")
create_egg_cmd = '{}/../bin/create-egg.sh --root={} --silent'.format(TestRunner.SCRIPT_DIR, self._tmp_comps_egg_dir)
subprocess.check_call(create_egg_cmd, shell=True)
for egg_filepath in glob.glob(self._tmp_comps_egg_dir + '/' + 'dist/*.egg'):
shutil.copy(egg_filepath, self._test_root_path)
def _load_pipeline(self):
with open(self._test_pipeline_path, 'r') as f:
content = f.read()
pipeline_dir = os.path.realpath(os.path.dirname(self._test_pipeline_path))
revised_content = re.sub(r'\$__pipeline_dir__\$', pipeline_dir, content, flags=re.M)
self._pipeline_json = json.loads(revised_content)
def _prepare_test_dir(self):
self._dst_test_driver_path = self._test_root_path + '/driver.py'
main_py_path = os.path.join(TestRunner.SCRIPT_DIR, '../__main__.py')
shutil.copyfile(main_py_path, self._dst_test_driver_path)
for egg_filepath in glob.glob(self._main_egg_filepath):
shutil.copy(egg_filepath, self._test_root_path)
self._pipeline_json['systemConfig']['modelFileSinkPath'] = self._model_save_path
self._dst_test_pipeline_path = self._test_root_path + '/' + os.path.basename(self._test_pipeline_path)
with open(self._dst_test_pipeline_path, 'w') as f:
json.dump(self._pipeline_json, f)
def _execute_program(self):
master = 'spark://localhost:7077' if self._options.local_cluster else 'local[*]'
eggs = ','.join(glob.glob(self._test_root_path + '/*.egg'))
spark_submit_tool = os.environ['SPARK_HOME'] + "/bin/spark-submit"
submit_cmd = '{} --master {} --py-files {} {} exec -f {}'.format(spark_submit_tool, master, eggs,
self._dst_test_driver_path,
self._dst_test_pipeline_path)
print("--- Start of Engine Output ---")
with subprocess.Popen(submit_cmd, shell=True, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True) as p:
for line in p.stdout:
print(line)
print("--- End of Engine Output ---")
if p.returncode != 0:
print("Test failed!", "red")
raise subprocess.CalledProcessError(returncode=p.returncode, cmd=p.args)
subprocess.check_call(TestRunner.SCRIPT_DIR + '/../bin/cleanup.sh', shell=True)
print("\n-----------------")
def _cleanup(self):
print("Cleaning up ... " + self._tmp_comps_egg_dir)
shutil.rmtree(self._tmp_comps_egg_dir)
should_clean = 'y'
if self._options.ask_clean:
should_clean = input("\nShould clean up test root path [{}]? [Y|n] "
.format(self._test_root_path)).lower()
if should_clean == 'y':
print("Cleaning up ... " + self._test_root_path)
shutil.rmtree(self._test_root_path)
def parse_args(args):
parser = argparse.ArgumentParser(description='Run full PySpark integration test')
parser.add_argument('--test', default='pi-pipeline-rdd.json',
help='test pipeline json file path')
parser.add_argument('--comps-root',
default=os.path.join(expanduser("~"), "dev/mlops-components/dev/connectable/pyspark/spark-context"),
help='ml components root dir')
parser.add_argument('--local-cluster', action="store_true",
help='Specify whether to run test on local Spark cluster [default: embedded]')
parser.add_argument('--ask-clean', action="store_true", default=False,
help="Wait for user's confirmation before cleanup")
args = parser.parse_args(args)
return args
def main(args=None):
options = parse_args(args)
TestRunner(options).go()
if __name__ == '__main__':
main()
| true | true |
f71d14e93643f0e1bfa895edafc0dd3f02d0b475 | 226 | py | Python | setup.py | bethanymorin/smt-scrapy | 50878b47094d0b4d72c483802d3aec9077c2b16b | [
"MIT"
] | null | null | null | setup.py | bethanymorin/smt-scrapy | 50878b47094d0b4d72c483802d3aec9077c2b16b | [
"MIT"
] | 3 | 2017-08-15T23:51:47.000Z | 2017-09-26T18:27:55.000Z | setup.py | bethanymorin/smt-scrapy | 50878b47094d0b4d72c483802d3aec9077c2b16b | [
"MIT"
] | null | null | null | # Automatically created by: shub deploy
from setuptools import setup, find_packages
setup(
name='project',
version='1.0',
packages=find_packages(),
entry_points={'scrapy': ['settings = scraper.settings']},
)
| 20.545455 | 61 | 0.69469 |
from setuptools import setup, find_packages
setup(
name='project',
version='1.0',
packages=find_packages(),
entry_points={'scrapy': ['settings = scraper.settings']},
)
| true | true |
f71d157d8596c23bae0491b174ad13908d464306 | 300 | py | Python | fructify/blueprints/debug.py | fffergal/fructify | f6043e157d3ecfce1cafba6d16769b21f6defb4b | [
"Apache-2.0"
] | null | null | null | fructify/blueprints/debug.py | fffergal/fructify | f6043e157d3ecfce1cafba6d16769b21f6defb4b | [
"Apache-2.0"
] | 20 | 2020-03-14T16:05:43.000Z | 2022-02-20T23:55:53.000Z | fructify/blueprints/debug.py | fffergal/fructify | f6043e157d3ecfce1cafba6d16769b21f6defb4b | [
"Apache-2.0"
] | null | null | null | from flask import Blueprint, request
bp = Blueprint("debug", __name__)
@bp.route("/api/v1/debug", methods=["DELETE", "GET", "POST"])
def debug():
if request.method == "DELETE":
raise Exception
if request.method != "POST":
return request.args
return request.get_data()
| 21.428571 | 61 | 0.64 | from flask import Blueprint, request
bp = Blueprint("debug", __name__)
@bp.route("/api/v1/debug", methods=["DELETE", "GET", "POST"])
def debug():
if request.method == "DELETE":
raise Exception
if request.method != "POST":
return request.args
return request.get_data()
| true | true |
f71d16727351212210fec29a8089ad61e1171cad | 10,375 | py | Python | process/VtkRenderer.py | sameeptandon/sail-car-log | 0ee3d598bb09d389bcbd2ebf73cd4b2411e796be | [
"BSD-2-Clause"
] | 1 | 2021-02-24T03:11:13.000Z | 2021-02-24T03:11:13.000Z | process/VtkRenderer.py | sameeptandon/sail-car-log | 0ee3d598bb09d389bcbd2ebf73cd4b2411e796be | [
"BSD-2-Clause"
] | null | null | null | process/VtkRenderer.py | sameeptandon/sail-car-log | 0ee3d598bb09d389bcbd2ebf73cd4b2411e796be | [
"BSD-2-Clause"
] | 3 | 2015-03-18T14:36:04.000Z | 2018-07-04T02:57:24.000Z | import vtk
from numpy import random
import numpy as np
import vtk.util.numpy_support as converter
import time
import cv2
import itertools
class VtkText:
def __init__(self, text, pos):
self.text = text
self.pos = pos
def get_vtk_text(self):
txt = vtk.vtkTextActor()
txt.SetInput(self.text)
txtprop=txt.GetTextProperty()
txtprop.SetFontFamilyToArial()
txtprop.SetFontSize(18)
txtprop.SetColor(1,1,1)
txt.SetDisplayPosition(*self.pos)
return txt
class VtkLine:
def __init__(self, p0, p1):
self.p0 = p0
self.p1 = p1
def get_vtk_line(self):
source = vtk.vtkLineSource()
source.SetPoint1(self.p0)
source.SetPoint2(self.p1)
# mapper
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(source.GetOutputPort())
# actor
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# assign actor to the renderer
return actor
class VtkPlane:
def __init__(self, norm, xyz):
self.norm = norm
self.xyz = xyz
def get_vtk_plane(self, side_len=25):
# cube = vtk.vtkCubeSource()
# cube.SetXLength(side_len)
# cube.SetYLength(side_len)
# cube.SetZLength(side_len)
# cube.SetCenter(*self.pos)
cube = vtk.vtkSphereSource()
cube.SetThetaResolution(100)
cube.SetPhiResolution(100)
cube.SetRadius(side_len)
cube.SetCenter(*self.xyz)
cubeMapper = vtk.vtkPolyDataMapper()
cubeMapper.SetInputConnection(cube.GetOutputPort())
plane = vtk.vtkPlane()
plane.SetOrigin(*self.xyz)
plane.SetNormal(*self.norm)
#create cutter
cutter = vtk.vtkCutter()
cutter.SetCutFunction(plane)
cutter.SetInputConnection(cube.GetOutputPort())
cutter.Update()
cutStrips = vtk.vtkStripper()
cutStrips.SetInputConnection(cutter.GetOutputPort())
cutStrips.Update()
cutPoly = vtk.vtkPolyData()
cutPoly.SetPoints((cutStrips.GetOutput()).GetPoints())
cutPoly.SetPolys((cutStrips.GetOutput()).GetLines())
cutMapper = vtk.vtkPolyDataMapper()
cutMapper.SetInput(cutPoly)
cutActor = vtk.vtkActor()
cutActor.GetProperty().SetColor(1, 1, 1)
cutActor.SetMapper(cutMapper)
return cutActor
class VtkImage:
def __init__(self, im):
self.im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
def get_vtk_image(self):
importer = vtk.vtkImageImport()
importer.SetDataSpacing(1,1,1)
importer.SetDataOrigin(0,0,0)
importer.SetWholeExtent(0, self.im.shape[1] - 1,
0, self.im.shape[0] - 1, 0, 0)
importer.SetDataExtentToWholeExtent()
importer.SetDataScalarTypeToUnsignedChar()
importer.SetNumberOfScalarComponents(self.im.shape[2])
importer.SetImportVoidPointer(self.im)
importer.Update()
flipY = vtk.vtkImageFlip()
flipY.SetFilteredAxis(1)
flipY.SetInputConnection(importer.GetOutputPort())
flipY.Update()
yActor = vtk.vtkImageActor()
yActor.SetInput(flipY.GetOutput())
return yActor
class VtkEllipsoid:
def __init__(self, T):
self.T = (T[0,0], T[0,1], T[0,2], T[0,3],
T[1,0], T[1,1], T[1,2], T[1,3],
T[2,0], T[2,1], T[2,2], T[2,3],
T[3,0], T[3,1], T[3,2], T[3,3])
def get_vtk_ellipsoid(self):
self.transformMatrix = vtk.vtkMatrix4x4()
self.transformMatrix.DeepCopy(self.T)
transform = vtk.vtkTransform()
transform.SetMatrix(self.transformMatrix)
self.source = vtk.vtkSphereSource()
self.source.SetRadius(1.0)
self.source.SetCenter(0.0,0.0,0.0)
transformFilter = vtk.vtkTransformPolyDataFilter()
transformFilter.SetTransform(transform)
transformFilter.SetInputConnection(self.source.GetOutputPort())
transformFilter.Update()
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(transformFilter.GetOutputPort())
actor = vtk.vtkActor()
actor.GetProperty().SetOpacity(0.8)
actor.SetMapper(mapper)
# assign actor to the renderer
return actor
class VtkBoundingBox:
def __init__(self, properties):
# (x, y) is the center-back of the car
(x, y, z, l, w) = tuple(properties[:5])
h = 1
x = [x, x+l]
y = [y-w/2., y+w/2.]
z = [z-h/2., z+h/2.]
self.bounds = (x[0], x[1], y[0], y[1], z[0], z[1])
self.actor = None
self.source = None
def get_vtk_box(self, rot = 0):
# create source
source = vtk.vtkCubeSource()
source.SetBounds(self.bounds)
self.source = source
# mapper
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(source.GetOutputPort())
# actor
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetRepresentationToWireframe()
actor.GetProperty().SetLineWidth(1)
actor.GetProperty().LightingOff()
actor.SetOrigin(source.GetCenter())
actor.RotateZ(rot)
self.actor = actor
# assign actor to the renderer
return actor
"""
Uses VTK to render a point cloud based on intensities, which might be floating point numbers or RGB values.
Disclaimer: This function passes points to vtk, so make sure that your data does not get deallocated by python. Somethings are copied too. It's not really an efficient function and it's a slight miracle that it even works.
For internal VTK debugging, here's the layout of things:
- vtkPoints consist of the x,y,z coordinates. Pass in an array of size m x 3 (where m is the number of points)
- vtkCells tells vtk how to render the points. It is formatted as "1 1 1 2 1 3 ... 1 m' where the 1 tells how many points it should consider in a surface and the even element says which point id to use.
The function build_vtk_polydata will do the assembling magic.
Then you can call get_vtk_color_cloud or get_vtk_cloud based on how you want to color map each point.
"""
class VtkPointCloud:
def __init__(self, xyz, intensity):
self.xyz = np.ascontiguousarray(xyz)
self.intensity = np.ascontiguousarray(intensity)
num_points = self.xyz.shape[0]
np_cells_A = np.ones(num_points,dtype=np.int64)
np_cells_B = np.arange(0,num_points,dtype=np.int64)
self.np_cells = np.empty(2*num_points,dtype=np.int64)
self.np_cells[::2] = np_cells_A
self.np_cells[1::2] = np_cells_B
self.actor = None
def build_vtk_polydata(self):
vtkPolyData = vtk.vtkPolyData()
vtkPoints = vtk.vtkPoints()
vtkCells = vtk.vtkCellArray()
vtkPolyData.SetPoints(vtkPoints)
vtkPolyData.SetVerts(vtkCells)
num_points = self.xyz.shape[0]
vtk_data = converter.numpy_to_vtk(self.xyz)
vtkPoints.SetNumberOfPoints(num_points)
vtkPoints.SetData(vtk_data)
vtkCells.SetCells(num_points, converter.numpy_to_vtkIdTypeArray(self.np_cells, deep=1))
return (vtkPolyData, vtkPoints, vtkCells)
def get_vtk_color_cloud(self):
assert(self.intensity.shape[1] == 3)
(vtkPolyData, vtkPoints, vtkCells) = self.build_vtk_polydata()
self.intensity = self.intensity.astype(np.uint8)
vtk_color_data = converter.numpy_to_vtk(self.intensity)
vtk_color_data.SetName('ColorArray')
vtkPolyData.GetPointData().SetScalars(vtk_color_data)
vtkPolyData.GetPointData().SetActiveScalars('ColorArray')
mapper = vtk.vtkPolyDataMapper()
mapper.SetInput(vtkPolyData)
vtkActor = vtk.vtkActor()
vtkActor.SetMapper(mapper)
self.actor = vtkActor
return vtkActor
def get_vtk_cloud(self, zMin=-10.0,zMax=10.0):
assert( len(self.intensity.shape) == 1)
(vtkPolyData, vtkPoints, vtkCells) = self.build_vtk_polydata()
self.intensity == self.intensity.astype(np.float32)
vtk_intensity_data = converter.numpy_to_vtk(self.intensity)
vtk_intensity_data.SetName('DepthArray')
vtkPolyData.GetPointData().SetScalars(vtk_intensity_data)
num_points = self.xyz.shape[0]
#vtkDepth = vtk.vtkFloatArray()
#vtkDepth.SetName('DepthArray')
#vtkPolyData.GetPointData().SetScalars(vtkDepth)
#vtkDepth.SetVoidArray(self.intensity, num_points, 1)
vtkPolyData.GetPointData().SetActiveScalars('DepthArray')
mapper = vtk.vtkPolyDataMapper()
mapper.SetInput(vtkPolyData)
mapper.SetColorModeToDefault()
mapper.SetScalarRange(zMin, zMax)
mapper.SetScalarVisibility(1)
vtkActor = vtk.vtkActor()
vtkActor.SetMapper(mapper)
self.actor = vtkActor
return vtkActor
############# sample callback setup ###############
class vtkTimerCallback():
def __init__(self):
pass
def execute(self,obj,event):
t = time.time()
data = 40*(random.random((60000,3))-0.5)
pointCloud = VtkPointCloud(data, data[:,2])
iren = obj
iren.GetRenderWindow().GetRenderers().GetFirstRenderer().RemoveActor(self.actor)
self.actor = pointCloud.get_vtk_cloud()
iren.GetRenderWindow().GetRenderers().GetFirstRenderer().AddActor(self.actor)
iren.GetRenderWindow().Render()
print time.time() - t
if __name__ == '__main__':
data = 40*(random.random((600,3))-0.5)
pointCloud = VtkPointCloud(data, data[:,2])
actor = pointCloud.get_vtk_cloud()
# Renderer
renderer = vtk.vtkRenderer()
renderer.AddActor(actor)
renderer.SetBackground(0.0, 0.0, 0.)
renderer.ResetCamera()
# Render Window
renderWindow = vtk.vtkRenderWindow()
renderWindow.SetSize(600,600)
renderWindow.AddRenderer(renderer)
# Interactor
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
# Begin Interaction
renderWindow.Render()
renderWindowInteractor.Initialize()
cb = vtkTimerCallback()
cb.actor = actor
renderWindowInteractor.AddObserver('TimerEvent', cb.execute)
timerId = renderWindowInteractor.CreateRepeatingTimer(50)
renderWindowInteractor.Start()
| 31.825153 | 222 | 0.652145 | import vtk
from numpy import random
import numpy as np
import vtk.util.numpy_support as converter
import time
import cv2
import itertools
class VtkText:
def __init__(self, text, pos):
self.text = text
self.pos = pos
def get_vtk_text(self):
txt = vtk.vtkTextActor()
txt.SetInput(self.text)
txtprop=txt.GetTextProperty()
txtprop.SetFontFamilyToArial()
txtprop.SetFontSize(18)
txtprop.SetColor(1,1,1)
txt.SetDisplayPosition(*self.pos)
return txt
class VtkLine:
def __init__(self, p0, p1):
self.p0 = p0
self.p1 = p1
def get_vtk_line(self):
source = vtk.vtkLineSource()
source.SetPoint1(self.p0)
source.SetPoint2(self.p1)
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(source.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
return actor
class VtkPlane:
def __init__(self, norm, xyz):
self.norm = norm
self.xyz = xyz
def get_vtk_plane(self, side_len=25):
cube = vtk.vtkSphereSource()
cube.SetThetaResolution(100)
cube.SetPhiResolution(100)
cube.SetRadius(side_len)
cube.SetCenter(*self.xyz)
cubeMapper = vtk.vtkPolyDataMapper()
cubeMapper.SetInputConnection(cube.GetOutputPort())
plane = vtk.vtkPlane()
plane.SetOrigin(*self.xyz)
plane.SetNormal(*self.norm)
cutter = vtk.vtkCutter()
cutter.SetCutFunction(plane)
cutter.SetInputConnection(cube.GetOutputPort())
cutter.Update()
cutStrips = vtk.vtkStripper()
cutStrips.SetInputConnection(cutter.GetOutputPort())
cutStrips.Update()
cutPoly = vtk.vtkPolyData()
cutPoly.SetPoints((cutStrips.GetOutput()).GetPoints())
cutPoly.SetPolys((cutStrips.GetOutput()).GetLines())
cutMapper = vtk.vtkPolyDataMapper()
cutMapper.SetInput(cutPoly)
cutActor = vtk.vtkActor()
cutActor.GetProperty().SetColor(1, 1, 1)
cutActor.SetMapper(cutMapper)
return cutActor
class VtkImage:
def __init__(self, im):
self.im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
def get_vtk_image(self):
importer = vtk.vtkImageImport()
importer.SetDataSpacing(1,1,1)
importer.SetDataOrigin(0,0,0)
importer.SetWholeExtent(0, self.im.shape[1] - 1,
0, self.im.shape[0] - 1, 0, 0)
importer.SetDataExtentToWholeExtent()
importer.SetDataScalarTypeToUnsignedChar()
importer.SetNumberOfScalarComponents(self.im.shape[2])
importer.SetImportVoidPointer(self.im)
importer.Update()
flipY = vtk.vtkImageFlip()
flipY.SetFilteredAxis(1)
flipY.SetInputConnection(importer.GetOutputPort())
flipY.Update()
yActor = vtk.vtkImageActor()
yActor.SetInput(flipY.GetOutput())
return yActor
class VtkEllipsoid:
def __init__(self, T):
self.T = (T[0,0], T[0,1], T[0,2], T[0,3],
T[1,0], T[1,1], T[1,2], T[1,3],
T[2,0], T[2,1], T[2,2], T[2,3],
T[3,0], T[3,1], T[3,2], T[3,3])
def get_vtk_ellipsoid(self):
self.transformMatrix = vtk.vtkMatrix4x4()
self.transformMatrix.DeepCopy(self.T)
transform = vtk.vtkTransform()
transform.SetMatrix(self.transformMatrix)
self.source = vtk.vtkSphereSource()
self.source.SetRadius(1.0)
self.source.SetCenter(0.0,0.0,0.0)
transformFilter = vtk.vtkTransformPolyDataFilter()
transformFilter.SetTransform(transform)
transformFilter.SetInputConnection(self.source.GetOutputPort())
transformFilter.Update()
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(transformFilter.GetOutputPort())
actor = vtk.vtkActor()
actor.GetProperty().SetOpacity(0.8)
actor.SetMapper(mapper)
return actor
class VtkBoundingBox:
def __init__(self, properties):
(x, y, z, l, w) = tuple(properties[:5])
h = 1
x = [x, x+l]
y = [y-w/2., y+w/2.]
z = [z-h/2., z+h/2.]
self.bounds = (x[0], x[1], y[0], y[1], z[0], z[1])
self.actor = None
self.source = None
def get_vtk_box(self, rot = 0):
source = vtk.vtkCubeSource()
source.SetBounds(self.bounds)
self.source = source
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(source.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetRepresentationToWireframe()
actor.GetProperty().SetLineWidth(1)
actor.GetProperty().LightingOff()
actor.SetOrigin(source.GetCenter())
actor.RotateZ(rot)
self.actor = actor
return actor
"""
Uses VTK to render a point cloud based on intensities, which might be floating point numbers or RGB values.
Disclaimer: This function passes points to vtk, so make sure that your data does not get deallocated by python. Somethings are copied too. It's not really an efficient function and it's a slight miracle that it even works.
For internal VTK debugging, here's the layout of things:
- vtkPoints consist of the x,y,z coordinates. Pass in an array of size m x 3 (where m is the number of points)
- vtkCells tells vtk how to render the points. It is formatted as "1 1 1 2 1 3 ... 1 m' where the 1 tells how many points it should consider in a surface and the even element says which point id to use.
The function build_vtk_polydata will do the assembling magic.
Then you can call get_vtk_color_cloud or get_vtk_cloud based on how you want to color map each point.
"""
class VtkPointCloud:
def __init__(self, xyz, intensity):
self.xyz = np.ascontiguousarray(xyz)
self.intensity = np.ascontiguousarray(intensity)
num_points = self.xyz.shape[0]
np_cells_A = np.ones(num_points,dtype=np.int64)
np_cells_B = np.arange(0,num_points,dtype=np.int64)
self.np_cells = np.empty(2*num_points,dtype=np.int64)
self.np_cells[::2] = np_cells_A
self.np_cells[1::2] = np_cells_B
self.actor = None
def build_vtk_polydata(self):
vtkPolyData = vtk.vtkPolyData()
vtkPoints = vtk.vtkPoints()
vtkCells = vtk.vtkCellArray()
vtkPolyData.SetPoints(vtkPoints)
vtkPolyData.SetVerts(vtkCells)
num_points = self.xyz.shape[0]
vtk_data = converter.numpy_to_vtk(self.xyz)
vtkPoints.SetNumberOfPoints(num_points)
vtkPoints.SetData(vtk_data)
vtkCells.SetCells(num_points, converter.numpy_to_vtkIdTypeArray(self.np_cells, deep=1))
return (vtkPolyData, vtkPoints, vtkCells)
def get_vtk_color_cloud(self):
assert(self.intensity.shape[1] == 3)
(vtkPolyData, vtkPoints, vtkCells) = self.build_vtk_polydata()
self.intensity = self.intensity.astype(np.uint8)
vtk_color_data = converter.numpy_to_vtk(self.intensity)
vtk_color_data.SetName('ColorArray')
vtkPolyData.GetPointData().SetScalars(vtk_color_data)
vtkPolyData.GetPointData().SetActiveScalars('ColorArray')
mapper = vtk.vtkPolyDataMapper()
mapper.SetInput(vtkPolyData)
vtkActor = vtk.vtkActor()
vtkActor.SetMapper(mapper)
self.actor = vtkActor
return vtkActor
def get_vtk_cloud(self, zMin=-10.0,zMax=10.0):
assert( len(self.intensity.shape) == 1)
(vtkPolyData, vtkPoints, vtkCells) = self.build_vtk_polydata()
self.intensity == self.intensity.astype(np.float32)
vtk_intensity_data = converter.numpy_to_vtk(self.intensity)
vtk_intensity_data.SetName('DepthArray')
vtkPolyData.GetPointData().SetScalars(vtk_intensity_data)
num_points = self.xyz.shape[0]
#vtkDepth = vtk.vtkFloatArray()
#vtkDepth.SetName('DepthArray')
#vtkPolyData.GetPointData().SetScalars(vtkDepth)
#vtkDepth.SetVoidArray(self.intensity, num_points, 1)
vtkPolyData.GetPointData().SetActiveScalars('DepthArray')
mapper = vtk.vtkPolyDataMapper()
mapper.SetInput(vtkPolyData)
mapper.SetColorModeToDefault()
mapper.SetScalarRange(zMin, zMax)
mapper.SetScalarVisibility(1)
vtkActor = vtk.vtkActor()
vtkActor.SetMapper(mapper)
self.actor = vtkActor
return vtkActor
############# sample callback setup ###############
class vtkTimerCallback():
def __init__(self):
pass
def execute(self,obj,event):
t = time.time()
data = 40*(random.random((60000,3))-0.5)
pointCloud = VtkPointCloud(data, data[:,2])
iren = obj
iren.GetRenderWindow().GetRenderers().GetFirstRenderer().RemoveActor(self.actor)
self.actor = pointCloud.get_vtk_cloud()
iren.GetRenderWindow().GetRenderers().GetFirstRenderer().AddActor(self.actor)
iren.GetRenderWindow().Render()
print time.time() - t
if __name__ == '__main__':
data = 40*(random.random((600,3))-0.5)
pointCloud = VtkPointCloud(data, data[:,2])
actor = pointCloud.get_vtk_cloud()
# Renderer
renderer = vtk.vtkRenderer()
renderer.AddActor(actor)
renderer.SetBackground(0.0, 0.0, 0.)
renderer.ResetCamera()
# Render Window
renderWindow = vtk.vtkRenderWindow()
renderWindow.SetSize(600,600)
renderWindow.AddRenderer(renderer)
# Interactor
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
# Begin Interaction
renderWindow.Render()
renderWindowInteractor.Initialize()
cb = vtkTimerCallback()
cb.actor = actor
renderWindowInteractor.AddObserver('TimerEvent', cb.execute)
timerId = renderWindowInteractor.CreateRepeatingTimer(50)
renderWindowInteractor.Start()
| false | true |
f71d16974deddb08bf8a27d6c9d768d917a81691 | 501 | py | Python | beluga/utils/storage.py | doublefloyd/beluga | 740bda376634945ef51bf1cf946fcbe002e9bc7f | [
"MIT"
] | 20 | 2017-10-02T13:09:58.000Z | 2022-03-28T20:50:35.000Z | beluga/utils/storage.py | doublefloyd/beluga | 740bda376634945ef51bf1cf946fcbe002e9bc7f | [
"MIT"
] | 187 | 2018-02-04T20:35:03.000Z | 2021-01-27T15:04:18.000Z | beluga/utils/storage.py | doublefloyd/beluga | 740bda376634945ef51bf1cf946fcbe002e9bc7f | [
"MIT"
] | 12 | 2018-01-19T04:00:09.000Z | 2022-03-28T16:44:17.000Z | import cloudpickle as pickle
def save(sol_set=None, ocp=None, bvp=None, filename='data.beluga'):
save_dict = {}
if sol_set is not None:
save_dict['solutions'] = sol_set
if ocp is not None:
save_dict['ocp'] = ocp
if bvp is not None:
save_dict['bvp'] = bvp
with open(filename, 'wb') as file:
pickle.dump(save_dict, file)
def load(filename):
with open(filename, 'rb') as file:
save_dict = pickle.load(file)
return save_dict
| 17.892857 | 67 | 0.616766 | import cloudpickle as pickle
def save(sol_set=None, ocp=None, bvp=None, filename='data.beluga'):
save_dict = {}
if sol_set is not None:
save_dict['solutions'] = sol_set
if ocp is not None:
save_dict['ocp'] = ocp
if bvp is not None:
save_dict['bvp'] = bvp
with open(filename, 'wb') as file:
pickle.dump(save_dict, file)
def load(filename):
with open(filename, 'rb') as file:
save_dict = pickle.load(file)
return save_dict
| true | true |
f71d16ce0d148e67cfd6cf57d35cb7d27d3c8dee | 4,540 | py | Python | SampleEncodeMultiThread.py | lferraz/VideoProcessingFramework | 19b87eddc0539d90ae4025629bac7c93c1387d56 | [
"Apache-2.0"
] | 3 | 2021-11-27T18:42:58.000Z | 2021-11-27T18:43:05.000Z | SampleEncodeMultiThread.py | lferraz/VideoProcessingFramework | 19b87eddc0539d90ae4025629bac7c93c1387d56 | [
"Apache-2.0"
] | null | null | null | SampleEncodeMultiThread.py | lferraz/VideoProcessingFramework | 19b87eddc0539d90ae4025629bac7c93c1387d56 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2020 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Starting from Python 3.8 DLL search policy has changed.
# We need to add path to CUDA DLLs explicitly.
import sys
import os
if os.name == 'nt':
# Add CUDA_PATH env variable
cuda_path = os.environ["CUDA_PATH"]
if cuda_path:
os.add_dll_directory(cuda_path)
else:
print("CUDA_PATH environment variable is not set.", file = sys.stderr)
print("Can't set CUDA DLLs search path.", file = sys.stderr)
exit(1)
# Add PATH as well for minor CUDA releases
sys_path = os.environ["PATH"]
if sys_path:
paths = sys_path.split(';')
for path in paths:
if os.path.isdir(path):
os.add_dll_directory(path)
else:
print("PATH environment variable is not set.", file = sys.stderr)
exit(1)
import PyNvCodec as nvc
import numpy as np
from threading import Thread
class Worker(Thread):
def __init__(self, gpuID, width, height, rawFilePath, encFilePath):
Thread.__init__(self)
res = width + 'x' + height
self.nvUpl = nvc.PyFrameUploader(int(width), int(height), nvc.PixelFormat.YUV420, gpuID)
self.nvCvt = nvc.PySurfaceConverter(int(width), int(height), nvc.PixelFormat.YUV420, nvc.PixelFormat.NV12, gpuID)
self.nvEnc = nvc.PyNvEncoder({'preset': 'hq', 'codec': 'h264', 's': res}, gpuID)
self.encFile = open(encFilePath, "wb")
self.rawFile = open(rawFilePath, "rb")
def run(self):
try:
while True:
frameSize = self.nvEnc.Width() * self.nvEnc.Height() * 3 / 2
rawFrame = np.fromfile(self.rawFile, np.uint8, count = int(frameSize))
if not (rawFrame.size):
print('No more video frames')
break
rawSurface = self.nvUpl.UploadSingleFrame(rawFrame)
if (rawSurface.Empty()):
print('Failed to upload video frame to GPU')
break
cvtSurface = self.nvCvt.Execute(rawSurface)
if (cvtSurface.Empty()):
print('Failed to do color conversion')
break
encFrame = np.ndarray(shape=(0), dtype=np.uint8)
success = self.nvEnc.EncodeSingleSurface(cvtSurface, encFrame)
if(success):
bits = bytearray(encFrame)
self.encFile.write(bits)
#Encoder is asynchronous, so we need to flush it
encFrame = np.ndarray(shape=(0), dtype=np.uint8)
success = self.nvEnc.Flush(encFrame)
if(success):
bits = bytearray(encFrame)
self.encFile.write(bits)
except Exception as e:
print(getattr(e, 'message', str(e)))
decFile.close()
def create_threads(gpu_id1, width_1, height_1, input_file1, output_file1,
gpu_id2, width_2, height_2, input_file2, output_file2):
th1 = Worker(gpu_id1, width_1, height_1, input_file1, output_file1)
th2 = Worker(gpu_id2, width_2, height_2, input_file2, output_file2)
th1.start()
th2.start()
th1.join()
th2.join()
if __name__ == "__main__":
print("This sample encodes 2 videos simultaneously from YUV files into 1/4 of initial size.")
print("Usage: SampleDecode.py $gpu_id1 $width_1 $height_1 $input_file1 $output_file_1 $gpu_id2 $width_2 $height_2 $input_file2 $output_file2")
if(len(sys.argv) < 11):
print("Provide input CLI arguments as shown above")
exit(1)
gpu_1 = int(sys.argv[1])
width_1 = sys.argv[2]
height_1 = sys.argv[3]
input_1 = sys.argv[4]
output_1 = sys.argv[5]
gpu_2 = int(sys.argv[6])
width_2 = sys.argv[7]
height_2 = sys.argv[8]
input_2 = sys.argv[9]
output_2 = sys.argv[10]
create_threads(gpu_1, width_1, height_1, input_1, output_1, gpu_2, width_2, height_2, input_2, output_2)
| 34.923077 | 146 | 0.619604 |
import sys
import os
if os.name == 'nt':
cuda_path = os.environ["CUDA_PATH"]
if cuda_path:
os.add_dll_directory(cuda_path)
else:
print("CUDA_PATH environment variable is not set.", file = sys.stderr)
print("Can't set CUDA DLLs search path.", file = sys.stderr)
exit(1)
# Add PATH as well for minor CUDA releases
sys_path = os.environ["PATH"]
if sys_path:
paths = sys_path.split(';')
for path in paths:
if os.path.isdir(path):
os.add_dll_directory(path)
else:
print("PATH environment variable is not set.", file = sys.stderr)
exit(1)
import PyNvCodec as nvc
import numpy as np
from threading import Thread
class Worker(Thread):
def __init__(self, gpuID, width, height, rawFilePath, encFilePath):
Thread.__init__(self)
res = width + 'x' + height
self.nvUpl = nvc.PyFrameUploader(int(width), int(height), nvc.PixelFormat.YUV420, gpuID)
self.nvCvt = nvc.PySurfaceConverter(int(width), int(height), nvc.PixelFormat.YUV420, nvc.PixelFormat.NV12, gpuID)
self.nvEnc = nvc.PyNvEncoder({'preset': 'hq', 'codec': 'h264', 's': res}, gpuID)
self.encFile = open(encFilePath, "wb")
self.rawFile = open(rawFilePath, "rb")
def run(self):
try:
while True:
frameSize = self.nvEnc.Width() * self.nvEnc.Height() * 3 / 2
rawFrame = np.fromfile(self.rawFile, np.uint8, count = int(frameSize))
if not (rawFrame.size):
print('No more video frames')
break
rawSurface = self.nvUpl.UploadSingleFrame(rawFrame)
if (rawSurface.Empty()):
print('Failed to upload video frame to GPU')
break
cvtSurface = self.nvCvt.Execute(rawSurface)
if (cvtSurface.Empty()):
print('Failed to do color conversion')
break
encFrame = np.ndarray(shape=(0), dtype=np.uint8)
success = self.nvEnc.EncodeSingleSurface(cvtSurface, encFrame)
if(success):
bits = bytearray(encFrame)
self.encFile.write(bits)
#Encoder is asynchronous, so we need to flush it
encFrame = np.ndarray(shape=(0), dtype=np.uint8)
success = self.nvEnc.Flush(encFrame)
if(success):
bits = bytearray(encFrame)
self.encFile.write(bits)
except Exception as e:
print(getattr(e, 'message', str(e)))
decFile.close()
def create_threads(gpu_id1, width_1, height_1, input_file1, output_file1,
gpu_id2, width_2, height_2, input_file2, output_file2):
th1 = Worker(gpu_id1, width_1, height_1, input_file1, output_file1)
th2 = Worker(gpu_id2, width_2, height_2, input_file2, output_file2)
th1.start()
th2.start()
th1.join()
th2.join()
if __name__ == "__main__":
print("This sample encodes 2 videos simultaneously from YUV files into 1/4 of initial size.")
print("Usage: SampleDecode.py $gpu_id1 $width_1 $height_1 $input_file1 $output_file_1 $gpu_id2 $width_2 $height_2 $input_file2 $output_file2")
if(len(sys.argv) < 11):
print("Provide input CLI arguments as shown above")
exit(1)
gpu_1 = int(sys.argv[1])
width_1 = sys.argv[2]
height_1 = sys.argv[3]
input_1 = sys.argv[4]
output_1 = sys.argv[5]
gpu_2 = int(sys.argv[6])
width_2 = sys.argv[7]
height_2 = sys.argv[8]
input_2 = sys.argv[9]
output_2 = sys.argv[10]
create_threads(gpu_1, width_1, height_1, input_1, output_1, gpu_2, width_2, height_2, input_2, output_2)
| true | true |
f71d179c754d7a21f6f4bd8d43a433d9f6e7e9ef | 808 | py | Python | files/src/render-docker-images.py | muellerbe/container-image-osism-ansible | cb503819d0cf7548e5be711d635beedccf711bad | [
"Apache-2.0"
] | null | null | null | files/src/render-docker-images.py | muellerbe/container-image-osism-ansible | cb503819d0cf7548e5be711d635beedccf711bad | [
"Apache-2.0"
] | 9 | 2017-12-01T11:51:26.000Z | 2020-11-21T12:55:17.000Z | files/src/render-docker-images.py | muellerbe/container-image-osism-ansible | cb503819d0cf7548e5be711d635beedccf711bad | [
"Apache-2.0"
] | 2 | 2020-09-15T05:52:06.000Z | 2020-11-13T08:27:00.000Z | import os
import jinja2
import yaml
# get environment parameters
VERSION = os.environ.get("VERSION", "latest")
# load versions files from release repository
with open("/release/%s/base.yml" % VERSION, "rb") as fp:
versions = yaml.load(fp, Loader=yaml.FullLoader)
with open("/release/etc/images.yml", "rb") as fp:
images = yaml.load(fp, Loader=yaml.FullLoader)
# prepare jinja2 environment
loader = jinja2.FileSystemLoader(searchpath="templates/")
environment = jinja2.Environment(loader=loader)
# render images.yml
template = environment.get_template("images.yml.j2")
result = template.render({
'images': images,
'manager_version': versions['manager_version'],
'versions': versions['docker_images']
})
with open("/ansible/group_vars/all/images.yml", "w+") as fp:
fp.write(result)
| 24.484848 | 60 | 0.72896 | import os
import jinja2
import yaml
VERSION = os.environ.get("VERSION", "latest")
with open("/release/%s/base.yml" % VERSION, "rb") as fp:
versions = yaml.load(fp, Loader=yaml.FullLoader)
with open("/release/etc/images.yml", "rb") as fp:
images = yaml.load(fp, Loader=yaml.FullLoader)
loader = jinja2.FileSystemLoader(searchpath="templates/")
environment = jinja2.Environment(loader=loader)
template = environment.get_template("images.yml.j2")
result = template.render({
'images': images,
'manager_version': versions['manager_version'],
'versions': versions['docker_images']
})
with open("/ansible/group_vars/all/images.yml", "w+") as fp:
fp.write(result)
| true | true |
f71d17b8fa7c1e98b9b2a3378d1c642625e51b35 | 1,406 | bzl | Python | internal/runner_bin.bzl | davidmorgan/rules_postcss | 80042f107ef4e68f40b77d4d795eb0dbed340095 | [
"Apache-2.0"
] | null | null | null | internal/runner_bin.bzl | davidmorgan/rules_postcss | 80042f107ef4e68f40b77d4d795eb0dbed340095 | [
"Apache-2.0"
] | null | null | null | internal/runner_bin.bzl | davidmorgan/rules_postcss | 80042f107ef4e68f40b77d4d795eb0dbed340095 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Bazel Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PostCSS runner binary rule.
Creates a nodejs_binary given our generated internal runner source. This file
can be substituted in your copies of these build rules, for example due to
differences in Node.js/Starlark build rules."""
load("@build_bazel_rules_nodejs//:index.bzl", "nodejs_binary")
def postcss_runner_bin(
name,
src,
deps,
**kwargs):
"""Convenience helper for using nodejs_binary with the PostCSS runner.
Args:
name: The name of the build rule.
src: The source file and entry point of the nodejs_binary.
deps: What the nodejs_binary depends on.
**kwargs: Additional arguments to pass to nodejs_binary().
"""
nodejs_binary(
name = name,
entry_point = ":%s" % (src),
data = deps,
**kwargs
)
| 32.697674 | 77 | 0.697724 |
load("@build_bazel_rules_nodejs//:index.bzl", "nodejs_binary")
def postcss_runner_bin(
name,
src,
deps,
**kwargs):
nodejs_binary(
name = name,
entry_point = ":%s" % (src),
data = deps,
**kwargs
)
| true | true |
f71d181a178ed854c881eea638143b829167c292 | 569 | py | Python | yametrikapy/client.py | tezmen/yametrika-api | d6537ee3246a9e2c6f43cff1c6710c616595fcf5 | [
"MIT"
] | null | null | null | yametrikapy/client.py | tezmen/yametrika-api | d6537ee3246a9e2c6f43cff1c6710c616595fcf5 | [
"MIT"
] | null | null | null | yametrikapy/client.py | tezmen/yametrika-api | d6537ee3246a9e2c6f43cff1c6710c616595fcf5 | [
"MIT"
] | null | null | null | # coding: utf-8
import requests
from urllib.parse import urlencode
class APIClient(object):
def __init__(self):
self.status = 0
self.reason = ''
self.HEADERS = {}
def urlencode(self, **kwargs):
return urlencode(kwargs)
def request(self, method, url, params=None, headers=None):
if not headers:
headers = self.HEADERS
if str(method).upper() == 'POST':
r = requests.post(url, headers=headers, data=params, timeout=120)
else:
r = requests.get(url, headers=headers, params=params, timeout=120)
self.status = r.status_code
return r.text
| 21.884615 | 69 | 0.695958 |
import requests
from urllib.parse import urlencode
class APIClient(object):
def __init__(self):
self.status = 0
self.reason = ''
self.HEADERS = {}
def urlencode(self, **kwargs):
return urlencode(kwargs)
def request(self, method, url, params=None, headers=None):
if not headers:
headers = self.HEADERS
if str(method).upper() == 'POST':
r = requests.post(url, headers=headers, data=params, timeout=120)
else:
r = requests.get(url, headers=headers, params=params, timeout=120)
self.status = r.status_code
return r.text
| false | true |
f71d181e5cd47420840ccad0fb1e96326ecbda24 | 4,411 | py | Python | mirage/core/argParser.py | epablosensei/mirage | 3c0d2fb0f0e570356e7126c999e83e0256920420 | [
"MIT"
] | 123 | 2019-11-20T19:53:23.000Z | 2022-03-07T19:51:03.000Z | mirage/core/argParser.py | epablosensei/mirage | 3c0d2fb0f0e570356e7126c999e83e0256920420 | [
"MIT"
] | 23 | 2019-10-22T13:53:34.000Z | 2022-03-22T22:22:55.000Z | mirage/core/argParser.py | epablosensei/mirage | 3c0d2fb0f0e570356e7126c999e83e0256920420 | [
"MIT"
] | 25 | 2019-11-15T12:13:48.000Z | 2021-12-22T00:21:15.000Z | from mirage.libs import io
import sys
class ArgParser:
'''
This class allows to easily parse parameters from command line.
'''
def __init__(self,appInstance=None):
'''
This constructor allows to keep a pointer on the main Application instance.
:param appInstance: instance of the main Application (core.app.App)
:type appInstance: core.app.App
'''
self.appInstance = appInstance
def debug(self):
'''
This method checks if the debug parameter has been provided by the user on the command line.
It will modify the attribute ``debugMode`` stored in the provided instance of core.app.App.
'''
if "--debug" in sys.argv:
self.appInstance.debugMode = True
sys.argv.remove("--debug")
def quiet(self):
'''
This method checks if the quiet parameter has been provided by the user on the command line.
It will modify the attribute ``quiet`` stored in the provided instance of core.app.App.
'''
if "--quiet" in sys.argv:
self.appInstance.quiet = True
sys.argv.remove("--quiet")
def verbosity(self):
'''
This method checks if the verbosity parameter has been provided by the user on the command line.
It will modify the variable ``VERBOSITY_LEVEL`` stored in libs.io.
'''
verbosity = [arg for arg in sys.argv if "--verbosity=" in arg]
if len(verbosity) > 0:
(_,value) = verbosity[-1].split("--verbosity=")
if value.upper() == "NONE" or value == "0":
io.VERBOSITY_LEVEL = io.VerbosityLevels.NONE
elif value.upper() == "NO_INFO_AND_WARNING" or value == "1":
io.VERBOSITY_LEVEL = io.VerbosityLevels.NO_INFO_AND_WARNING
elif value.upper() == "NO_INFO" or value=="2":
io.VERBOSITY_LEVEL = io.VerbosityLevels.NO_INFO
else:
io.VERBOSITY_LEVEL = io.VerbosityLevels.ALL
for arg in sys.argv:
if "--verbosity=" in arg:
sys.argv.remove(arg)
def create_module(self):
'''
This method checks if the create_module parameter has been provided by the user on the command line.
It will call the method ``create_module`` of the main application instance (core.app.App).
'''
if "--create_module" in sys.argv:
self.appInstance.create_module()
return True
return False
def create_scenario(self):
'''
This method checks if the create_scenario parameter has been provided by the user on the command line.
It will call the method ``create_scenario`` of the main application instance (core.app.App).
'''
if "--create_scenario" in sys.argv:
self.appInstance.create_scenario()
return True
return False
def list(self):
'''
This method checks if the list parameter has been provided by the user on the command line.
It will call the method ``list`` of the main application instance (core.app.App).
'''
if "--list" in sys.argv:
self.appInstance.list()
return True
else:
applist = [arg for arg in sys.argv if "--list=" in arg]
if len(applist) > 0:
(_,pattern) = applist[-1].split("--list=")
self.appInstance.list(pattern=pattern)
return True
return False
def launcher(self):
'''
This method checks if a Mirage module to run has been provided by the user on the command line.
It will load and run the corresponding module with the parameters provided by the user.
:Example:
``./mirage.py moduleName PARAMETER1=value1 PARAMETER2=value2 PARAMETER3=value3``
'''
module = sys.argv[1]
self.appInstance.load(module)
if len(self.appInstance.modules) > 0:
if "--args" in sys.argv or "--showargs" in sys.argv:
self.appInstance.args()
exit(1)
else:
for arg in sys.argv[2:]:
arg = arg.split("=",1)
if len(arg) == 2:
(name,value) = arg
self.appInstance.set(name,value)
else:
io.fail("Incorrect parameter : "+str(arg))
exit(1)
self.appInstance.run()
self.appInstance.exit()
def run(self):
'''
This method checks if Mirage has been launched with some parameters.
- If no Mirage module has been provided by the user on the command line, it will launch the main application loop
(method ``loop`` of core.app.App)
- If a Mirage module has been provided by the user, it calls the method ``launcher`` of core.argParser.ArgParser.
'''
self.debug()
self.quiet()
self.verbosity()
if self.create_module() or self.create_scenario():
self.appInstance.exit()
elif not self.list():
if len(sys.argv) == 1:
self.appInstance.loop()
else:
self.launcher()
| 30.631944 | 115 | 0.688959 | from mirage.libs import io
import sys
class ArgParser:
def __init__(self,appInstance=None):
self.appInstance = appInstance
def debug(self):
if "--debug" in sys.argv:
self.appInstance.debugMode = True
sys.argv.remove("--debug")
def quiet(self):
if "--quiet" in sys.argv:
self.appInstance.quiet = True
sys.argv.remove("--quiet")
def verbosity(self):
verbosity = [arg for arg in sys.argv if "--verbosity=" in arg]
if len(verbosity) > 0:
(_,value) = verbosity[-1].split("--verbosity=")
if value.upper() == "NONE" or value == "0":
io.VERBOSITY_LEVEL = io.VerbosityLevels.NONE
elif value.upper() == "NO_INFO_AND_WARNING" or value == "1":
io.VERBOSITY_LEVEL = io.VerbosityLevels.NO_INFO_AND_WARNING
elif value.upper() == "NO_INFO" or value=="2":
io.VERBOSITY_LEVEL = io.VerbosityLevels.NO_INFO
else:
io.VERBOSITY_LEVEL = io.VerbosityLevels.ALL
for arg in sys.argv:
if "--verbosity=" in arg:
sys.argv.remove(arg)
def create_module(self):
if "--create_module" in sys.argv:
self.appInstance.create_module()
return True
return False
def create_scenario(self):
if "--create_scenario" in sys.argv:
self.appInstance.create_scenario()
return True
return False
def list(self):
if "--list" in sys.argv:
self.appInstance.list()
return True
else:
applist = [arg for arg in sys.argv if "--list=" in arg]
if len(applist) > 0:
(_,pattern) = applist[-1].split("--list=")
self.appInstance.list(pattern=pattern)
return True
return False
def launcher(self):
module = sys.argv[1]
self.appInstance.load(module)
if len(self.appInstance.modules) > 0:
if "--args" in sys.argv or "--showargs" in sys.argv:
self.appInstance.args()
exit(1)
else:
for arg in sys.argv[2:]:
arg = arg.split("=",1)
if len(arg) == 2:
(name,value) = arg
self.appInstance.set(name,value)
else:
io.fail("Incorrect parameter : "+str(arg))
exit(1)
self.appInstance.run()
self.appInstance.exit()
def run(self):
self.debug()
self.quiet()
self.verbosity()
if self.create_module() or self.create_scenario():
self.appInstance.exit()
elif not self.list():
if len(sys.argv) == 1:
self.appInstance.loop()
else:
self.launcher()
| true | true |
f71d192679d31c54701073adca22e402f634fedf | 2,228 | py | Python | 3D-Drucker-Ender3V2/Marlin-2.0.x/buildroot/share/PlatformIO/scripts/mc-apply.py | arendtchris/arendtchris-fpv-configuration | f9304f19e6cedb219ff04c1ecec4388451c300ce | [
"MIT"
] | null | null | null | 3D-Drucker-Ender3V2/Marlin-2.0.x/buildroot/share/PlatformIO/scripts/mc-apply.py | arendtchris/arendtchris-fpv-configuration | f9304f19e6cedb219ff04c1ecec4388451c300ce | [
"MIT"
] | null | null | null | 3D-Drucker-Ender3V2/Marlin-2.0.x/buildroot/share/PlatformIO/scripts/mc-apply.py | arendtchris/arendtchris-fpv-configuration | f9304f19e6cedb219ff04c1ecec4388451c300ce | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Create a Configuration from marlin_config.json
#
import json
import sys
import shutil
import re
opt_output = '--opt' in sys.argv
output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen'
try:
with open('marlin_config.json', 'r') as infile:
conf = json.load(infile)
for key in conf:
# We don't care about the hash when restoring here
if key == '__INITIAL_HASH':
continue
if key == 'VERSION':
for k, v in sorted(conf[key].items()):
print(k + ': ' + v)
continue
# The key is the file name, so let's build it now
outfile = open('Marlin/' + key + output_suffix, 'w')
for k, v in sorted(conf[key].items()):
# Make define line now
if opt_output:
if v != '':
if '"' in v:
v = "'%s'" % v
elif ' ' in v:
v = '"%s"' % v
define = 'opt_set ' + k + ' ' + v + '\n'
else:
define = 'opt_enable ' + k + '\n'
else:
define = '#define ' + k + ' ' + v + '\n'
outfile.write(define)
outfile.close()
# Try to apply changes to the actual configuration file (in order to keep useful comments)
if output_suffix != '':
# Move the existing configuration so it doesn't interfere
shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig')
infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n')
outfile = open('Marlin/' + key, 'w')
for line in infile_lines:
sline = line.strip(" \t\n\r")
if sline[:7] == "#define":
# Extract the key here (we don't care about the value)
kv = sline[8:].strip().split(' ')
if kv[0] in conf[key]:
outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n')
# Remove the key from the dict, so we can still write all missing keys at the end of the file
del conf[key][kv[0]]
else:
outfile.write(line + '\n')
else:
outfile.write(line + '\n')
# Process any remaining defines here
for k, v in sorted(conf[key].items()):
define = '#define ' + k + ' ' + v + '\n'
outfile.write(define)
outfile.close()
print('Output configuration written to: ' + 'Marlin/' + key + output_suffix)
except:
print('No marlin_config.json found.')
| 31.828571 | 100 | 0.579443 |
import json
import sys
import shutil
import re
opt_output = '--opt' in sys.argv
output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen'
try:
with open('marlin_config.json', 'r') as infile:
conf = json.load(infile)
for key in conf:
if key == '__INITIAL_HASH':
continue
if key == 'VERSION':
for k, v in sorted(conf[key].items()):
print(k + ': ' + v)
continue
# The key is the file name, so let's build it now
outfile = open('Marlin/' + key + output_suffix, 'w')
for k, v in sorted(conf[key].items()):
if opt_output:
if v != '':
if '"' in v:
v = "'%s'" % v
elif ' ' in v:
v = '"%s"' % v
define = 'opt_set ' + k + ' ' + v + '\n'
else:
define = 'opt_enable ' + k + '\n'
else:
define = '#define ' + k + ' ' + v + '\n'
outfile.write(define)
outfile.close()
# Try to apply changes to the actual configuration file (in order to keep useful comments)
if output_suffix != '':
# Move the existing configuration so it doesn't interfere
shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig')
infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n')
outfile = open('Marlin/' + key, 'w')
for line in infile_lines:
sline = line.strip(" \t\n\r")
if sline[:7] == "#define":
# Extract the key here (we don't care about the value)
kv = sline[8:].strip().split(' ')
if kv[0] in conf[key]:
outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n')
# Remove the key from the dict, so we can still write all missing keys at the end of the file
del conf[key][kv[0]]
else:
outfile.write(line + '\n')
else:
outfile.write(line + '\n')
# Process any remaining defines here
for k, v in sorted(conf[key].items()):
define = '#define ' + k + ' ' + v + '\n'
outfile.write(define)
outfile.close()
print('Output configuration written to: ' + 'Marlin/' + key + output_suffix)
except:
print('No marlin_config.json found.')
| true | true |
f71d192934266ddcbda06d1e8bc90a9610316354 | 506 | py | Python | molecool/__init__.py | jamesluo274/molecool2 | 664ca06527a3767003f1fdf2697b1f1ff6d0cd23 | [
"MIT"
] | null | null | null | molecool/__init__.py | jamesluo274/molecool2 | 664ca06527a3767003f1fdf2697b1f1ff6d0cd23 | [
"MIT"
] | 1 | 2020-12-18T02:23:38.000Z | 2020-12-18T02:23:38.000Z | molecool/__init__.py | jamesluo274/molecool2 | 664ca06527a3767003f1fdf2697b1f1ff6d0cd23 | [
"MIT"
] | null | null | null | """
molecool
A Python package for analyzing and visualizing xyz files.
"""
# Add imports here
from .functions import canvas
from .measure import calculate_angle, calculate_distance
from .visulize import draw_molecule, bond_histogram
from .molecule import build_bond_list, calculate_molecular_mass
from . import io
# Handle versioneer
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
| 25.3 | 63 | 0.814229 |
from .functions import canvas
from .measure import calculate_angle, calculate_distance
from .visulize import draw_molecule, bond_histogram
from .molecule import build_bond_list, calculate_molecular_mass
from . import io
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
| true | true |
f71d1939601ac77b5c4fcaf0869aaa394e34a6e7 | 18,054 | py | Python | django/db/migrations/autodetector.py | anoopksh/django | f00243f36df8dfe504491e03be5d5aea076340b3 | [
"BSD-3-Clause"
] | 1 | 2019-09-21T06:40:37.000Z | 2019-09-21T06:40:37.000Z | django/db/migrations/autodetector.py | anoopksh/django | f00243f36df8dfe504491e03be5d5aea076340b3 | [
"BSD-3-Clause"
] | null | null | null | django/db/migrations/autodetector.py | anoopksh/django | f00243f36df8dfe504491e03be5d5aea076340b3 | [
"BSD-3-Clause"
] | null | null | null | import re
import datetime
from django.db.migrations import operations
from django.db.migrations.migration import Migration
from django.db.migrations.questioner import MigrationQuestioner
class MigrationAutodetector(object):
    """
    Takes a pair of ProjectStates, and compares them to see what the
    first would need doing to make it match the second (the second
    usually being the project's current state).

    Note that this naturally operates on entire projects at a time,
    as it's likely that changes interact (for example, you can't
    add a ForeignKey without having a migration to add the table it
    depends on first). A user interface may offer single-app usage
    if it wishes, with the caveat that it may not always be possible.
    """

    def __init__(self, from_state, to_state, questioner=None):
        self.from_state = from_state
        self.to_state = to_state
        self.questioner = questioner or MigrationQuestioner()

    def changes(self, graph, trim_to_apps=None):
        """
        Main entry point to produce a list of appliable changes.
        Takes a graph to base names on and an optional set of apps
        to try and restrict to (restriction is not guaranteed).
        """
        changes = self._detect_changes()
        changes = self._arrange_for_graph(changes, graph)
        if trim_to_apps:
            changes = self._trim_to_apps(changes, trim_to_apps)
        return changes

    def _detect_changes(self):
        """
        Returns a dict of migration plans which will achieve the
        change from from_state to to_state. The dict has app labels
        as keys and a list of migrations as values.

        The resulting migrations aren't specially named, but the names
        do matter for dependencies inside the set.
        """
        # We'll store migrations as lists by app names for now
        self.migrations = {}
        old_apps = self.from_state.render()
        new_apps = self.to_state.render()
        # Prepare lists of old/new model keys that we care about
        # (i.e. ignoring proxy ones)
        old_model_keys = [
            (al, mn)
            for al, mn in self.from_state.models.keys()
            if not old_apps.get_model(al, mn)._meta.proxy
        ]
        new_model_keys = [
            (al, mn)
            for al, mn in self.to_state.models.keys()
            if not new_apps.get_model(al, mn)._meta.proxy
        ]
        # Adding models. Phase 1 is adding models with no outward relationships.
        added_models = set(new_model_keys) - set(old_model_keys)
        pending_add = {}
        for app_label, model_name in added_models:
            model_state = self.to_state.models[app_label, model_name]
            # Are there any relationships out from this model? if so, punt it to the next phase.
            related_fields = []
            for field in new_apps.get_model(app_label, model_name)._meta.local_fields:
                if field.rel:
                    if field.rel.to:
                        related_fields.append((field.name, field.rel.to._meta.app_label.lower(), field.rel.to._meta.object_name.lower()))
                    # BUGFIX: this used to read ``field.rel.though`` (typo),
                    # which raised AttributeError for any relation that has a
                    # custom (non auto-created) ``through`` model.
                    if hasattr(field.rel, "through") and not field.rel.through._meta.auto_created:
                        related_fields.append((field.name, field.rel.through._meta.app_label.lower(), field.rel.through._meta.object_name.lower()))
            if related_fields:
                pending_add[app_label, model_name] = related_fields
            else:
                self.add_to_migration(
                    app_label,
                    operations.CreateModel(
                        name=model_state.name,
                        fields=model_state.fields,
                        options=model_state.options,
                        bases=model_state.bases,
                    )
                )
        # Phase 2 is progressively adding pending models, splitting up into two
        # migrations if required.
        pending_new_fks = []
        while pending_add:
            # Is there one we can add that has all dependencies satisfied?
            satisfied = [(m, rf) for m, rf in pending_add.items() if all((al, mn) not in pending_add for f, al, mn in rf)]
            if satisfied:
                (app_label, model_name), related_fields = sorted(satisfied)[0]
                model_state = self.to_state.models[app_label, model_name]
                self.add_to_migration(
                    app_label,
                    operations.CreateModel(
                        name=model_state.name,
                        fields=model_state.fields,
                        options=model_state.options,
                        bases=model_state.bases,
                    )
                )
                for field_name, other_app_label, other_model_name in related_fields:
                    # Cross-app relations need an inter-app dependency.
                    if app_label != other_app_label:
                        self.add_dependency(app_label, other_app_label)
                del pending_add[app_label, model_name]
            # Ah well, we'll need to split one. Pick deterministically.
            else:
                (app_label, model_name), related_fields = sorted(pending_add.items())[0]
                model_state = self.to_state.models[app_label, model_name]
                # Work out the fields that need splitting out
                bad_fields = dict((f, (al, mn)) for f, al, mn in related_fields if (al, mn) in pending_add)
                # Create the model, without those
                self.add_to_migration(
                    app_label,
                    operations.CreateModel(
                        name=model_state.name,
                        fields=[(n, f) for n, f in model_state.fields if n not in bad_fields],
                        options=model_state.options,
                        bases=model_state.bases,
                    )
                )
                # Add the bad fields to be made in a phase 3
                for field_name, (other_app_label, other_model_name) in bad_fields.items():
                    pending_new_fks.append((app_label, model_name, field_name, other_app_label))
                del pending_add[app_label, model_name]
        # Phase 3 is adding the final set of FKs as separate new migrations
        for app_label, model_name, field_name, other_app_label in pending_new_fks:
            model_state = self.to_state.models[app_label, model_name]
            self.add_to_migration(
                app_label,
                operations.AddField(
                    model_name=model_name,
                    name=field_name,
                    field=model_state.get_field_by_name(field_name),
                ),
                new=True,
            )
            if app_label != other_app_label:
                self.add_dependency(app_label, other_app_label)
        # Removing models
        removed_models = set(old_model_keys) - set(new_model_keys)
        for app_label, model_name in removed_models:
            model_state = self.from_state.models[app_label, model_name]
            self.add_to_migration(
                app_label,
                operations.DeleteModel(
                    model_state.name,
                )
            )
        # Changes within models
        kept_models = set(old_model_keys).intersection(new_model_keys)
        old_fields = set()
        new_fields = set()
        for app_label, model_name in kept_models:
            old_model_state = self.from_state.models[app_label, model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            # Collect field changes for later global dealing with (so AddFields
            # always come before AlterFields even on separate models)
            old_fields.update((app_label, model_name, x) for x, y in old_model_state.fields)
            new_fields.update((app_label, model_name, x) for x, y in new_model_state.fields)
            # Unique_together changes
            if old_model_state.options.get("unique_together", set()) != new_model_state.options.get("unique_together", set()):
                self.add_to_migration(
                    app_label,
                    operations.AlterUniqueTogether(
                        name=model_name,
                        unique_together=new_model_state.options.get("unique_together", set()),
                    )
                )
        # New fields
        for app_label, model_name, field_name in new_fields - old_fields:
            old_model_state = self.from_state.models[app_label, model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            field = new_model_state.get_field_by_name(field_name)
            # Scan to see if this is actually a rename!
            field_dec = field.deconstruct()[1:]
            found_rename = False
            for rem_app_label, rem_model_name, rem_field_name in (old_fields - new_fields):
                if rem_app_label == app_label and rem_model_name == model_name:
                    if old_model_state.get_field_by_name(rem_field_name).deconstruct()[1:] == field_dec:
                        if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
                            self.add_to_migration(
                                app_label,
                                operations.RenameField(
                                    model_name=model_name,
                                    old_name=rem_field_name,
                                    new_name=field_name,
                                )
                            )
                            old_fields.remove((rem_app_label, rem_model_name, rem_field_name))
                            new_fields.remove((app_label, model_name, field_name))
                            found_rename = True
                            break
            if found_rename:
                continue
            # You can't just add NOT NULL fields with no default
            if not field.null and not field.has_default():
                field = field.clone()
                field.default = self.questioner.ask_not_null_addition(field_name, model_name)
                self.add_to_migration(
                    app_label,
                    operations.AddField(
                        model_name=model_name,
                        name=field_name,
                        field=field,
                        preserve_default=False,
                    )
                )
            else:
                self.add_to_migration(
                    app_label,
                    operations.AddField(
                        model_name=model_name,
                        name=field_name,
                        field=field,
                    )
                )
        # Old fields
        for app_label, model_name, field_name in old_fields - new_fields:
            old_model_state = self.from_state.models[app_label, model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            self.add_to_migration(
                app_label,
                operations.RemoveField(
                    model_name=model_name,
                    name=field_name,
                )
            )
        # The same fields
        for app_label, model_name, field_name in old_fields.intersection(new_fields):
            # Did the field change?
            old_model_state = self.from_state.models[app_label, model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            old_field_dec = old_model_state.get_field_by_name(field_name).deconstruct()
            new_field_dec = new_model_state.get_field_by_name(field_name).deconstruct()
            if old_field_dec != new_field_dec:
                self.add_to_migration(
                    app_label,
                    operations.AlterField(
                        model_name=model_name,
                        name=field_name,
                        field=new_model_state.get_field_by_name(field_name),
                    )
                )
        # Alright, now add internal dependencies
        for app_label, migrations in self.migrations.items():
            for m1, m2 in zip(migrations, migrations[1:]):
                m2.dependencies.append((app_label, m1.name))
        # Clean up dependencies
        for app_label, migrations in self.migrations.items():
            for migration in migrations:
                migration.dependencies = list(set(migration.dependencies))
        return self.migrations

    def add_to_migration(self, app_label, operation, new=False):
        """
        Appends *operation* to app_label's most recent migration, first
        creating a new auto-named migration if none exists yet (or if
        *new* is True, which forces a fresh migration).
        """
        migrations = self.migrations.setdefault(app_label, [])
        if not migrations or new:
            subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []})
            instance = subclass("auto_%i" % (len(migrations) + 1), app_label)
            migrations.append(instance)
        migrations[-1].operations.append(operation)

    def add_dependency(self, app_label, other_app_label):
        """
        Adds a dependency to app_label's newest migration on
        other_app_label's latest migration.
        """
        if self.migrations.get(other_app_label, []):
            dependency = (other_app_label, self.migrations[other_app_label][-1].name)
        else:
            dependency = (other_app_label, "__first__")
        self.migrations[app_label][-1].dependencies.append(dependency)

    def _arrange_for_graph(self, changes, graph):
        """
        Takes in a result from changes() and a MigrationGraph,
        and fixes the names and dependencies of the changes so they
        extend the graph from the leaf nodes for each app.
        """
        leaves = graph.leaf_nodes()
        name_map = {}
        for app_label, migrations in list(changes.items()):
            if not migrations:
                continue
            # Find the app label's current leaf node
            app_leaf = None
            for leaf in leaves:
                if leaf[0] == app_label:
                    app_leaf = leaf
                    break
            # Do they want an initial migration for this app?
            if app_leaf is None and not self.questioner.ask_initial(app_label):
                # They don't.
                for migration in migrations:
                    name_map[(app_label, migration.name)] = (app_label, "__first__")
                del changes[app_label]
                # BUGFIX: without this ``continue`` the loop fell through and
                # renamed the migrations it had just discarded, clobbering
                # the "__first__" entries recorded in name_map above.
                continue
            # Work out the next number in the sequence
            if app_leaf is None:
                next_number = 1
            else:
                next_number = (self.parse_number(app_leaf[1]) or 0) + 1
            # Name each migration
            for i, migration in enumerate(migrations):
                if i == 0 and app_leaf:
                    migration.dependencies.append(app_leaf)
                if i == 0 and not app_leaf:
                    new_name = "0001_initial"
                else:
                    new_name = "%04i_%s" % (next_number, self.suggest_name(migration.operations))
                name_map[(app_label, migration.name)] = (app_label, new_name)
                # BUGFIX: next_number was never incremented, so several new
                # migrations for the same app could be given colliding names.
                next_number += 1
                migration.name = new_name
        # Now fix dependencies
        for app_label, migrations in changes.items():
            for migration in migrations:
                migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
        return changes

    def _trim_to_apps(self, changes, app_labels):
        """
        Takes changes from arrange_for_graph and set of app labels and
        returns a modified set of changes which trims out as many migrations
        that are not in app_labels as possible.

        Note that some other migrations may still be present, as they may be
        required dependencies.
        """
        # Gather other app dependencies in a first pass
        app_dependencies = {}
        for app_label, migrations in changes.items():
            for migration in migrations:
                for dep_app_label, name in migration.dependencies:
                    app_dependencies.setdefault(app_label, set()).add(dep_app_label)
        required_apps = set(app_labels)
        # Keep resolving till there's no change
        old_required_apps = None
        while old_required_apps != required_apps:
            old_required_apps = set(required_apps)
            for app_label in list(required_apps):
                required_apps.update(app_dependencies.get(app_label, set()))
        # Remove all migrations that aren't needed
        for app_label in list(changes.keys()):
            if app_label not in required_apps:
                del changes[app_label]
        return changes

    @classmethod
    def suggest_name(cls, ops):
        """
        Given a set of operations, suggests a name for the migration
        they might represent. Names are not guaranteed to be unique,
        but we put some effort in to the fallback name to avoid VCS conflicts
        if we can.
        """
        if len(ops) == 1:
            if isinstance(ops[0], operations.CreateModel):
                return ops[0].name.lower()
            elif isinstance(ops[0], operations.DeleteModel):
                return "delete_%s" % ops[0].name.lower()
            elif isinstance(ops[0], operations.AddField):
                return "%s_%s" % (ops[0].model_name.lower(), ops[0].name.lower())
            elif isinstance(ops[0], operations.RemoveField):
                return "remove_%s_%s" % (ops[0].model_name.lower(), ops[0].name.lower())
        elif all(isinstance(o, operations.CreateModel) for o in ops):
            return "_".join(sorted(o.name.lower() for o in ops))
        return "auto_%s" % datetime.datetime.now().strftime("%Y%m%d_%H%M")

    @classmethod
    def parse_number(cls, name):
        """
        Given a migration name, tries to extract a number from the
        beginning of it. If no number found, returns None.
        """
        if re.match(r"^\d+_", name):
            return int(name.split("_")[0])
        return None
| 47.510526 | 147 | 0.577877 | import re
import datetime
from django.db.migrations import operations
from django.db.migrations.migration import Migration
from django.db.migrations.questioner import MigrationQuestioner
class MigrationAutodetector(object):
    """
    Takes a pair of ProjectStates, and compares them to see what the
    first would need doing to make it match the second (the second
    usually being the project's current state).

    Note that this naturally operates on entire projects at a time,
    as it's likely that changes interact (for example, you can't
    add a ForeignKey without having a migration to add the table it
    depends on first). A user interface may offer single-app usage
    if it wishes, with the caveat that it may not always be possible.
    """

    def __init__(self, from_state, to_state, questioner=None):
        self.from_state = from_state
        self.to_state = to_state
        self.questioner = questioner or MigrationQuestioner()

    def changes(self, graph, trim_to_apps=None):
        """
        Main entry point to produce a list of appliable changes.
        Takes a graph to base names on and an optional set of apps
        to try and restrict to (restriction is not guaranteed).
        """
        changes = self._detect_changes()
        changes = self._arrange_for_graph(changes, graph)
        if trim_to_apps:
            changes = self._trim_to_apps(changes, trim_to_apps)
        return changes

    def _detect_changes(self):
        """
        Returns a dict of migration plans which will achieve the
        change from from_state to to_state. The dict has app labels
        as keys and a list of migrations as values.

        The resulting migrations aren't specially named, but the names
        do matter for dependencies inside the set.
        """
        # We'll store migrations as lists by app names for now
        self.migrations = {}
        old_apps = self.from_state.render()
        new_apps = self.to_state.render()
        # Prepare lists of old/new model keys that we care about
        # (i.e. ignoring proxy ones)
        old_model_keys = [
            (al, mn)
            for al, mn in self.from_state.models.keys()
            if not old_apps.get_model(al, mn)._meta.proxy
        ]
        new_model_keys = [
            (al, mn)
            for al, mn in self.to_state.models.keys()
            if not new_apps.get_model(al, mn)._meta.proxy
        ]
        # Adding models. Phase 1 is adding models with no outward relationships.
        added_models = set(new_model_keys) - set(old_model_keys)
        pending_add = {}
        for app_label, model_name in added_models:
            model_state = self.to_state.models[app_label, model_name]
            # Are there any relationships out from this model? if so, punt it to the next phase.
            related_fields = []
            for field in new_apps.get_model(app_label, model_name)._meta.local_fields:
                if field.rel:
                    if field.rel.to:
                        related_fields.append((field.name, field.rel.to._meta.app_label.lower(), field.rel.to._meta.object_name.lower()))
                    # BUGFIX: this used to read ``field.rel.though`` (typo),
                    # which raised AttributeError for any relation that has a
                    # custom (non auto-created) ``through`` model.
                    if hasattr(field.rel, "through") and not field.rel.through._meta.auto_created:
                        related_fields.append((field.name, field.rel.through._meta.app_label.lower(), field.rel.through._meta.object_name.lower()))
            if related_fields:
                pending_add[app_label, model_name] = related_fields
            else:
                self.add_to_migration(
                    app_label,
                    operations.CreateModel(
                        name=model_state.name,
                        fields=model_state.fields,
                        options=model_state.options,
                        bases=model_state.bases,
                    )
                )
        # Phase 2 is progressively adding pending models, splitting up into two
        # migrations if required.
        pending_new_fks = []
        while pending_add:
            # Is there one we can add that has all dependencies satisfied?
            satisfied = [(m, rf) for m, rf in pending_add.items() if all((al, mn) not in pending_add for f, al, mn in rf)]
            if satisfied:
                (app_label, model_name), related_fields = sorted(satisfied)[0]
                model_state = self.to_state.models[app_label, model_name]
                self.add_to_migration(
                    app_label,
                    operations.CreateModel(
                        name=model_state.name,
                        fields=model_state.fields,
                        options=model_state.options,
                        bases=model_state.bases,
                    )
                )
                for field_name, other_app_label, other_model_name in related_fields:
                    # Cross-app relations need an inter-app dependency.
                    if app_label != other_app_label:
                        self.add_dependency(app_label, other_app_label)
                del pending_add[app_label, model_name]
            # Ah well, we'll need to split one. Pick deterministically.
            else:
                (app_label, model_name), related_fields = sorted(pending_add.items())[0]
                model_state = self.to_state.models[app_label, model_name]
                # Work out the fields that need splitting out
                bad_fields = dict((f, (al, mn)) for f, al, mn in related_fields if (al, mn) in pending_add)
                # Create the model, without those
                self.add_to_migration(
                    app_label,
                    operations.CreateModel(
                        name=model_state.name,
                        fields=[(n, f) for n, f in model_state.fields if n not in bad_fields],
                        options=model_state.options,
                        bases=model_state.bases,
                    )
                )
                # Add the bad fields to be made in a phase 3
                for field_name, (other_app_label, other_model_name) in bad_fields.items():
                    pending_new_fks.append((app_label, model_name, field_name, other_app_label))
                del pending_add[app_label, model_name]
        # Phase 3 is adding the final set of FKs as separate new migrations
        for app_label, model_name, field_name, other_app_label in pending_new_fks:
            model_state = self.to_state.models[app_label, model_name]
            self.add_to_migration(
                app_label,
                operations.AddField(
                    model_name=model_name,
                    name=field_name,
                    field=model_state.get_field_by_name(field_name),
                ),
                new=True,
            )
            if app_label != other_app_label:
                self.add_dependency(app_label, other_app_label)
        # Removing models
        removed_models = set(old_model_keys) - set(new_model_keys)
        for app_label, model_name in removed_models:
            model_state = self.from_state.models[app_label, model_name]
            self.add_to_migration(
                app_label,
                operations.DeleteModel(
                    model_state.name,
                )
            )
        # Changes within models
        kept_models = set(old_model_keys).intersection(new_model_keys)
        old_fields = set()
        new_fields = set()
        for app_label, model_name in kept_models:
            old_model_state = self.from_state.models[app_label, model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            # Collect field changes for later global dealing with (so AddFields
            # always come before AlterFields even on separate models)
            old_fields.update((app_label, model_name, x) for x, y in old_model_state.fields)
            new_fields.update((app_label, model_name, x) for x, y in new_model_state.fields)
            # Unique_together changes
            if old_model_state.options.get("unique_together", set()) != new_model_state.options.get("unique_together", set()):
                self.add_to_migration(
                    app_label,
                    operations.AlterUniqueTogether(
                        name=model_name,
                        unique_together=new_model_state.options.get("unique_together", set()),
                    )
                )
        # New fields
        for app_label, model_name, field_name in new_fields - old_fields:
            old_model_state = self.from_state.models[app_label, model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            field = new_model_state.get_field_by_name(field_name)
            # Scan to see if this is actually a rename!
            field_dec = field.deconstruct()[1:]
            found_rename = False
            for rem_app_label, rem_model_name, rem_field_name in (old_fields - new_fields):
                if rem_app_label == app_label and rem_model_name == model_name:
                    if old_model_state.get_field_by_name(rem_field_name).deconstruct()[1:] == field_dec:
                        if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
                            self.add_to_migration(
                                app_label,
                                operations.RenameField(
                                    model_name=model_name,
                                    old_name=rem_field_name,
                                    new_name=field_name,
                                )
                            )
                            old_fields.remove((rem_app_label, rem_model_name, rem_field_name))
                            new_fields.remove((app_label, model_name, field_name))
                            found_rename = True
                            break
            if found_rename:
                continue
            # You can't just add NOT NULL fields with no default
            if not field.null and not field.has_default():
                field = field.clone()
                field.default = self.questioner.ask_not_null_addition(field_name, model_name)
                self.add_to_migration(
                    app_label,
                    operations.AddField(
                        model_name=model_name,
                        name=field_name,
                        field=field,
                        preserve_default=False,
                    )
                )
            else:
                self.add_to_migration(
                    app_label,
                    operations.AddField(
                        model_name=model_name,
                        name=field_name,
                        field=field,
                    )
                )
        # Old fields
        for app_label, model_name, field_name in old_fields - new_fields:
            old_model_state = self.from_state.models[app_label, model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            self.add_to_migration(
                app_label,
                operations.RemoveField(
                    model_name=model_name,
                    name=field_name,
                )
            )
        # The same fields
        for app_label, model_name, field_name in old_fields.intersection(new_fields):
            # Did the field change?
            old_model_state = self.from_state.models[app_label, model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            old_field_dec = old_model_state.get_field_by_name(field_name).deconstruct()
            new_field_dec = new_model_state.get_field_by_name(field_name).deconstruct()
            if old_field_dec != new_field_dec:
                self.add_to_migration(
                    app_label,
                    operations.AlterField(
                        model_name=model_name,
                        name=field_name,
                        field=new_model_state.get_field_by_name(field_name),
                    )
                )
        # Alright, now add internal dependencies
        for app_label, migrations in self.migrations.items():
            for m1, m2 in zip(migrations, migrations[1:]):
                m2.dependencies.append((app_label, m1.name))
        # Clean up dependencies
        for app_label, migrations in self.migrations.items():
            for migration in migrations:
                migration.dependencies = list(set(migration.dependencies))
        return self.migrations

    def add_to_migration(self, app_label, operation, new=False):
        """
        Appends *operation* to app_label's most recent migration, first
        creating a new auto-named migration if none exists yet (or if
        *new* is True, which forces a fresh migration).
        """
        migrations = self.migrations.setdefault(app_label, [])
        if not migrations or new:
            subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []})
            instance = subclass("auto_%i" % (len(migrations) + 1), app_label)
            migrations.append(instance)
        migrations[-1].operations.append(operation)

    def add_dependency(self, app_label, other_app_label):
        """
        Adds a dependency to app_label's newest migration on
        other_app_label's latest migration.
        """
        if self.migrations.get(other_app_label, []):
            dependency = (other_app_label, self.migrations[other_app_label][-1].name)
        else:
            dependency = (other_app_label, "__first__")
        self.migrations[app_label][-1].dependencies.append(dependency)

    def _arrange_for_graph(self, changes, graph):
        """
        Takes in a result from changes() and a MigrationGraph,
        and fixes the names and dependencies of the changes so they
        extend the graph from the leaf nodes for each app.
        """
        leaves = graph.leaf_nodes()
        name_map = {}
        for app_label, migrations in list(changes.items()):
            if not migrations:
                continue
            # Find the app label's current leaf node
            app_leaf = None
            for leaf in leaves:
                if leaf[0] == app_label:
                    app_leaf = leaf
                    break
            # Do they want an initial migration for this app?
            if app_leaf is None and not self.questioner.ask_initial(app_label):
                # They don't.
                for migration in migrations:
                    name_map[(app_label, migration.name)] = (app_label, "__first__")
                del changes[app_label]
                # BUGFIX: without this ``continue`` the loop fell through and
                # renamed the migrations it had just discarded, clobbering
                # the "__first__" entries recorded in name_map above.
                continue
            # Work out the next number in the sequence
            if app_leaf is None:
                next_number = 1
            else:
                next_number = (self.parse_number(app_leaf[1]) or 0) + 1
            # Name each migration
            for i, migration in enumerate(migrations):
                if i == 0 and app_leaf:
                    migration.dependencies.append(app_leaf)
                if i == 0 and not app_leaf:
                    new_name = "0001_initial"
                else:
                    new_name = "%04i_%s" % (next_number, self.suggest_name(migration.operations))
                name_map[(app_label, migration.name)] = (app_label, new_name)
                # BUGFIX: next_number was never incremented, so several new
                # migrations for the same app could be given colliding names.
                next_number += 1
                migration.name = new_name
        # Now fix dependencies
        for app_label, migrations in changes.items():
            for migration in migrations:
                migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
        return changes

    def _trim_to_apps(self, changes, app_labels):
        """
        Takes changes from arrange_for_graph and set of app labels and
        returns a modified set of changes which trims out as many migrations
        that are not in app_labels as possible.

        Note that some other migrations may still be present, as they may be
        required dependencies.
        """
        # Gather other app dependencies in a first pass
        app_dependencies = {}
        for app_label, migrations in changes.items():
            for migration in migrations:
                for dep_app_label, name in migration.dependencies:
                    app_dependencies.setdefault(app_label, set()).add(dep_app_label)
        required_apps = set(app_labels)
        # Keep resolving till there's no change
        old_required_apps = None
        while old_required_apps != required_apps:
            old_required_apps = set(required_apps)
            for app_label in list(required_apps):
                required_apps.update(app_dependencies.get(app_label, set()))
        # Remove all migrations that aren't needed
        for app_label in list(changes.keys()):
            if app_label not in required_apps:
                del changes[app_label]
        return changes

    @classmethod
    def suggest_name(cls, ops):
        """
        Given a set of operations, suggests a name for the migration
        they might represent. Names are not guaranteed to be unique,
        but we put some effort in to the fallback name to avoid VCS conflicts
        if we can.
        """
        if len(ops) == 1:
            if isinstance(ops[0], operations.CreateModel):
                return ops[0].name.lower()
            elif isinstance(ops[0], operations.DeleteModel):
                return "delete_%s" % ops[0].name.lower()
            elif isinstance(ops[0], operations.AddField):
                return "%s_%s" % (ops[0].model_name.lower(), ops[0].name.lower())
            elif isinstance(ops[0], operations.RemoveField):
                return "remove_%s_%s" % (ops[0].model_name.lower(), ops[0].name.lower())
        elif all(isinstance(o, operations.CreateModel) for o in ops):
            return "_".join(sorted(o.name.lower() for o in ops))
        return "auto_%s" % datetime.datetime.now().strftime("%Y%m%d_%H%M")

    @classmethod
    def parse_number(cls, name):
        """
        Given a migration name, tries to extract a number from the
        beginning of it. If no number found, returns None.
        """
        if re.match(r"^\d+_", name):
            return int(name.split("_")[0])
        return None
| true | true |
f71d195ce940b491c55de3778d47b6668d1f5280 | 8,949 | py | Python | app.py | WilliBobadilla/Salud-Web | fbfb38d6726005d84b76bf9432cba6aff9bd88b4 | [
"MIT"
] | null | null | null | app.py | WilliBobadilla/Salud-Web | fbfb38d6726005d84b76bf9432cba6aff9bd88b4 | [
"MIT"
] | null | null | null | app.py | WilliBobadilla/Salud-Web | fbfb38d6726005d84b76bf9432cba6aff9bd88b4 | [
"MIT"
] | null | null | null | from flask import Flask,render_template,request,redirect
import socket
# Flask application object; every @app.route view below registers on it.
app = Flask(__name__)
# Module-level placeholder for a patient ID and the dev-server LAN address.
cedula = ""
ip = '192.168.1.130'
# In-memory patient store keyed by cedula (national ID), e.g.
# persona1[cedula]["nombre"] holds that patient's first name.
persona1 = {
    "5591945": {
        "nombre": "Mauricio",
        "apellido": "Acosta",
        "fecha_de_nacimiento": "23-12-2002",
        "sexo": "m",
        "ciudad": "San_Antonio",
        "grupo_sanguineo": "positivo",
        "telefono": "98768785",
        "vacunas": [
            {"fecha": "18-06-2019", "vacuna_aplicada": "antigripal", "proxima_vacuna": "28-10-2020"},
            {"fecha": "23-03-2018", "vacuna_aplicada": "antirabico", "proxima_vacuna": "18-08-2019"},
            {"fecha": "13-07-2019", "vacuna_aplicada": "antitetanico", "proxima_vacuna": "20-02-2019"},
        ],
        "historial_medico": [
            {"fecha_inicio": "02-02-2019",
             "diagnostico_medico": "infeccion_en_las_vias_orinarias",
             "estudios_realizados": "analisis_de_orina",
             "receta": "antibioticos",
             "fecha_culminacion": "10-02-2019"},
        ],
        "estudios_medicos": [
            {"fecha_de_inicio": "11-03-2019",
             "diagnostico_medico": "fiebre",
             "estudio_realizados": "analisis_de_orina",
             "receta": "z-mol",
             "proxima_consulta": "14-03-2019"},
        ],
    },
    "982145358": {
        "nombre": "paula",
        "apellido": "santacruz",
        "fecha_de_nacimiento": "24-09-2002",
        "sexo": "f",
        "ciudad": "capiata",
        "grupo_sanguineo": "positivo",
        "telefono": "765765",
        "vacunas": [
            {"fecha": "10-08-2019", "vacuna_aplicada": "antigripal", "proxima_vacuna": "10-08-2020"},
            {"fecha": "26-12-2019", "vacuna_aplicada": "viruela", "proxima_vacuna": "28-10-2020"},
            {"fecha": "13-02-2019", "vacuna_aplicada": "hepatitis", "proxima_vacuna": "28-10-2020"},
        ],
    },
}
# (removed) A ~78-line triple-quoted string literal used to sit here holding
# commented-out earlier layouts of the sample data (a "persona" list and a
# "database" dict). It was an unused expression statement with no runtime
# effect — dead code — so it has been deleted.
@app.route("/modificador/<string:cedula>/", methods=['GET','POST'])
def modificador(cedula):
    """
    GET: show the edit form for the patient identified by *cedula*.
    POST: copy the submitted form fields into the patient's record in
    ``persona1`` and redirect back to that patient's data page.
    """
    if request.method == 'POST':
        # Form-field name -> patient-record key. Replaces six identical
        # copy-pasted get/assign pairs.
        form_to_key = (
            ("telefono1", "telefono"),
            ("grupo1", "grupo_sanguineo"),
            ("lugar1", "ciudad"),
            ("fecha1", "fecha_de_nacimiento"),
            ("apellido4", "apellido"),
            ("nombre4", "nombre"),
        )
        for form_field, key in form_to_key:
            persona1[cedula][key] = request.values.get(form_field)
        return redirect("https://saludbc3.herokuapp.com/datos/" + cedula)
    else:
        return render_template("modificador.html", paciente=persona1[cedula], cedula=cedula)
#persona2{
#print(database["5591945"]["nombre"])
@app.route("/datos/<string:cedula>/")
def datos(cedula):
    """Render the data page for the patient identified by *cedula*."""
    registro = persona1[cedula]
    print("EL dato es ")
    print(registro)
    return render_template("datos.html", paciente=registro, cedula=cedula)
@app.route("/datos/")
def datos1():
    """/datos/ without a cedula: send the visitor to the registration form."""
    redireccionar = "http://" + ip + "/registro"
    print(redireccionar)
    # The LAN URL above is only logged; the actual redirect goes to production.
    return redirect("https://saludbc3.herokuapp.com/registro")
@app.route("/",methods=['GET','POST'])
def for_home():
    """
    Home page. On POST, look the submitted cedula up in the in-memory
    store: known patients are redirected to their data page, unknown
    ones to the registration form. On GET just render the landing page.
    """
    if request.method == 'POST':
        cedula = request.form.get('cedula')
        if cedula in persona1:
            print("si esta la cedula")
            return redirect("https://saludbc3.herokuapp.com/datos/" + cedula)
        else:
            return redirect("https://saludbc3.herokuapp.com/registro")
    # Fix: dropped the unused local ``local`` the original computed here.
    return render_template("index.html", cedula="5591945")
@app.route("/vacuna/<string:cedula>/")
def vacuna_html(cedula):
    """Render the vaccination-record page for one patient."""
    registro = persona1[cedula]
    return render_template("vacuna.html", paciente=registro, cedula=cedula)
@app.route("/historial/<string:cedula>/")
def historial_html(cedula):
    """Render the medical-history page for one patient."""
    registro = persona1[cedula]
    print(registro)
    return render_template("historial.html", paciente=registro, cedula=cedula)
@app.route("/estudio/<string:cedula>/")
def estudio_html(cedula):
    """Render the medical-studies page for one patient."""
    registro = persona1[cedula]
    print(registro)
    return render_template("estudio.html", paciente=registro, cedula=cedula)
@app.route("/formulario",methods=['GET','POST'])
def formulario():
    """
    Receive the registration form (POST) and create a new patient record
    in ``persona1``, then redirect to the new patient's data page.
    Non-POST requests are bounced to the registration form.
    """
    if request.method == 'POST':
        # Build the record from the form in one pass instead of eight
        # copy-pasted get/assign pairs (dict preserves this key order).
        campos = ("nombre", "apellido", "fecha_de_nacimiento", "sexo",
                  "ciudad", "grupo_sanguineo", "telefono")
        diccionario = {campo: request.form.get(campo) for campo in campos}
        diccionario["vacunas"] = []
        nueva_cedula = request.form.get('cedula')
        persona1[nueva_cedula] = diccionario
        local = "https://saludbc3.herokuapp.com/datos/"
        print(persona1)
        return redirect(local + nueva_cedula)
    else:
        # NOTE(review): ``cedula`` here is the module-level global (always
        # ""), so this effectively redirects to .../registro/ — confirm
        # that was the intent.
        redireccionar = "http://" + ip + "/registro/"
        local = "https://saludbc3.herokuapp.com/registro/"
        print(redireccionar)
        return redirect(local + cedula)
    # Fix: removed the unreachable ``return render_template("registro.html")``
    # that followed the if/else (both branches already return).
@app.route("/registro")
def registro_html():
    """Serve the blank patient-registration form."""
    pagina = render_template("registro.html")
    return pagina
# Development entry point: bind Flask's built-in server to the LAN address.
# NOTE(review): app.run() blocks, so the /otros/ route defined *below* this
# guard never gets registered when the file is executed as a script —
# consider moving this block to the end of the file.
if __name__=='__main__':
    ip='192.168.1.130'
    app.run(host=ip)
@app.route("/otros/<string:cedula>/")
def otros_html(cedula):
    """Render the 'otros' (miscellaneous) page for one patient."""
    registro = persona1[cedula]
    print(registro)
    return render_template("otros.html", paciente=registro, cedula=cedula)
| 36.526531 | 123 | 0.603531 | from flask import Flask,render_template,request,redirect
import socket
# Flask application object; every @app.route view below registers on it.
app = Flask(__name__)
# Module-level placeholder for a patient ID and the dev-server LAN address.
cedula = ""
ip = '192.168.1.130'
# In-memory patient store keyed by cedula (national ID).
persona1 = {
    "5591945": {
        "nombre": "Mauricio",
        "apellido": "Acosta",
        "fecha_de_nacimiento": "23-12-2002",
        "sexo": "m",
        "ciudad": "San_Antonio",
        "grupo_sanguineo": "positivo",
        "telefono": "98768785",
        "vacunas": [
            {"fecha": "18-06-2019", "vacuna_aplicada": "antigripal", "proxima_vacuna": "28-10-2020"},
            {"fecha": "23-03-2018", "vacuna_aplicada": "antirabico", "proxima_vacuna": "18-08-2019"},
            {"fecha": "13-07-2019", "vacuna_aplicada": "antitetanico", "proxima_vacuna": "20-02-2019"},
        ],
        "historial_medico": [
            {"fecha_inicio": "02-02-2019",
             "diagnostico_medico": "infeccion_en_las_vias_orinarias",
             "estudios_realizados": "analisis_de_orina",
             "receta": "antibioticos",
             "fecha_culminacion": "10-02-2019"},
        ],
        "estudios_medicos": [
            {"fecha_de_inicio": "11-03-2019",
             "diagnostico_medico": "fiebre",
             "estudio_realizados": "analisis_de_orina",
             "receta": "z-mol",
             "proxima_consulta": "14-03-2019"},
        ],
    },
    "982145358": {
        "nombre": "paula",
        "apellido": "santacruz",
        "fecha_de_nacimiento": "24-09-2002",
        "sexo": "f",
        "ciudad": "capiata",
        "grupo_sanguineo": "positivo",
        "telefono": "765765",
        "vacunas": [
            {"fecha": "10-08-2019", "vacuna_aplicada": "antigripal", "proxima_vacuna": "10-08-2020"},
            {"fecha": "26-12-2019", "vacuna_aplicada": "viruela", "proxima_vacuna": "28-10-2020"},
            {"fecha": "13-02-2019", "vacuna_aplicada": "hepatitis", "proxima_vacuna": "28-10-2020"},
        ],
    },
}
@app.route("/modificador/<string:cedula>/", methods=['GET','POST'])
def modificador(cedula):
    """
    GET: show the edit form for the patient identified by *cedula*.
    POST: copy the submitted form fields into the patient's record in
    ``persona1`` and redirect back to that patient's data page.
    """
    if request.method == 'POST':
        # Form-field name -> patient-record key. Replaces six identical
        # copy-pasted get/assign pairs.
        form_to_key = (
            ("telefono1", "telefono"),
            ("grupo1", "grupo_sanguineo"),
            ("lugar1", "ciudad"),
            ("fecha1", "fecha_de_nacimiento"),
            ("apellido4", "apellido"),
            ("nombre4", "nombre"),
        )
        for form_field, key in form_to_key:
            persona1[cedula][key] = request.values.get(form_field)
        return redirect("https://saludbc3.herokuapp.com/datos/" + cedula)
    else:
        return render_template("modificador.html", paciente=persona1[cedula], cedula=cedula)
@app.route("/datos/<string:cedula>/")
def datos(cedula):
    # Detail page for one patient; cedula is the key into the in-memory
    # persona1 store. An unknown cedula raises KeyError (HTTP 500).
    print("EL dato es ")
    print(persona1[cedula])
    return render_template("datos.html",paciente=persona1[cedula],cedula=cedula)
@app.route("/datos/")
def datos1():
    """Bare /datos/ URL has no cedula to display: send the visitor to the
    registration form instead.

    Fix: the old body built (and printed) an unused LAN URL from the
    module-level `ip` before redirecting to the hard-coded deployment URL;
    the dead local and debug prints are removed.
    """
    return redirect("https://saludbc3.herokuapp.com/registro")
@app.route("/", methods=['GET', 'POST'])
def for_home():
    """Home page with a cedula search form.

    POST: look the submitted cedula up in the in-memory store and redirect
    to the patient's detail page, or to the registration form if unknown.
    GET: render the search form.
    """
    # Base URL for patient detail pages. Fix: this was computed but never
    # used — the redirect duplicated the literal instead.
    local = "https://saludbc3.herokuapp.com/datos/"
    if request.method == 'POST':
        cedula = request.form.get('cedula')
        if cedula in persona1:
            print("si esta la cedula")
            return redirect(local + cedula)
        return redirect("https://saludbc3.herokuapp.com/registro")
    return render_template("index.html", cedula="5591945")
@app.route("/vacuna/<string:cedula>/")
def vacuna_html(cedula):
    # Vaccination history page for the patient identified by cedula.
    return render_template("vacuna.html",paciente=persona1[cedula],cedula=cedula)
@app.route("/historial/<string:cedula>/")
def historial_html(cedula):
    # Medical history page; print is debug output of the full record.
    print(persona1[cedula])
    return render_template("historial.html",paciente=persona1[cedula],cedula=cedula)
@app.route("/estudio/<string:cedula>/")
def estudio_html(cedula):
    # Medical studies page; print is debug output of the full record.
    print(persona1[cedula])
    return render_template("estudio.html",paciente=persona1[cedula],cedula=cedula)
@app.route("/formulario", methods=['GET', 'POST'])
def formulario():
    """Create a new patient record from the registration form.

    POST: build a record from the submitted fields, store it in the
    in-memory persona1 store keyed by cedula, and redirect to the new
    patient's detail page.
    GET: render the blank registration form.
    """
    if request.method == 'POST':
        cedula = request.form.get('cedula')
        registro = {
            "nombre": request.form.get('nombre'),
            "apellido": request.form.get('apellido'),
            "fecha_de_nacimiento": request.form.get('fecha_de_nacimiento'),
            "sexo": request.form.get('sexo'),
            "ciudad": request.form.get('ciudad'),
            "grupo_sanguineo": request.form.get('grupo_sanguineo'),
            "telefono": request.form.get('telefono'),
            "vacunas": [],
        }
        persona1[cedula] = registro
        print(persona1)
        return redirect("https://saludbc3.herokuapp.com/datos/" + cedula)
    # Fix: the GET branch previously redirected to "/registro/" + the
    # module-level `cedula` (always ""), i.e. "/registro/", which 404s
    # because the route is registered as "/registro" (no trailing slash).
    # Render the form directly; the old trailing return was unreachable.
    return render_template("registro.html")
@app.route("/registro")
def registro_html():
    # Blank patient-registration form. NOTE(review): presumably posts to
    # /formulario — confirm against the template.
    return render_template("registro.html")
# Fix: otros_html was declared AFTER app.run(). app.run() blocks, so the
# "/otros/..." route was never registered when executing this file as a
# script. Register the route first, then start the server.
@app.route("/otros/<string:cedula>/")
def otros_html(cedula):
    """Render the "other data" page for the patient identified by cedula."""
    print(persona1[cedula])
    return render_template("otros.html", paciente=persona1[cedula], cedula=cedula)

if __name__ == '__main__':
    ip = '192.168.1.130'  # LAN address the dev server binds to
    app.run(host=ip)
f71d196ba1b47dd3706f99db8fce3924c59d89f1 | 1,344 | py | Python | services/data/data/dependencies/aliases.py | eodcgmbh/openeo-openshift-driver | c4f256a6cd3eac358cbe32f6fcc734dc69c7b115 | [
"Apache-2.0"
] | null | null | null | services/data/data/dependencies/aliases.py | eodcgmbh/openeo-openshift-driver | c4f256a6cd3eac358cbe32f6fcc734dc69c7b115 | [
"Apache-2.0"
] | 5 | 2021-02-08T20:29:22.000Z | 2022-03-11T23:44:17.000Z | services/data/data/dependencies/aliases.py | eodcgmbh/openeo-openshift-driver | c4f256a6cd3eac358cbe32f6fcc734dc69c7b115 | [
"Apache-2.0"
] | null | null | null | """Aliases for products, to enable the finding of product with different specifiers.
** Might be deprecated, if this information is included in the CSW database **
"""
# Alias spellings keyed by canonical product id. Most products have a
# single canonical spelling; Sentinel-2A accepts several common variants.
_ALIASES_BY_PRODUCT = {
    's2a_prd_msil1c': [
        "s2a_prd_msil1c",
        "sentinel2",
        "sentinel 2",
        "sentinel-2",
        "sentinel-2a",
    ],
    's2b_prd_msil1c': ["s2b_prd_msil1c"],
    's1a_csar_grdh_iw': ["s1a_csar_grdh_iw"],
    's1a_csar_grdm_ew': ["s1a_csar_grdm_ew"],
    's1a_csar_slc__ew': ["s1a_csar_slc__ew"],
    's1a_csar_slc__iw': ["s1a_csar_slc__iw"],
    's1b_csar_grdh_ew': ["s1b_csar_grdh_ew"],
    's1b_csar_grdh_iw': ["s1b_csar_grdh_iw"],
    's3a_sl_1_rbt': ["s3a_sl_1_rbt"],
}

# Expand into the list-of-dicts shape consumers expect; dict insertion
# order (Python 3.7+) preserves the original list order.
product_aliases = [
    {'product_id': product_id, 'aliases': aliases}
    for product_id, aliases in _ALIASES_BY_PRODUCT.items()
]
| 20.676923 | 84 | 0.450149 |
product_aliases = [
{
'product_id': 's2a_prd_msil1c',
'aliases': [
"s2a_prd_msil1c",
"sentinel2",
"sentinel 2",
"sentinel-2",
"sentinel-2a"
]
},
{
'product_id': 's2b_prd_msil1c',
'aliases': [
"s2b_prd_msil1c"
]
},
{
'product_id': 's1a_csar_grdh_iw',
'aliases': [
"s1a_csar_grdh_iw"
]
},
{
'product_id': 's1a_csar_grdm_ew',
'aliases': [
"s1a_csar_grdm_ew"
]
},
{
'product_id': 's1a_csar_slc__ew',
'aliases': [
"s1a_csar_slc__ew"
]
},
{
'product_id': 's1a_csar_slc__iw',
'aliases': [
"s1a_csar_slc__iw"
]
},
{
'product_id': 's1b_csar_grdh_ew',
'aliases': [
"s1b_csar_grdh_ew"
]
},
{
'product_id': 's1b_csar_grdh_iw',
'aliases': [
"s1b_csar_grdh_iw"
]
},
{
'product_id': 's3a_sl_1_rbt',
'aliases': [
"s3a_sl_1_rbt"
]
}
]
| true | true |
f71d1a1fae157f45106bae2e3970d183277b8a5c | 5,921 | py | Python | orchestra/contrib/websites/models.py | RubenPX/django-orchestra | 5ab4779e1ae12ec99569d682601b7810587ed381 | [
"Unlicense"
] | null | null | null | orchestra/contrib/websites/models.py | RubenPX/django-orchestra | 5ab4779e1ae12ec99569d682601b7810587ed381 | [
"Unlicense"
] | 4 | 2021-01-30T14:26:46.000Z | 2022-03-18T16:28:39.000Z | orchestra/contrib/websites/models.py | RubenPX/django-orchestra | 5ab4779e1ae12ec99569d682601b7810587ed381 | [
"Unlicense"
] | 3 | 2022-02-06T04:35:59.000Z | 2022-03-17T00:40:17.000Z | import os
from collections import OrderedDict
from django.db import models
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from orchestra.core import validators
from orchestra.utils.functional import cached
from . import settings
from .directives import SiteDirective
class Website(models.Model):
    """Models a web site, also known as virtual host."""
    # Protocol handling modes; HTTPS_ONLY additionally redirects http->https.
    HTTP = 'http'
    HTTPS = 'https'
    HTTP_AND_HTTPS = 'http/https'
    HTTPS_ONLY = 'https-only'

    name = models.CharField(_("name"), max_length=128,
        validators=[validators.validate_name])
    account = models.ForeignKey('accounts.Account', on_delete=models.CASCADE,
        verbose_name=_("Account"), related_name='websites')
    protocol = models.CharField(_("protocol"), max_length=16,
        choices=settings.WEBSITES_PROTOCOL_CHOICES,
        default=settings.WEBSITES_DEFAULT_PROTOCOL,
        help_text=_("Select the protocol(s) for this website<br>"
                    "<tt>HTTPS only</tt> performs a redirection from <tt>http</tt> to <tt>https</tt>."))
    domains = models.ManyToManyField(settings.WEBSITES_DOMAIN_MODEL, blank=True,
        related_name='websites', verbose_name=_("domains"))
    contents = models.ManyToManyField('webapps.WebApp', through='websites.Content')
    target_server = models.ForeignKey('orchestration.Server', on_delete=models.CASCADE,
        verbose_name=_("Target Server"), related_name='websites')
    is_active = models.BooleanField(_("active"), default=True)
    comments = models.TextField(default="", blank=True)

    class Meta:
        unique_together = ('name', 'account')

    def __str__(self):
        return self.name

    @property
    def unique_name(self):
        """Site name made unique across accounts via the settings format."""
        context = self.get_settings_context()
        return settings.WEBSITES_UNIQUE_NAME_FORMAT % context

    @cached_property
    def active(self):
        """Effective activity: the site AND its account must be active."""
        return self.is_active and self.account.is_active

    def disable(self):
        """Deactivate the site and persist only that field."""
        self.is_active = False
        self.save(update_fields=('is_active',))

    def enable(self):
        """Activate the site and persist only that field.

        Fix: previously assigned False (copy-paste of disable()), so
        enable() actually disabled the site.
        """
        self.is_active = True
        self.save(update_fields=('is_active',))

    def get_settings_context(self):
        """ format settings strings """
        return {
            'id': self.id,
            'pk': self.pk,
            'home': self.get_user().get_home(),
            'user': self.get_username(),
            'group': self.get_groupname(),
            'site_name': self.name,
            'protocol': self.protocol,
        }

    def get_protocol(self):
        # Collapse the four modes onto the canonical scheme for URLs.
        if self.protocol in (self.HTTP, self.HTTP_AND_HTTPS):
            return self.HTTP
        return self.HTTPS

    @cached
    def get_directives(self):
        """Return {directive name: [values]} preserving (name, value) order."""
        directives = OrderedDict()
        for opt in self.directives.all().order_by('name', 'value'):
            try:
                directives[opt.name].append(opt.value)
            except KeyError:
                directives[opt.name] = [opt.value]
        return directives

    def get_absolute_url(self):
        # Uses the first related domain; None when the site has no domain.
        try:
            domain = self.domains.all()[0]
        except IndexError:
            return
        else:
            return '%s://%s' % (self.get_protocol(), domain)

    def get_user(self):
        return self.account.main_systemuser

    def get_username(self):
        return self.get_user().username

    def get_groupname(self):
        # Group name mirrors the system username.
        return self.get_username()

    def get_www_access_log_path(self):
        """Normalized filesystem path of the site's access log."""
        context = self.get_settings_context()
        context['unique_name'] = self.unique_name
        path = settings.WEBSITES_WEBSITE_WWW_ACCESS_LOG_PATH % context
        return os.path.normpath(path)

    def get_www_error_log_path(self):
        """Normalized filesystem path of the site's error log."""
        context = self.get_settings_context()
        context['unique_name'] = self.unique_name
        path = settings.WEBSITES_WEBSITE_WWW_ERROR_LOG_PATH % context
        return os.path.normpath(path)
class WebsiteDirective(models.Model):
    """A single named configuration directive attached to a Website.

    `name` is constrained to the registered SiteDirective choices; `value`
    is free-form text validated by the concrete directive class in clean().
    """
    website = models.ForeignKey(Website, on_delete=models.CASCADE,
        verbose_name=_("web site"), related_name='directives')
    name = models.CharField(_("name"), max_length=128, db_index=True,
        choices=SiteDirective.get_choices())
    value = models.CharField(_("value"), max_length=256, blank=True)

    def __str__(self):
        return self.name

    @cached_property
    def directive_class(self):
        # Resolve the SiteDirective subclass registered under this name.
        return SiteDirective.get(self.name)

    @cached_property
    def directive_instance(self):
        """ Per request lived directive instance """
        return self.directive_class()

    def clean(self):
        # Delegate value validation to the concrete directive implementation.
        self.directive_instance.validate(self)
class Content(models.Model):
    """Through-model mounting a WebApp at a URL path of a Website."""
    # related_name is content_set to differentiate between website.content -> webapp
    webapp = models.ForeignKey('webapps.WebApp', on_delete=models.CASCADE,
        verbose_name=_("web application"))
    website = models.ForeignKey('websites.Website', on_delete=models.CASCADE,
        verbose_name=_("web site"))
    path = models.CharField(_("path"), max_length=256, blank=True,
        validators=[validators.validate_url_path])

    class Meta:
        unique_together = ('website', 'path')

    def __str__(self):
        # Fall back to the bare path when the website row is gone.
        try:
            return self.website.name + self.path
        except Website.DoesNotExist:
            return self.path

    def clean_fields(self, *args, **kwargs):
        # Strip stray whitespace before field-level validation runs.
        self.path = self.path.strip()
        return super(Content, self).clean_fields(*args, **kwargs)

    def clean(self):
        # An empty path means the web root.
        if not self.path:
            self.path = '/'

    def get_absolute_url(self):
        # None when the parent website has no domain attached.
        try:
            domain = self.website.domains.all()[0]
        except IndexError:
            return
        else:
            return '%s://%s%s' % (self.website.get_protocol(), domain, self.path)
| 33.264045 | 104 | 0.65141 | import os
from collections import OrderedDict
from django.db import models
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from orchestra.core import validators
from orchestra.utils.functional import cached
from . import settings
from .directives import SiteDirective
class Website(models.Model):
    """Models a web site (virtual host) served for an account."""
    # Protocol handling modes; HTTPS_ONLY additionally redirects http->https.
    HTTP = 'http'
    HTTPS = 'https'
    HTTP_AND_HTTPS = 'http/https'
    HTTPS_ONLY = 'https-only'

    name = models.CharField(_("name"), max_length=128,
        validators=[validators.validate_name])
    account = models.ForeignKey('accounts.Account', on_delete=models.CASCADE,
        verbose_name=_("Account"), related_name='websites')
    protocol = models.CharField(_("protocol"), max_length=16,
        choices=settings.WEBSITES_PROTOCOL_CHOICES,
        default=settings.WEBSITES_DEFAULT_PROTOCOL,
        help_text=_("Select the protocol(s) for this website<br>"
                    "<tt>HTTPS only</tt> performs a redirection from <tt>http</tt> to <tt>https</tt>."))
    domains = models.ManyToManyField(settings.WEBSITES_DOMAIN_MODEL, blank=True,
        related_name='websites', verbose_name=_("domains"))
    contents = models.ManyToManyField('webapps.WebApp', through='websites.Content')
    target_server = models.ForeignKey('orchestration.Server', on_delete=models.CASCADE,
        verbose_name=_("Target Server"), related_name='websites')
    is_active = models.BooleanField(_("active"), default=True)
    comments = models.TextField(default="", blank=True)

    class Meta:
        unique_together = ('name', 'account')

    def __str__(self):
        return self.name

    @property
    def unique_name(self):
        """Site name made unique across accounts via the settings format."""
        context = self.get_settings_context()
        return settings.WEBSITES_UNIQUE_NAME_FORMAT % context

    @cached_property
    def active(self):
        """Effective activity: the site AND its account must be active."""
        return self.is_active and self.account.is_active

    def disable(self):
        """Deactivate the site and persist only that field."""
        self.is_active = False
        self.save(update_fields=('is_active',))

    def enable(self):
        """Activate the site and persist only that field.

        Fix: previously assigned False (copy-paste of disable()), so
        enable() actually disabled the site.
        """
        self.is_active = True
        self.save(update_fields=('is_active',))

    def get_settings_context(self):
        """Context dict used to interpolate settings format strings."""
        return {
            'id': self.id,
            'pk': self.pk,
            'home': self.get_user().get_home(),
            'user': self.get_username(),
            'group': self.get_groupname(),
            'site_name': self.name,
            'protocol': self.protocol,
        }

    def get_protocol(self):
        # Collapse the four modes onto the canonical scheme for URLs.
        if self.protocol in (self.HTTP, self.HTTP_AND_HTTPS):
            return self.HTTP
        return self.HTTPS

    @cached
    def get_directives(self):
        """Return {directive name: [values]} preserving (name, value) order."""
        directives = OrderedDict()
        for opt in self.directives.all().order_by('name', 'value'):
            try:
                directives[opt.name].append(opt.value)
            except KeyError:
                directives[opt.name] = [opt.value]
        return directives

    def get_absolute_url(self):
        # Uses the first related domain; None when the site has no domain.
        try:
            domain = self.domains.all()[0]
        except IndexError:
            return
        else:
            return '%s://%s' % (self.get_protocol(), domain)

    def get_user(self):
        return self.account.main_systemuser

    def get_username(self):
        return self.get_user().username

    def get_groupname(self):
        # Group name mirrors the system username.
        return self.get_username()

    def get_www_access_log_path(self):
        """Normalized filesystem path of the site's access log."""
        context = self.get_settings_context()
        context['unique_name'] = self.unique_name
        path = settings.WEBSITES_WEBSITE_WWW_ACCESS_LOG_PATH % context
        return os.path.normpath(path)

    def get_www_error_log_path(self):
        """Normalized filesystem path of the site's error log."""
        context = self.get_settings_context()
        context['unique_name'] = self.unique_name
        path = settings.WEBSITES_WEBSITE_WWW_ERROR_LOG_PATH % context
        return os.path.normpath(path)
class WebsiteDirective(models.Model):
    """A single named configuration directive attached to a Website.

    `name` is constrained to the registered SiteDirective choices; `value`
    is free-form text validated by the concrete directive class in clean().
    """
    website = models.ForeignKey(Website, on_delete=models.CASCADE,
        verbose_name=_("web site"), related_name='directives')
    name = models.CharField(_("name"), max_length=128, db_index=True,
        choices=SiteDirective.get_choices())
    value = models.CharField(_("value"), max_length=256, blank=True)

    def __str__(self):
        return self.name

    @cached_property
    def directive_class(self):
        # Resolve the SiteDirective subclass registered under this name.
        return SiteDirective.get(self.name)

    @cached_property
    def directive_instance(self):
        """Per-request directive instance used for validation."""
        return self.directive_class()

    def clean(self):
        # Delegate value validation to the concrete directive implementation.
        self.directive_instance.validate(self)
class Content(models.Model):
    """Through-model mounting a WebApp at a URL path of a Website."""
    webapp = models.ForeignKey('webapps.WebApp', on_delete=models.CASCADE,
        verbose_name=_("web application"))
    website = models.ForeignKey('websites.Website', on_delete=models.CASCADE,
        verbose_name=_("web site"))
    path = models.CharField(_("path"), max_length=256, blank=True,
        validators=[validators.validate_url_path])

    class Meta:
        unique_together = ('website', 'path')

    def __str__(self):
        # Fall back to the bare path when the website row is gone.
        try:
            return self.website.name + self.path
        except Website.DoesNotExist:
            return self.path

    def clean_fields(self, *args, **kwargs):
        # Strip stray whitespace before field-level validation runs.
        self.path = self.path.strip()
        return super(Content, self).clean_fields(*args, **kwargs)

    def clean(self):
        # An empty path means the web root.
        if not self.path:
            self.path = '/'

    def get_absolute_url(self):
        # None when the parent website has no domain attached.
        try:
            domain = self.website.domains.all()[0]
        except IndexError:
            return
        else:
            return '%s://%s%s' % (self.website.get_protocol(), domain, self.path)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.