| text | repo_name | path | language | license | size | keyword | text_hash |
|---|---|---|---|---|---|---|---|
| stringlengths 12–1.05M | stringlengths 5–86 | stringlengths 4–191 | stringclasses 1 value | stringclasses 15 values | int32 12–1.05M | listlengths 1–23 | stringlengths 64–64 |
from typing import Dict, Iterable, Optional
from iota import AdapterSpec, Address, BundleHash, ProposedTransaction, Tag, \
TransactionHash, TransactionTrytes, TryteString, TrytesCompatible
from iota.adapter import BaseAdapter, resolve_adapter
from iota.commands import CustomCommand, core, extended
from iota.crypto.addresses import AddressGenerator
from iota.crypto.types import Seed
# Public API of this module: the async node-communication classes.
__all__ = [
'AsyncIota',
'AsyncStrictIota',
]
class AsyncStrictIota:
    """
    Asynchronous API to send HTTP requests for communicating with an
    IOTA node.

    This implementation only exposes the "core" API methods. For a more
    feature-complete implementation, use :py:class:`AsyncIota` instead.

    References:

    - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference

    :param AdapterSpec adapter:
        URI string or BaseAdapter instance.

    :param Optional[bool] devnet:
        Whether to use devnet settings for this instance.
        On the devnet, minimum weight magnitude is set to 9, on mainnet
        it is 14 by default.

    :param Optional[bool] local_pow:
        Whether to perform proof-of-work locally by redirecting all calls
        to :py:meth:`attach_to_tangle` to
        `ccurl pow interface <https://pypi.org/project/PyOTA-PoW/>`_.

        See :ref:`README:Optional Local Pow` for more info and
        :ref:`find out<pow-label>` how to use it.
    """

    def __init__(
            self,
            adapter: AdapterSpec,
            devnet: bool = False,
            local_pow: bool = False
    ) -> None:
        """
        :param AdapterSpec adapter:
            URI string or BaseAdapter instance.

        :param bool devnet:
            Whether to use devnet settings for this instance.
            On the devnet, minimum weight magnitude is set to 9, on
            mainnet it is 14 by default.

        :param Optional[bool] local_pow:
            Whether to perform proof-of-work locally by redirecting all
            calls to :py:meth:`attach_to_tangle` to
            `ccurl pow interface <https://pypi.org/project/PyOTA-PoW/>`_.

            See :ref:`README:Optional Local Pow` for more info and
            :ref:`find out<pow-label>` how to use it.
        """
        super().__init__()

        if not isinstance(adapter, BaseAdapter):
            adapter = resolve_adapter(adapter)

        self.adapter: BaseAdapter = adapter
        # Note that the `local_pow` parameter is passed to the adapter;
        # the api class has no notion about it. The reason being,
        # that this parameter is used in `AttachToTangleCommand` calls,
        # that is called from various api calls (`attach_to_tangle`,
        # `send_trytes` or `send_transfer`). Inside `AttachToTangleCommand`,
        # we no longer have access to the attributes of the API class,
        # therefore `local_pow` needs to be associated with the adapter.
        # Logically, `local_pow` will decide if the api call does pow
        # via pyota-pow extension, or sends the request to a node.
        # But technically, the parameter belongs to the adapter.
        self.adapter.set_local_pow(local_pow)

        self.devnet = devnet

    def create_command(self, command: str) -> CustomCommand:
        """
        Creates a pre-configured CustomCommand instance.

        This method is useful for invoking undocumented or experimental
        methods, or if you just want to troll your node for awhile.

        :param str command:
            The name of the command to create.
        """
        return CustomCommand(self.adapter, command)

    def set_local_pow(self, local_pow: bool) -> None:
        """
        Sets the :py:attr:`local_pow` attribute of the adapter of the
        api instance. If it is ``True``, :py:meth:`~Iota.attach_to_tangle`
        command calls an external interface to perform proof of work,
        instead of sending the request to a node.

        By default, :py:attr:`local_pow` is set to ``False``.
        This particular method is needed if one wants to change
        local_pow behavior dynamically.

        :param bool local_pow:
            Whether to perform pow locally.

        :returns: None
        """
        self.adapter.set_local_pow(local_pow)

    @property
    def default_min_weight_magnitude(self) -> int:
        """
        Returns the default ``min_weight_magnitude`` value to use for
        API requests.
        """
        # 9 is sufficient for the devnet; the mainnet requires 14.
        return 9 if self.devnet else 14

    async def add_neighbors(self, uris: Iterable[str]) -> dict:
        """
        Add one or more neighbors to the node. Lasts until the node is
        restarted.

        :param Iterable[str] uris:
            Use format ``<protocol>://<ip address>:<port>``.
            Example: ``add_neighbors(['udp://example.com:14265'])``

            .. note::
                These URIs are for node-to-node communication (e.g.,
                weird things will happen if you specify a node's HTTP
                API URI here).

        :return:
            ``dict`` with the following structure::

                {
                    'addedNeighbors': int,
                        Total number of added neighbors.

                    'duration': int,
                        Number of milliseconds it took to complete the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#addneighbors
        """
        return await core.AddNeighborsCommand(self.adapter)(uris=uris)

    async def attach_to_tangle(
            self,
            trunk_transaction: TransactionHash,
            branch_transaction: TransactionHash,
            trytes: Iterable[TryteString],
            min_weight_magnitude: Optional[int] = None,
    ) -> dict:
        """
        Attaches the specified transactions (trytes) to the Tangle by
        doing Proof of Work. You need to supply branchTransaction as
        well as trunkTransaction (basically the tips which you're going
        to validate and reference with this transaction) - both of which
        you'll get through the :py:meth:`get_transactions_to_approve`
        API call.

        The returned value is a different set of tryte values which you
        can input into :py:meth:`broadcast_transactions` and
        :py:meth:`store_transactions`.

        :param TransactionHash trunk_transaction:
            Trunk transaction hash.

        :param TransactionHash branch_transaction:
            Branch transaction hash.

        :param Iterable[TransactionTrytes] trytes:
            List of transaction trytes in the bundle to be attached.

        :param Optional[int] min_weight_magnitude:
            Minimum weight magnitude to be used for attaching trytes.
            14 by default on mainnet, 9 on devnet.

        :return:
            ``dict`` with the following structure::

                {
                    'trytes': List[TransactionTrytes],
                        Transaction trytes that include a valid nonce field.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#attachtotangle
        """
        if min_weight_magnitude is None:
            min_weight_magnitude = self.default_min_weight_magnitude

        return await core.AttachToTangleCommand(self.adapter)(
            trunkTransaction=trunk_transaction,
            branchTransaction=branch_transaction,
            minWeightMagnitude=min_weight_magnitude,
            trytes=trytes,
        )

    async def broadcast_transactions(
            self,
            trytes: Iterable[TryteString]
    ) -> dict:
        """
        Broadcast a list of transactions to all neighbors.

        The input trytes for this call are provided by
        :py:meth:`attach_to_tangle`.

        :param Iterable[TransactionTrytes] trytes:
            List of transaction trytes to be broadcast.

        :return:
            ``dict`` with the following structure::

                {
                    'duration': int,
                        Number of milliseconds it took to complete the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#broadcasttransactions
        """
        return await core.BroadcastTransactionsCommand(self.adapter)(
            trytes=trytes,
        )

    async def check_consistency(
            self,
            tails: Iterable[TransactionHash]
    ) -> dict:
        """
        Used to ensure tail resolves to a consistent ledger which is
        necessary to validate before attempting promotion. Checks
        transaction hashes for promotability.

        This is called with a pending transaction (or more of them) and
        it will tell you if it is still possible for this transaction
        (or all the transactions simultaneously if you give more than
        one) to be confirmed, or not (because it conflicts with another
        already confirmed transaction).

        :param Iterable[TransactionHash] tails:
            Transaction hashes. Must be tail transactions.

        :return:
            ``dict`` with the following structure::

                {
                    'state': bool,
                        Whether tails resolve to consistent ledger.

                    'info': str,
                        This field will only exist if 'state' is ``False``.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#checkconsistency
        """
        return await core.CheckConsistencyCommand(self.adapter)(
            tails=tails,
        )

    async def find_transactions(
            self,
            bundles: Optional[Iterable[BundleHash]] = None,
            addresses: Optional[Iterable[Address]] = None,
            tags: Optional[Iterable[Tag]] = None,
            approvees: Optional[Iterable[TransactionHash]] = None,
    ) -> dict:
        """
        Find the transactions which match the specified input and
        return.

        All input values are lists, for which a list of return values
        (transaction hashes), in the same order, is returned for all
        individual elements.

        Using multiple of these input fields returns the intersection
        of the values.

        :param Optional[Iterable[BundleHash]] bundles:
            List of bundle IDs.

        :param Optional[Iterable[Address]] addresses:
            List of addresses.

        :param Optional[Iterable[Tag]] tags:
            List of tags.

        :param Optional[Iterable[TransactionHash]] approvees:
            List of approvee transaction IDs.

        :return:
            ``dict`` with the following structure::

                {
                    'hashes': List[TransationHash],
                        Found transactions.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#findtransactions
        """
        return await core.FindTransactionsCommand(self.adapter)(
            bundles=bundles,
            addresses=addresses,
            tags=tags,
            approvees=approvees,
        )

    async def get_balances(
            self,
            addresses: Iterable[Address],
            tips: Optional[Iterable[TransactionHash]] = None,
    ) -> dict:
        """
        Returns the confirmed balance which a list of addresses have at
        the latest confirmed milestone.

        In addition to the balances, it also returns the milestone as
        well as the index with which the confirmed balance was
        determined. The balances are returned as a list in the same
        order as the addresses were provided as input.

        :param Iterable[Address] addresses:
            List of addresses to get the confirmed balance for.

        :param Optional[Iterable[TransactionHash]] tips:
            Tips whose history of transactions to traverse to find the
            balance.

        :return:
            ``dict`` with the following structure::

                {
                    'balances': List[int],
                        List of balances in the same order as the addresses
                        parameters that were passed to the endpoint.

                    'references': List[TransactionHash],
                        The referencing tips. If no tips parameter was passed
                        to the endpoint, this field contains the hash of the
                        latest milestone that confirmed the balance.

                    'milestoneIndex': int,
                        The index of the milestone that confirmed the most
                        recent balance.

                    'duration': int,
                        Number of milliseconds it took to process the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#getbalances
        """
        return await core.GetBalancesCommand(self.adapter)(
            addresses=addresses,
            tips=tips,
        )

    async def get_inclusion_states(
            self,
            transactions: Iterable[TransactionHash],
    ) -> dict:
        """
        Get the inclusion states of a set of transactions. This is for
        determining if a transaction was accepted and confirmed by the
        network or not.

        :param Iterable[TransactionHash] transactions:
            List of transactions you want to get the inclusion state
            for.

        :return:
            ``dict`` with the following structure::

                {
                    'states': List[bool],
                        List of boolean values in the same order as the
                        transactions parameters. A ``True`` value means the
                        transaction was confirmed.

                    'duration': int,
                        Number of milliseconds it took to process the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#getinclusionstates
        """
        return await core.GetInclusionStatesCommand(self.adapter)(
            transactions=transactions,
        )

    # Add an alias, more descriptive
    is_confirmed = get_inclusion_states

    async def get_missing_transactions(self) -> dict:
        """
        Returns all transaction hashes that a node is currently
        requesting from its neighbors.

        :return:
            ``dict`` with the following structure::

                {
                    'hashes': List[TransactionHash],
                        Array of missing transaction hashes.

                    'duration': int,
                        Number of milliseconds it took to process the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#getmissingtransactions
        """
        return await core.GetMissingTransactionsCommand(self.adapter)()

    async def get_neighbors(self) -> dict:
        """
        Returns the set of neighbors the node is connected with, as
        well as their activity count.

        The activity counter is reset after restarting IRI.

        :return:
            ``dict`` with the following structure::

                {
                    'neighbors': List[dict],
                        Array of objects, including the following fields with
                        example values:

                        "address": "/8.8.8.8:14265",
                        "numberOfAllTransactions": 158,
                        "numberOfRandomTransactionRequests": 271,
                        "numberOfNewTransactions": 956,
                        "numberOfInvalidTransactions": 539,
                        "numberOfStaleTransactions": 663,
                        "numberOfSentTransactions": 672,
                        "connectiontype": "TCP"

                    'duration': int,
                        Number of milliseconds it took to process the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#getneighbors
        """
        return await core.GetNeighborsCommand(self.adapter)()

    async def get_node_api_configuration(self) -> dict:
        """
        Returns a node's API configuration settings.

        :return:
            ``dict`` with the following structure::

                {
                    '<API-config-settings>': type,
                        Configuration parameters for a node.
                    ...
                    ...
                    ...
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/iri-configuration-options
        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#getnodeapiconfiguration
        """
        return await core.GetNodeAPIConfigurationCommand(self.adapter)()

    async def get_node_info(self) -> dict:
        """
        Returns information about the node.

        :return:
            ``dict`` with the following structure::

                {
                    'appName': str,
                        Name of the IRI network.

                    'appVersion': str,
                        Version of the IRI.

                    'jreAvailableProcessors': int,
                        Available CPU cores on the node.

                    'jreFreeMemory': int,
                        Amount of free memory in the Java virtual machine.

                    'jreMaxMemory': int,
                        Maximum amount of memory that the Java virtual machine
                        can use,

                    'jreTotalMemory': int,
                        Total amount of memory in the Java virtual machine.

                    'jreVersion': str,
                        The version of the Java runtime environment.

                    'latestMilestone': TransactionHash
                        Transaction hash of the latest milestone.

                    'latestMilestoneIndex': int,
                        Index of the latest milestone.

                    'latestSolidSubtangleMilestone': TransactionHash,
                        Transaction hash of the latest solid milestone.

                    'latestSolidSubtangleMilestoneIndex': int,
                        Index of the latest solid milestone.

                    'milestoneStartIndex': int,
                        Start milestone for the current version of the IRI.

                    'neighbors': int,
                        Total number of connected neighbor nodes.

                    'packetsQueueSize': int,
                        Size of the packet queue.

                    'time': int,
                        Current UNIX timestamp.

                    'tips': int,
                        Number of tips in the network.

                    'transactionsToRequest': int,
                        Total number of transactions that the node is missing
                        in its ledger.

                    'features': List[str],
                        Enabled configuration options.

                    'coordinatorAddress': Address,
                        Address (Merkle root) of the Coordinator.

                    'duration': int,
                        Number of milliseconds it took to process the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#getnodeinfo
        """
        return await core.GetNodeInfoCommand(self.adapter)()

    async def get_transactions_to_approve(
            self,
            depth: int,
            reference: Optional[TransactionHash] = None,
    ) -> dict:
        """
        Tip selection which returns ``trunkTransaction`` and
        ``branchTransaction``.

        :param int depth:
            Number of milestones to go back to start the tip selection
            algorithm.

            The higher the depth value, the more "babysitting" the node
            will perform for the network (as it will confirm more
            transactions that way).

        :param TransactionHash reference:
            Transaction hash from which to start the weighted random
            walk.

            Use this parameter to make sure the returned tip
            transaction hashes approve a given reference transaction.

        :return:
            ``dict`` with the following structure::

                {
                    'trunkTransaction': TransactionHash,
                        Valid trunk transaction hash.

                    'branchTransaction': TransactionHash,
                        Valid branch transaction hash.

                    'duration': int,
                        Number of milliseconds it took to complete the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#gettransactionstoapprove
        """
        return await core.GetTransactionsToApproveCommand(self.adapter)(
            depth=depth,
            reference=reference,
        )

    async def get_trytes(self, hashes: Iterable[TransactionHash]) -> dict:
        """
        Returns the raw transaction data (trytes) of one or more
        transactions.

        :return:
            ``dict`` with the following structure::

                {
                    'trytes': List[TransactionTrytes],
                        List of transaction trytes for the given transaction
                        hashes (in the same order as the parameters).

                    'duration': int,
                        Number of milliseconds it took to complete the request.
                }

        .. note::
            If a node doesn't have the trytes for a given transaction
            hash in its ledger, the value at the index of that
            transaction hash is either ``null`` or a string of 9s.

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#gettrytes
        """
        return await core.GetTrytesCommand(self.adapter)(hashes=hashes)

    async def interrupt_attaching_to_tangle(self) -> dict:
        """
        Interrupts and completely aborts the
        :py:meth:`attach_to_tangle` process.

        :return:
            ``dict`` with the following structure::

                {
                    'duration': int,
                        Number of milliseconds it took to complete the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#interruptattachingtotangle
        """
        return await core.InterruptAttachingToTangleCommand(self.adapter)()

    async def remove_neighbors(self, uris: Iterable[str]) -> dict:
        """
        Removes one or more neighbors from the node. Lasts until the
        node is restarted.

        :param str uris:
            Use format ``<protocol>://<ip address>:<port>``.
            Example: `remove_neighbors(['udp://example.com:14265'])`

        :return:
            ``dict`` with the following structure::

                {
                    'removedNeighbors': int,
                        Total number of removed neighbors.

                    'duration': int,
                        Number of milliseconds it took to complete the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#removeneighbors
        """
        return await core.RemoveNeighborsCommand(self.adapter)(uris=uris)

    async def store_transactions(self, trytes: Iterable[TryteString]) -> dict:
        """
        Store transactions into local storage of the node.

        The input trytes for this call are provided by
        :py:meth:`attach_to_tangle`.

        :param TransactionTrytes trytes:
            Valid transaction trytes returned by
            :py:meth:`attach_to_tangle`.

        :return:
            ``dict`` with the following structure::

                {
                    'trytes': TransactionTrytes,
                        Stored trytes.

                    'duration': int,
                        Number of milliseconds it took to complete the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#storetransactions
        """
        return await core.StoreTransactionsCommand(self.adapter)(trytes=trytes)

    async def were_addresses_spent_from(
            self,
            addresses: Iterable[Address]
    ) -> dict:
        """
        Check if a list of addresses was ever spent from, in the
        current epoch, or in previous epochs.

        If an address has a pending transaction, it's also considered
        'spent'.

        :param Iterable[Address] addresses:
            List of addresses to check.

        :return:
            ``dict`` with the following structure::

                {
                    'states': List[bool],
                        States of the specified addresses in the same order as
                        the values in the addresses parameter. A ``True`` value
                        means that the address has been spent from.

                    'duration': int,
                        Number of milliseconds it took to complete the request.
                }

        References:

        - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference#wereaddressesspentfrom
        """
        return await core.WereAddressesSpentFromCommand(self.adapter)(
            addresses=addresses,
        )
class AsyncIota(AsyncStrictIota):
    """
    Implements the async core API, plus additional async wrapper
    methods for common operations.

    :param AdapterSpec adapter:
        URI string or BaseAdapter instance.

    :param Optional[Seed] seed:
        Seed used to generate new addresses.
        If not provided, a random one will be generated.

        .. note::
            This value is never transferred to the node/network.

    :param Optional[bool] devnet:
        Whether to use devnet settings for this instance.
        On the devnet, minimum weight magnitude is decreased, on
        mainnet it is 14 by default.

        For more info on the Mainnet and the Devnet, visit
        `the official docs site<https://docs.iota.org/docs/getting-started/0.1/network/iota-networks/>`.

    :param Optional[bool] local_pow:
        Whether to perform proof-of-work locally by redirecting all
        calls to :py:meth:`attach_to_tangle` to
        `ccurl pow interface <https://pypi.org/project/PyOTA-PoW/>`_.

        See :ref:`README:Optional Local Pow` for more info and
        :ref:`find out<pow-label>` how to use it.

    References:

    - https://docs.iota.org/docs/node-software/0.1/iri/references/api-reference
    - https://github.com/iotaledger/wiki/blob/master/api-proposal.md
    """

    def __init__(
            self,
            adapter: AdapterSpec,
            seed: Optional[TrytesCompatible] = None,
            devnet: bool = False,
            local_pow: bool = False
    ) -> None:
        """
        :param seed:
            Seed used to generate new addresses.
            If not provided, a random one will be generated.

            .. note::
                This value is never transferred to the node/network.
        """
        super().__init__(adapter, devnet, local_pow)

        # The seed stays on the client; generate a random one when the
        # caller does not supply it.
        self.seed = Seed(seed) if seed else Seed.random()
async def broadcast_and_store(
        self,
        trytes: Iterable[TransactionTrytes]
) -> dict:
    """
    Broadcasts and stores a set of transaction trytes.

    :param Iterable[TransactionTrytes] trytes:
        Transaction trytes to broadcast and store.

    :return:
        ``dict`` with the following structure::

            {
                'trytes': List[TransactionTrytes],
                    List of TransactionTrytes that were broadcast.
                    Same as the input ``trytes``.
            }

    References:

    - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#broadcastandstore
    """
    return await extended.BroadcastAndStoreCommand(self.adapter)(
        trytes=trytes,
    )
async def broadcast_bundle(
        self,
        tail_transaction_hash: TransactionHash
) -> dict:
    """
    Re-broadcasts all transactions in a bundle given the tail
    transaction hash.

    It might be useful when transactions did not properly propagate,
    particularly in the case of large bundles.

    :param TransactionHash tail_transaction_hash:
        Tail transaction hash of the bundle.

    :return:
        ``dict`` with the following structure::

            {
                'trytes': List[TransactionTrytes],
                    List of TransactionTrytes that were broadcast.
            }

    References:

    - https://github.com/iotaledger/iota.js/blob/next/api_reference.md#module_core.broadcastBundle
    """
    return await extended.BroadcastBundleCommand(self.adapter)(
        tail_hash=tail_transaction_hash,
    )
async def find_transaction_objects(
        self,
        bundles: Optional[Iterable[BundleHash]] = None,
        addresses: Optional[Iterable[Address]] = None,
        tags: Optional[Iterable[Tag]] = None,
        approvees: Optional[Iterable[TransactionHash]] = None,
) -> dict:
    """
    A more extensive version of :py:meth:`find_transactions` that
    returns transaction objects instead of hashes.

    Effectively, this is :py:meth:`find_transactions` +
    :py:meth:`get_trytes` + converting the trytes into
    transaction objects.

    It accepts the same parameters as :py:meth:`find_transactions`.

    Find the transactions which match the specified input.
    All input values are lists, for which a list of return values
    (transaction hashes), in the same order, is returned for all
    individual elements. Using multiple of these input fields returns
    the intersection of the values.

    :param Optional[Iterable[BundleHash]] bundles:
        List of bundle IDs.

    :param Optional[Iterable[Address]] addresses:
        List of addresses.

    :param Optional[Iterable[Tag]] tags:
        List of tags.

    :param Optional[Iterable[TransactionHash]] approvees:
        List of approvee transaction IDs.

    :return:
        ``dict`` with the following structure::

            {
                'transactions': List[Transaction],
                    List of Transaction objects that match the input.
            }
    """
    return await extended.FindTransactionObjectsCommand(self.adapter)(
        bundles=bundles,
        addresses=addresses,
        tags=tags,
        approvees=approvees,
    )
async def get_account_data(
        self,
        start: int = 0,
        stop: Optional[int] = None,
        inclusion_states: bool = False,
        security_level: Optional[int] = None
) -> dict:
    """
    More comprehensive version of :py:meth:`get_transfers` that
    returns addresses and account balance in addition to bundles.

    This function is useful in getting all the relevant information
    of your account.

    :param int start:
        Starting key index.

    :param Optional[int] stop:
        Stop before this index.

        Note that this parameter behaves like the ``stop`` attribute
        in a :py:class:`slice` object; the stop index is *not*
        included in the result.

        If ``None`` (default), then this method will check every
        address until it finds one that is unused.

        .. note::
            An unused address is an address that **has not been spent
            from** and **has no transactions** referencing it on the
            Tangle.

            A snapshot removes transactions from the Tangle. As a
            consequence, after a snapshot, it may happen that this API
            does not return the correct account data with ``stop``
            being ``None``.

            As a workaround, you can save your used addresses and
            their ``key_index`` attribute in a local database. Use the
            ``start`` and ``stop`` parameters to tell the API from
            where to start checking and where to stop.

    :param bool inclusion_states:
        Whether to also fetch the inclusion states of the transfers.

        This requires an additional API call to the node, so it is
        disabled by default.

    :param Optional[int] security_level:
        Number of iterations to use when generating new addresses
        (see :py:meth:`get_new_addresses`).

        This value must be between 1 and 3, inclusive.

        If not set, defaults to
        :py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.

    :return:
        ``dict`` with the following structure::

            {
                'addresses': List[Address],
                    List of generated addresses.

                    Note that this list may include unused
                    addresses.

                'balance': int,
                    Total account balance. Might be 0.

                'bundles': List[Bundle],
                    List of bundles with transactions to/from this
                    account.
            }
    """
    return await extended.GetAccountDataCommand(self.adapter)(
        seed=self.seed,
        start=start,
        stop=stop,
        inclusionStates=inclusion_states,
        security_level=security_level
    )
async def get_bundles(
        self,
        transactions: Iterable[TransactionHash]
) -> dict:
    """
    Returns the bundle(s) associated with the specified transaction
    hashes.

    :param Iterable[TransactionHash] transactions:
        Transaction hashes. Must be a tail transaction.

    :return:
        ``dict`` with the following structure::

            {
                'bundles': List[Bundle],
                    List of matching bundles. Note that this value is
                    always a list, even if only one bundle was found.
            }

    :raise :py:class:`iota.adapter.BadApiResponse`:
        - if any of the bundles fails validation.
        - if any of the bundles is not visible on the Tangle.

    References:

    - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getbundle
    """
    return await extended.GetBundlesCommand(self.adapter)(
        transactions=transactions,
    )
async def get_inputs(
        self,
        start: int = 0,
        stop: Optional[int] = None,
        threshold: Optional[int] = None,
        security_level: Optional[int] = None,
) -> dict:
    """
    Gets all possible inputs of a seed and returns them, along with
    the total balance.

    This is either done deterministically (by generating all
    addresses until :py:meth:`find_transactions` returns an empty
    result), or by providing a key range to search.

    :param int start:
        Starting key index.
        Defaults to 0.

    :param Optional[int] stop:
        Stop before this index.

        Note that this parameter behaves like the ``stop`` attribute
        in a :py:class:`slice` object; the stop index is *not*
        included in the result.

        If ``None`` (default), then this method will not stop until
        it finds an unused address.

        .. note::
            An unused address is an address that **has not been spent
            from** and **has no transactions** referencing it on the
            Tangle.

            A snapshot removes transactions from the Tangle. As a
            consequence, after a snapshot, it may happen that this API
            does not return the correct inputs with ``stop`` being
            ``None``.

            As a workaround, you can save your used addresses and
            their ``key_index`` attribute in a local database. Use the
            ``start`` and ``stop`` parameters to tell the API from
            where to start checking for inputs and where to stop.

    :param Optional[int] threshold:
        If set, determines the minimum threshold for a successful
        result:

        - As soon as this threshold is reached, iteration will stop.
        - If the command runs out of addresses before the threshold
          is reached, an exception is raised.

        .. note::
            This method does not attempt to "optimize" the result
            (e.g., smallest number of inputs, get as close to
            ``threshold`` as possible, etc.); it simply accumulates
            inputs in order until the threshold is met.

        If ``threshold`` is 0, the first address in the key range
        with a non-zero balance will be returned (if it exists).

        If ``threshold`` is ``None`` (default), this method will
        return **all** inputs in the specified key range.

    :param Optional[int] security_level:
        Number of iterations to use when generating new addresses
        (see :py:meth:`get_new_addresses`).

        This value must be between 1 and 3, inclusive.

        If not set, defaults to
        :py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.

    :return:
        ``dict`` with the following structure::

            {
                'inputs': List[Address],
                    Addresses with nonzero balances that can be used
                    as inputs.

                'totalBalance': int,
                    Aggregate balance from all matching addresses.
            }

        Note that each :py:class:`Address` in the result has its
        :py:attr:`Address.balance` attribute set.

        Example:

        .. code-block:: python

            response = iota.get_inputs(...)

            input0 = response['inputs'][0] # type: Address

            input0.balance # 42

    :raise:
        - :py:class:`iota.adapter.BadApiResponse` if ``threshold``
          is not met. Not applicable if ``threshold`` is ``None``.

    References:

    - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getinputs
    """
    return await extended.GetInputsCommand(self.adapter)(
        seed=self.seed,
        start=start,
        stop=stop,
        threshold=threshold,
        securityLevel=security_level
    )
async def get_new_addresses(
        self,
        index: int = 0,
        count: Optional[int] = 1,
        security_level: int = AddressGenerator.DEFAULT_SECURITY_LEVEL,
        checksum: bool = False,
) -> dict:
    """
    Generates one or more new addresses from the seed.

    :param int index:
        The key index of the first new address to generate (must be
        >= 0).

    :param Optional[int] count:
        Number of addresses to generate (must be >= 1).

        .. tip::
            This is more efficient than calling
            :py:meth:`get_new_addresses` inside a loop.

        If ``None``, this method will progressively generate
        addresses and scan the Tangle until it finds one that has no
        transactions referencing it and was never spent from.

        .. note::
            A snapshot removes transactions from the Tangle. As a
            consequence, after a snapshot, it may happen that when
            ``count`` is ``None``, this API call returns a "new"
            address that used to have transactions before the
            snapshot.

            As a workaround, you can save your used addresses and
            their ``key_index`` attribute in a local database. Use
            the ``index`` parameter to tell the API from where to
            start generating and checking new addresses.

    :param int security_level:
        Number of iterations to use when generating new addresses.

        Larger values take longer, but the resulting signatures are
        more secure.

        This value must be between 1 and 3, inclusive.

    :param bool checksum:
        Specify whether to return the address with the checksum.
        Defaults to ``False``.

    :return:
        ``dict`` with the following structure::

            {
                'addresses': List[Address],
                    Always a list, even if only one address was
                    generated.
            }

    References:

    - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getnewaddress
    """
    return await extended.GetNewAddressesCommand(self.adapter)(
        count=count,
        index=index,
        securityLevel=security_level,
        checksum=checksum,
        seed=self.seed,
    )
async def get_transaction_objects(
        self,
        hashes: Iterable[TransactionHash],
) -> dict:
    """
    Fetches transaction objects from the Tangle given their
    transaction IDs (hashes).

    Effectively, this is :py:meth:`get_trytes` +
    converting the trytes into transaction objects.

    Similar to :py:meth:`find_transaction_objects`, but accepts
    list of transaction hashes as input.

    :param Iterable[TransactionHash] hashes:
        List of transaction IDs (transaction hashes).

    :return:
        ``dict`` with the following structure::

            {
                'transactions': List[Transaction],
                    List of Transaction objects that match the input.
            }
    """
    return await extended.GetTransactionObjectsCommand(self.adapter)(
        hashes=hashes,
    )
async def get_transfers(
        self,
        start: int = 0,
        stop: Optional[int] = None,
        inclusion_states: bool = False
) -> dict:
    """
    Returns all transfers associated with this instance's seed.

    :param int start:
        Starting key index.

    :param Optional[int] stop:
        Stop before this index (exclusive, like the ``stop`` of a
        :py:class:`slice`).  When ``None`` (default), addresses are
        checked until an unused one is found.

        .. note::
            A snapshot removes transactions from the Tangle, so with
            ``stop=None`` transfers from before the snapshot may be
            missed.  Persist used addresses and their ``key_index``
            locally and pass explicit ``start``/``stop`` values to
            work around this.

    :param bool inclusion_states:
        Also fetch inclusion states of the transfers.  Requires an
        extra API call to the node, so it is disabled by default.

    :return:
        ``dict`` with the following structure::

            {
                'bundles': List[Bundle],
                    Matching bundles, sorted by tail transaction
                    timestamp.  Always a list, even for a single
                    match.
            }

    References:
        - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#gettransfers
    """
    command = extended.GetTransfersCommand(self.adapter)
    return await command(
        seed=self.seed,
        start=start,
        stop=stop,
        inclusionStates=inclusion_states,
    )
async def is_promotable(
        self,
        tails: Iterable[TransactionHash],
) -> dict:
    """
    Checks whether the given tail transaction(s) can be promoted.

    Calls :py:meth:`check_consistency` and verifies that
    ``attachmentTimestamp`` is above a lower bound computed from the
    number of milestones issued since the transaction was attached.

    :param Iterable[TransactionHash] tails:
        List of tail transaction hashes.

    :return:
        ``dict`` mimicking the result of :py:meth:`check_consistency`::

            {
                'promotable': bool,
                    ``True`` when every tail is promotable; otherwise
                    consult ``info``.
                'info': Optional(List[str])
                    Present only when ``promotable`` is ``False``;
                    describes what went wrong.
            }

    References:
        - https://github.com/iotaledger/iota.js/blob/next/api_reference.md#module_core.isPromotable
    """
    command = extended.IsPromotableCommand(self.adapter)
    return await command(tails=tails)
async def prepare_transfer(
        self,
        transfers: Iterable[ProposedTransaction],
        inputs: Optional[Iterable[Address]] = None,
        change_address: Optional[Address] = None,
        security_level: Optional[int] = None,
) -> dict:
    """
    Prepares transactions for broadcast: builds the bundle, and
    selects and signs the inputs for value transfers.

    :param Iterable[ProposedTransaction] transfers:
        Transaction objects to prepare.

    :param Optional[Iterable[Address]] inputs:
        Addresses used to fund the transfer (ignored for zero-value
        transfers).  When omitted, the Tangle is scanned for unspent
        inputs automatically, which may take a while for seeds with
        many past transfers.

    :param Optional[Address] change_address:
        Destination for any unspent remainder when ``inputs`` are
        given.  Generated automatically when not specified.

    :param Optional[int] security_level:
        Iterations used when generating new addresses (see
        :py:meth:`get_new_addresses`); must be in [1, 3].  Defaults to
        :py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.

    :return:
        ``dict`` with the following structure::

            {
                'trytes': List[TransactionTrytes],
                    Raw bundle trytes, ready for
                    :py:meth:`send_trytes`.
            }

    References:
        - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#preparetransfers
    """
    command = extended.PrepareTransferCommand(self.adapter)
    return await command(
        seed=self.seed,
        transfers=transfers,
        inputs=inputs,
        changeAddress=change_address,
        securityLevel=security_level,
    )
async def promote_transaction(
        self,
        transaction: TransactionHash,
        depth: int = 3,
        min_weight_magnitude: Optional[int] = None,
) -> dict:
    """
    Promotes a transaction by attaching spam on top of it.

    :param TransactionHash transaction:
        Transaction hash; must be a tail transaction.

    :param int depth:
        Depth at which to attach the bundle.  Defaults to 3.

    :param Optional[int] min_weight_magnitude:
        Min weight magnitude used by the node to calibrate Proof of
        Work.  Falls back to this instance's default when omitted.

    :return:
        ``dict`` with the following structure::

            {
                'bundle': Bundle,
                    The newly-published bundle.
            }
    """
    mwm = (
        self.default_min_weight_magnitude
        if min_weight_magnitude is None
        else min_weight_magnitude
    )
    command = extended.PromoteTransactionCommand(self.adapter)
    return await command(
        transaction=transaction,
        depth=depth,
        minWeightMagnitude=mwm,
    )
async def replay_bundle(
        self,
        transaction: TransactionHash,
        depth: int = 3,
        min_weight_magnitude: Optional[int] = None,
) -> dict:
    """
    Fetches the bundle belonging to a tail transaction and replays it
    by re-attaching it to the Tangle.

    :param TransactionHash transaction:
        Transaction hash; must be a tail.

    :param int depth:
        Depth at which to attach the bundle.  Defaults to 3.

    :param Optional[int] min_weight_magnitude:
        Min weight magnitude used by the node to calibrate Proof of
        Work.  Falls back to this instance's default when omitted.

    :return:
        ``dict`` with the following structure::

            {
                'trytes': List[TransactionTrytes],
                    Raw trytes that were published to the Tangle.
            }

    References:
        - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer
    """
    mwm = (
        self.default_min_weight_magnitude
        if min_weight_magnitude is None
        else min_weight_magnitude
    )
    command = extended.ReplayBundleCommand(self.adapter)
    return await command(
        transaction=transaction,
        depth=depth,
        minWeightMagnitude=mwm,
    )
async def send_transfer(
        self,
        transfers: Iterable[ProposedTransaction],
        depth: int = 3,
        inputs: Optional[Iterable[Address]] = None,
        change_address: Optional[Address] = None,
        min_weight_magnitude: Optional[int] = None,
        security_level: Optional[int] = None,
) -> dict:
    """
    Prepares a set of transfers, creates the bundle, attaches it to
    the Tangle, then broadcasts and stores the transactions.

    :param Iterable[ProposedTransaction] transfers:
        Transfers to include in the bundle.

    :param int depth:
        Depth at which to attach the bundle.  Defaults to 3.

    :param Optional[Iterable[Address]] inputs:
        Inputs used to fund the transfer; not needed for zero-value
        transfers.

    :param Optional[Address] change_address:
        Destination for any unspent remainder when ``inputs`` are
        given.  Generated automatically when not specified.

    :param Optional[int] min_weight_magnitude:
        Min weight magnitude used by the node to calibrate Proof of
        Work.  Falls back to this instance's default when omitted.

    :param Optional[int] security_level:
        Iterations used when generating new addresses (see
        :py:meth:`get_new_addresses`); must be in [1, 3].  Defaults to
        :py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.

    :return:
        ``dict`` with the following structure::

            {
                'bundle': Bundle,
                    The newly-published bundle.
            }

    References:
        - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtransfer
    """
    mwm = (
        self.default_min_weight_magnitude
        if min_weight_magnitude is None
        else min_weight_magnitude
    )
    command = extended.SendTransferCommand(self.adapter)
    return await command(
        seed=self.seed,
        depth=depth,
        transfers=transfers,
        inputs=inputs,
        changeAddress=change_address,
        minWeightMagnitude=mwm,
        securityLevel=security_level,
    )
async def send_trytes(
        self,
        trytes: Iterable[TransactionTrytes],
        depth: int = 3,
        min_weight_magnitude: Optional[int] = None
) -> dict:
    """
    Attaches transaction trytes to the Tangle, then broadcasts and
    stores them.

    :param Iterable[TransactionTrytes] trytes:
        Transactions encoded as tryte sequences.

    :param int depth:
        Depth at which to attach the bundle.  Defaults to 3.

    :param Optional[int] min_weight_magnitude:
        Min weight magnitude used by the node to calibrate Proof of
        Work.  Falls back to this instance's default when omitted.

    :return:
        ``dict`` with the following structure::

            {
                'trytes': List[TransactionTrytes],
                    Raw trytes that were published to the Tangle.
            }

    References:
        - https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtrytes
    """
    mwm = (
        self.default_min_weight_magnitude
        if min_weight_magnitude is None
        else min_weight_magnitude
    )
    command = extended.SendTrytesCommand(self.adapter)
    return await command(
        trytes=trytes,
        depth=depth,
        minWeightMagnitude=mwm,
    )
async def is_reattachable(self, addresses: Iterable[Address]) -> dict:
    """
    Helps decide whether to replay a transaction or create a new one
    (with the same or a different input).

    Takes one or more input addresses (i.e. from spent transactions)
    and checks whether any value-transferring transactions on them are
    confirmed.  If so, the input has already been used successfully in
    another transaction and the original should not be replayed.

    :param Iterable[Address] addresses:
        List of addresses.

    :return:
        ``dict`` with the following structure::

            {
                'reattachable': List[bool],
                    Always a list, even if only one address was
                    queried.
            }
    """
    command = extended.IsReattachableCommand(self.adapter)
    return await command(addresses=addresses)
async def traverse_bundle(self, tail_hash: TransactionHash) -> dict:
    """
    Fetches and traverses a bundle from the Tangle, starting from its
    tail transaction hash.

    Recursively walks the Tangle, collecting transactions until a new
    bundle is reached.  Usually faster than
    :py:meth:`find_transactions`, and avoids picking up transactions
    from replayed bundles.

    :param TransactionHash tail_hash:
        Tail transaction hash of the bundle.

    :return:
        ``dict`` with the following structure::

            {
                'bundle': List[Bundle],
                    Matching bundles.  Always a list, even if only
                    one bundle was found.
            }
    """
    command = extended.TraverseBundleCommand(self.adapter)
    return await command(transaction=tail_hash)
|
iotaledger/iota.lib.py
|
iota/api_async.py
|
Python
|
mit
| 57,097
|
[
"VisIt"
] |
1404b0f1b73d3a6e67c51338698a951db1aae5556cdae2703e23a342852050e0
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
from frappe import _
from frappe.utils import format_date, get_datetime
from erpnext.utilities.transaction_base import TransactionBase
class MaintenanceVisit(TransactionBase):
    """A customer maintenance visit; syncs its outcome back to the
    Maintenance Schedule and any linked Warranty Claims."""

    def get_feed(self):
        # Text shown in the document activity feed.
        return _("To {0}").format(self.customer_name)

    def validate_serial_no(self):
        # Every purpose row naming a serial no must reference an existing
        # Serial No document.
        for d in self.get('purposes'):
            if d.serial_no and not frappe.db.exists("Serial No", d.serial_no):
                frappe.throw(_("Serial No {0} does not exist").format(d.serial_no))

    def validate_purpose_table(self):
        # A visit without at least one purpose row is meaningless.
        if not self.purposes:
            frappe.throw(_("Add Items in the Purpose Table"), title="Purposes Required")

    def validate_maintenance_date(self):
        # For scheduled visits, the visit date must fall inside the
        # start/end window of the referenced Maintenance Schedule Item.
        if self.maintenance_type == "Scheduled" and self.maintenance_schedule_detail:
            item_ref = frappe.db.get_value('Maintenance Schedule Detail', self.maintenance_schedule_detail, 'item_reference')
            if item_ref:
                start_date, end_date = frappe.db.get_value('Maintenance Schedule Item', item_ref, ['start_date', 'end_date'])
                if get_datetime(self.mntc_date) < get_datetime(start_date) or get_datetime(self.mntc_date) > get_datetime(end_date):
                    frappe.throw(_("Date must be between {0} and {1}")
                        .format(format_date(start_date), format_date(end_date)))

    def validate(self):
        # Standard frappe hook: runs all field-level validations on save.
        self.validate_serial_no()
        self.validate_maintenance_date()
        self.validate_purpose_table()

    def update_status_and_actual_date(self, cancel=False):
        # Mirror this visit's completion status and date onto the linked
        # Maintenance Schedule Detail; on cancel, reset to Pending/None.
        status = "Pending"
        actual_date = None
        if not cancel:
            status = self.completion_status
            actual_date = self.mntc_date
        if self.maintenance_schedule_detail:
            frappe.db.set_value('Maintenance Schedule Detail', self.maintenance_schedule_detail, 'completion_status', status)
            frappe.db.set_value('Maintenance Schedule Detail', self.maintenance_schedule_detail, 'actual_date', actual_date)

    def update_customer_issue(self, flag):
        # Propagate this visit's outcome to linked Warranty Claims.
        # flag == 1: submitting this visit; otherwise (cancel path) fall
        # back to the latest other partially-completed visit, if any.
        # Only applies to visits not driven by a Maintenance Schedule.
        if not self.maintenance_schedule:
            for d in self.get('purposes'):
                if d.prevdoc_docname and d.prevdoc_doctype == 'Warranty Claim' :
                    if flag==1:
                        # Use this visit's own data for the claim.
                        mntc_date = self.mntc_date
                        service_person = d.service_person
                        work_done = d.work_done
                        status = "Open"
                        if self.completion_status == 'Fully Completed':
                            status = 'Closed'
                        elif self.completion_status == 'Partially Completed':
                            status = 'Work In Progress'
                    else:
                        # Cancelling: look for the most recent other
                        # submitted, partially-completed visit against the
                        # same Warranty Claim.
                        nm = frappe.db.sql("select t1.name, t1.mntc_date, t2.service_person, t2.work_done from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent = t1.name and t1.completion_status = 'Partially Completed' and t2.prevdoc_docname = %s and t1.name!=%s and t1.docstatus = 1 order by t1.name desc limit 1", (d.prevdoc_docname, self.name))
                        if nm:
                            status = 'Work In Progress'
                            mntc_date = nm and nm[0][1] or ''
                            service_person = nm and nm[0][2] or ''
                            work_done = nm and nm[0][3] or ''
                        else:
                            # No other visit: reopen the claim and clear
                            # resolution data.
                            status = 'Open'
                            mntc_date = None
                            service_person = None
                            work_done = None
                    wc_doc = frappe.get_doc('Warranty Claim', d.prevdoc_docname)
                    wc_doc.update({
                        'resolution_date': mntc_date,
                        'resolved_by': service_person,
                        'resolution_details': work_done,
                        'status': status
                    })
                    # db_update writes directly without re-running the
                    # claim's full save cycle.
                    wc_doc.db_update()

    def check_if_last_visit(self):
        """check if last maintenance visit against same sales order/ Warranty Claim"""
        check_for_docname = None
        for d in self.get('purposes'):
            if d.prevdoc_docname:
                # NOTE: only the last purpose row's reference is kept.
                check_for_docname = d.prevdoc_docname
                #check_for_doctype = d.prevdoc_doctype
        if check_for_docname:
            # Any later submitted visit against the same reference blocks
            # cancellation of this one.
            check = frappe.db.sql("select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent = t1.name and t1.name!=%s and t2.prevdoc_docname=%s and t1.docstatus = 1 and (t1.mntc_date > %s or (t1.mntc_date = %s and t1.mntc_time > %s))", (self.name, check_for_docname, self.mntc_date, self.mntc_date, self.mntc_time))
            if check:
                check_lst = [x[0] for x in check]
                check_lst =','.join(check_lst)
                frappe.throw(_("Cancel Material Visits {0} before cancelling this Maintenance Visit").format(check_lst))
                # NOTE(review): unreachable — frappe.throw already raises.
                raise Exception
            else:
                # This is the latest visit: roll the claim status back.
                self.update_customer_issue(0)

    def on_submit(self):
        # Push outcome to linked claims and schedule on submit.
        self.update_customer_issue(1)
        frappe.db.set(self, 'status', 'Submitted')
        self.update_status_and_actual_date()

    def on_cancel(self):
        # Only the latest visit against a reference may be cancelled.
        self.check_if_last_visit()
        frappe.db.set(self, 'status', 'Cancelled')
        self.update_status_and_actual_date(cancel=True)

    def on_update(self):
        pass
|
frappe/erpnext
|
erpnext/maintenance/doctype/maintenance_visit/maintenance_visit.py
|
Python
|
gpl-3.0
| 4,558
|
[
"VisIt"
] |
929e8e094b6b1984843942bb2eb9abae44573b3c7d72309708f266deed5f0008
|
# coding: utf-8
from __future__ import division, unicode_literals
'''
Created on Sep 23, 2011
'''
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Sep 23, 2011"
import os
import random
import unittest
import json
import six
from pymatgen.core.lattice import Lattice
from pymatgen.core import PeriodicSite
from monty.json import MontyDecoder
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.transformations.standard_transformations import *
from pymatgen.symmetry.structure import SymmetrizedStructure
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files')
class TransformationsTest(unittest.TestCase):
    """Basic identity/rotation transformation tests on a 2-atom Si cell."""

    def setUp(self):
        coords = [[0, 0, 0], [0.75, 0.5, 0.75]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        self.struct = Structure(lattice, ["Si"] * 2, coords)

    def test_identity_transformation(self):
        trafo = IdentityTransformation()
        self.assertEqual(self.struct, trafo.apply_transformation(self.struct))

    def test_to_from_dict(self):
        trafo = IdentityTransformation()
        d = trafo.as_dict()
        self.assertIn("version", d)
        self.assertIn("init_args", d)
        self.assertEqual(type(IdentityTransformation.from_dict(d)),
                         IdentityTransformation)

    def test_rotation_transformation(self):
        trafo = RotationTransformation([0, 1, 0], 30, False)
        rotated = trafo.apply_transformation(self.struct)
        restored = trafo.inverse.apply_transformation(rotated)
        # Rotating and applying the inverse must recover the lattice.
        self.assertTrue((abs(restored.lattice.matrix -
                             self.struct.lattice.matrix) < 1e-8).all())
class RemoveSpeciesTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        trafo = RemoveSpeciesTransformation(["Li+"])
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "O2-", "O2-"], coords)
        # Removing both Li+ sites leaves only the oxygen sublattice.
        stripped = trafo.apply_transformation(struct)
        self.assertEqual(stripped.composition.formula, "O2")
class SubstitutionTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        trafo = SubstitutionTransformation({"Li+": "Na+", "O2-": "S2-"})
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "O2-", "O2-"], coords)
        substituted = trafo.apply_transformation(struct)
        self.assertEqual(substituted.composition.formula, "Na2 S2")

    def test_fractional_substitution(self):
        trafo = SubstitutionTransformation({"Li+": "Na+",
                                            "O2-": {"S2-": 0.5, "Se2-": 0.5}})
        # Round-trip through the dict representation to exercise
        # serialization of the nested substitution mapping.
        trafo = SubstitutionTransformation.from_dict(trafo.as_dict())
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "O2-", "O2-"], coords)
        substituted = trafo.apply_transformation(struct)
        self.assertEqual(substituted.composition.formula, "Na2 Se1 S1")
class SupercellTransformationTest(unittest.TestCase):

    def setUp(self):
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        self.struct = Structure(lattice, ["Li+", "Li+", "O2-", "O2-"], coords)

    def test_apply_transformation(self):
        # Scaling matrix with determinant 8 -> 8x the atoms.
        trafo = SupercellTransformation([[2, 1, 0], [0, 2, 0], [1, 0, 2]])
        supercell = trafo.apply_transformation(self.struct)
        self.assertEqual(supercell.composition.formula, "Li16 O16")

    def test_from_scaling_factors(self):
        factors = [random.randint(1, 5) for _ in range(3)]
        trafo = SupercellTransformation.from_scaling_factors(*factors)
        supercell = trafo.apply_transformation(self.struct)
        expected_sites = 4 * six.moves.reduce(lambda a, b: a * b, factors)
        self.assertEqual(supercell.num_sites, expected_sites)
class OxidationStateDecorationTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        trafo = OxidationStateDecorationTransformation({"Li": 1, "O": -2})
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li", "Li", "O", "O"], coords)
        decorated = trafo.apply_transformation(struct)
        # Neutral species should now carry explicit oxidation states.
        self.assertEqual(decorated[0].species_string, "Li+")
        self.assertEqual(decorated[2].species_string, "O2-")
class AutoOxiStateDecorationTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        p = Poscar.from_file(os.path.join(test_dir, 'POSCAR.LiFePO4'),
                             check_for_POTCAR=False)
        t = AutoOxiStateDecorationTransformation()
        s = t.apply_transformation(p.structure)
        expected_oxi = {"Li": 1, "P": 5, "O": -2, "Fe": 2}
        for site in s:
            self.assertEqual(site.specie.oxi_state,
                             expected_oxi[site.specie.symbol])

    def test_to_from_dict(self):
        # Bug fix: this was named ``to_from_dict``, so unittest never
        # collected or ran it (test methods must start with ``test``).
        t = AutoOxiStateDecorationTransformation()
        d = t.as_dict()
        t = AutoOxiStateDecorationTransformation.from_dict(d)
        self.assertEqual(t.analyzer.dist_scale_factor, 1.015)
class OxidationStateRemovalTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        trafo = OxidationStateRemovalTransformation()
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "O2-", "O2-"], coords)
        stripped = trafo.apply_transformation(struct)
        # Oxidation states are removed, leaving neutral elements.
        self.assertEqual(stripped[0].species_string, "Li")
        self.assertEqual(stripped[2].species_string, "O")
class PartialRemoveSpecieTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        trafo = PartialRemoveSpecieTransformation("Li+", 1.0 / 3, True)
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "Li+", "O2-"], coords)
        # Removing 1/3 of three Li+ sites yields 2 distinct orderings.
        self.assertEqual(len(trafo.apply_transformation(struct, 100)), 2)

    def test_apply_transformation_fast(self):
        trafo = PartialRemoveSpecieTransformation("Li+", 0.5)
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25],
                  [0.1, 0.1, 0.1],
                  [0.3, 0.75, 0.3]]
        lattice = Lattice([[10, 0.00, 0.00], [0, 10, 0.00], [0.00, 0, 10]])
        struct = Structure(lattice, ["Li+"] * 6, coords)
        fast_opt_s = trafo.apply_transformation(struct)
        trafo = PartialRemoveSpecieTransformation(
            "Li+", 0.5, PartialRemoveSpecieTransformation.ALGO_COMPLETE)
        slow_opt_s = trafo.apply_transformation(struct)
        # The fast heuristic must find the same optimum as the full search.
        self.assertAlmostEqual(EwaldSummation(fast_opt_s).total_energy,
                               EwaldSummation(slow_opt_s).total_energy, 4)
        self.assertEqual(fast_opt_s, slow_opt_s)

    def test_apply_transformations_complete_ranking(self):
        p = Poscar.from_file(os.path.join(test_dir, 'POSCAR.LiFePO4'),
                             check_for_POTCAR=False)
        decorate = OxidationStateDecorationTransformation(
            {"Li": 1, "Fe": 2, "P": 5, "O": -2})
        decorated = decorate.apply_transformation(p.structure)
        trafo = PartialRemoveSpecieTransformation(
            "Li+", 0.5, PartialRemoveSpecieTransformation.ALGO_COMPLETE)
        self.assertEqual(len(trafo.apply_transformation(decorated, 10)), 6)

    def test_apply_transformations_best_first(self):
        p = Poscar.from_file(os.path.join(test_dir, 'POSCAR.LiFePO4'),
                             check_for_POTCAR=False)
        decorate = OxidationStateDecorationTransformation(
            {"Li": 1, "Fe": 2, "P": 5, "O": -2})
        decorated = decorate.apply_transformation(p.structure)
        trafo = PartialRemoveSpecieTransformation(
            "Li+", 0.5, PartialRemoveSpecieTransformation.ALGO_BEST_FIRST)
        self.assertEqual(len(trafo.apply_transformation(decorated)), 26)
class OrderDisorderedStructureTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        trafo = OrderDisorderedStructureTransformation()
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        mixed_occupancy = {"Si4+": 0.5, "O2-": 0.25, "P5+": 0.25}
        struct = Structure(lattice, [dict(mixed_occupancy) for _ in range(4)],
                           coords)
        output = trafo.apply_transformation(struct, return_ranked_list=50)
        self.assertEqual(len(output), 12)
        self.assertIsInstance(output[0]['structure'], Structure)

        struct = Structure(lattice, [{"Si4+": 0.5}, {"Si4+": 0.5},
                                     {"P5+": 0.5, "O2-": 0.5},
                                     {"P5+": 0.5, "O2-": 0.5}],
                           coords)
        output = trafo.apply_transformation(struct, return_ranked_list=50)
        self.assertIsInstance(output, list)
        self.assertEqual(len(output), 4)
        # The ranked list is sorted, so the first entry is the minimum.
        self.assertEqual(trafo.lowest_energy_structure, output[0]['structure'])

        struct = Structure(lattice, [{"Si4+": 0.5}, {"Si4+": 0.5},
                                     {"O2-": 0.5}, {"O2-": 0.5}], coords)
        allstructs = trafo.apply_transformation(struct, 50)
        self.assertEqual(len(allstructs), 4)

        struct = Structure(lattice, [{"Si4+": 0.333}, {"Si4+": 0.333},
                                     {"Si4+": 0.333}, "O2-"], coords)
        allstructs = trafo.apply_transformation(struct, 50)
        self.assertEqual(len(allstructs), 3)

    def test_symmetrized_structure(self):
        trafo = OrderDisorderedStructureTransformation(
            symmetrized_structures=True)
        coords = [[0.5, 0.5, 0.5],
                  [0.45, 0.45, 0.45],
                  [0.56, 0.56, 0.56],
                  [0.25, 0.75, 0.75],
                  [0.75, 0.25, 0.25]]
        species = ['Si4+',
                   {"Si4+": 0.5},
                   {"Si4+": 0.5},
                   {"Si4+": 0.5},
                   {"Si4+": 0.5}]
        lattice = Lattice.cubic(5)
        struct = Structure(lattice, species, coords)
        test_site = PeriodicSite("Si4+", coords[2], lattice)
        struct = SymmetrizedStructure(struct, 'not_real', [0, 1, 1, 2, 2])
        output = trafo.apply_transformation(struct)
        self.assertTrue(test_site in output.sites)

    def test_too_small_cell(self):
        trafo = OrderDisorderedStructureTransformation()
        coords = [[0.5, 0.5, 0.5]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        # One site cannot realize three fractional occupancies of 1/3.
        struct = Structure(lattice, [{"X4+": 0.33, "O2-": 0.33, "P5+": 0.33}],
                           coords)
        self.assertRaises(ValueError, trafo.apply_transformation, struct)

    def test_best_first(self):
        trafo = OrderDisorderedStructureTransformation(algo=2)
        coords = [[0, 0, 0],
                  [0.75, 0.75, 0.75],
                  [0.5, 0.5, 0.5],
                  [0.25, 0.25, 0.25]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        mixed_occupancy = {"Si4+": 0.5, "O2-": 0.25, "P5+": 0.25}
        struct = Structure(lattice, [dict(mixed_occupancy) for _ in range(4)],
                           coords)
        output = trafo.apply_transformation(struct, return_ranked_list=3)
        self.assertAlmostEqual(output[0]['energy'], -175.0599307, 4,
                               'got incorrect energy')
class PrimitiveCellTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        trafo = PrimitiveCellTransformation()
        coords = [[0, 0, 0],
                  [0.375, 0.375, 0.375],
                  [.5, .5, .5],
                  [0.875, 0.875, 0.875],
                  [0.125, 0.125, 0.125],
                  [0.25, 0.25, 0.25],
                  [0.625, 0.625, 0.625],
                  [0.75, 0.75, 0.75]]
        lattice = Lattice([[3.8401979337, 0.00, 0.00],
                           [1.9200989668, 3.3257101909, 0.00],
                           [0.00, -2.2171384943, 3.1355090603]])
        struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+",
                                     "O2-", "O2-", "O2-", "O2-"],
                           coords)
        # The 8-atom cell reduces to a 4-atom primitive cell.
        primitive = trafo.apply_transformation(struct)
        self.assertEqual(len(primitive), 4)

        with open(os.path.join(test_dir, "TiO2_super.json")) as f:
            supercell = json.load(f, cls=MontyDecoder)
        prim = trafo.apply_transformation(supercell)
        self.assertEqual(prim.formula, "Ti4 O8")
class PerturbStructureTransformationTest(unittest.TestCase):

    def test_apply_transformation(self):
        trafo = PerturbStructureTransformation(0.05)
        coords = [[0, 0, 0],
                  [0.375, 0.375, 0.375],
                  [.5, .5, .5],
                  [0.875, 0.875, 0.875],
                  [0.125, 0.125, 0.125],
                  [0.25, 0.25, 0.25],
                  [0.625, 0.625, 0.625],
                  [0.75, 0.75, 0.75]]
        lattice = [[3.8401979337, 0.00, 0.00],
                   [1.9200989668, 3.3257101909, 0.00],
                   [0.00, -2.2171384943, 3.1355090603]]
        struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+",
                                     "O2-", "O2-", "O2-", "O2-"], coords)
        perturbed = trafo.apply_transformation(struct)
        # Each site should be displaced by exactly the perturbation radius.
        for index, site in enumerate(perturbed):
            self.assertAlmostEqual(site.distance(struct[index]), 0.05)
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    unittest.main()
|
rousseab/pymatgen
|
pymatgen/transformations/tests/test_standard_transformations.py
|
Python
|
mit
| 16,832
|
[
"VASP",
"pymatgen"
] |
a7b4ac8babda210137ea31f08eee882671cd3f0b99d6e0dd2ee18072b59eabf6
|
# force floating point division. Can still use integer with //
from __future__ import division
# other good compatibility recquirements for python3
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
# This file is used for importing the common utilities classes.
import numpy as np
import matplotlib.pyplot as plt
import sys,scipy
from FitUtil.EnergyLandscapes.Inverse_Boltzmann.Python.Code import \
InverseBoltzmann
def get_extension_bins_and_distribution(extension, bins):
    """
    Return the (normalized) probability distribution of extension bins.

    Args:
        extension: array to digitize
        bins: passed to np.histogram
    Returns:
        tuple of <left side of bins, histogram distribution>
    """
    # Bug fix: np.histogram's ``normed`` keyword was deprecated and has
    # been removed from modern numpy; ``density=True`` is the supported
    # spelling of the same normalization.
    distribution, bin_edges = np.histogram(a=extension, bins=bins,
                                           density=True)
    # np.histogram returns N+1 edges; keep only the left edge of each bin.
    return bin_edges[:-1], distribution
def _normalization_factor_for_histogram_to_sum_1(P_q):
"""
Returns what to multiply each extension bin by such that
P_q will sum and integrate to one (assuming renormalization of P_q
after multiplying the extensions
Args:
P_q: a probability distribution
Returns:
see description
"""
# We want (1) and (2):
# (1) Int P_q dq ~ sum_q (P(q) dq) = 1
# (2) sum_q P_q = 1
# it follows if we choose
# dq -> dq * sum_q P_q
# and enforce normaliztion (1), then
# P_q -> P_q / (sum_q P_q)
# so that we get 2 for free
return sum(P_q)
def _normalize(factor,P_q,q,*args):
"""
See enforce_normalization_sum_1(P_q,q,*args), except 'factor' is manually
specified
Args:
factor: what to multiply all the extension variables by
Returns:
see enforce_normalization_sum_1(P_q,q,*args)
"""
q_ret = q * factor
to_ret = [a*factor for a in args]
P_q_ret = P_q/np.trapz(y=P_q,x=q_ret)
return [factor,P_q_ret,q_ret] + to_ret
def enforce_normalization_sum_1(P_q, q, *args):
    """
    Normalize so that P_q both sums and integrates to one.

    Args:
        P_q: probability distribution, size N
        q: extension, size N
        *args: any additional quantities to scale; same units as q
    Returns:
        tuple of <factor, P_q_normalized, q_normalized, *args normalized>
    """
    # The scaling factor is the sum of P_q; stretching the extension axis
    # by it and re-normalizing the integral makes P_q sum to one as well
    # (helpers inlined here; behavior unchanged).
    factor = sum(P_q)
    q_scaled = q * factor
    extras_scaled = [a * factor for a in args]
    P_scaled = P_q / np.trapz(y=P_q, x=q_scaled)
    return [factor, P_scaled, q_scaled] + extras_scaled
def denormalize(factor, P_q, q, *args):
    """
    Inverse of enforce_normalization_sum_1: scale the extension axis by
    ``1/factor`` (instead of ``factor``) and re-normalize P_q over it.
    """
    # Helper inlined; behavior unchanged from _normalize(1/factor, ...).
    inverse = 1 / factor
    q_scaled = q * inverse
    extras_scaled = [a * inverse for a in args]
    P_scaled = P_q / np.trapz(y=P_q, x=q_scaled)
    return [inverse, P_scaled, q_scaled] + extras_scaled
def normalize_to_sum_1(bins, extension, gaussian_stdev):
    """
    Normalize the given quantities so that the extension distribution
    both sums and integrates to one.

    Args:
        extension: size N, same units as gaussian_stdev
        gaussian_stdev: the standard deviation of the gaussian psf
        bins: the input to np.histogram
    Returns:
        see enforce_normalization_sum_1; gaussian_stdev is scaled and
        returned as well
    """
    # Histogram the extension in whatever units the caller supplies.
    bin_edges, P_q = get_extension_bins_and_distribution(extension, bins=bins)
    # Enforcing normalization is essential for downstream deconvolution.
    return enforce_normalization_sum_1(P_q, bin_edges, extension,
                                       gaussian_stdev)
def extension_deconvolution(gaussian_stdev,extension,bins,
                            interpolate_kwargs = dict(),
                            deconvolve_common_kwargs=dict(p_0=None,
                                                          n_iters=300,
                                                          delta_tol=1e-9,
                                                          return_full=False,
                                                          r_0=1)):
    """
    Deconvolve the gaussian point-spread function out of an extension
    vs time trace.

    Args:
        gaussian_stdev: of the (assumed gaussian) point-spread function
        extension: extension vs time, size N
        bins: passed to get_extension_bins_and_distribution (np.histogram);
            can be a number, or a list of left bin edges
        interpolate_kwargs: see
            InverseBoltzmann.interpolate_and_deconvolve_gaussian_psf
        deconvolve_common_kwargs: passed as kwargs to
            interpolate_and_deconvolve_gaussian_psf
    Returns:
        tuple of <interpolated extensions, raw probability,
        deconvolved probability>
    """
    extension_factor,P_q_u,bins_u,extension_u,gaussian_stdev_u = \
        normalize_to_sum_1(bins,extension,gaussian_stdev)
    sum_to_check = sum(P_q_u)
    int_to_check = np.trapz(y=P_q_u,x=bins_u)
    assert abs(sum_to_check - 1) < 1e-2 , \
        "Sum-normalization didn't work, got {:.4g}, not 1".format(sum_to_check)
    # bug fix: the integral check previously lacked abs(), so any integral
    # *below* one (however wrong) silently passed
    assert abs(int_to_check - 1) < 1e-2 , \
        "Int-normalization didn't work, got {:.4g}, not 1".format(int_to_check)
    # POST: everything is normalized as we want (or within a few percent;
    # this shouldn't cause extra numerical instability)
    deconvolve_kwargs = dict(gaussian_stdev=gaussian_stdev_u,
                             extension_bins = bins_u,
                             P_q = P_q_u,
                             interp_kwargs=interpolate_kwargs,
                             **deconvolve_common_kwargs)
    interp_ext,interp_prob,deconv_interpolated_probability = \
        interpolate_and_deconvolve_gaussian_psf(**deconvolve_kwargs)
    # convert the extensions back to unnormalized units and renormalize the
    # probabilities so that they match up.
    # note: we *dont* denormalize interp_ext twice (so it is '_' the first
    # time)
    _,interp_prob,_ = \
        denormalize(extension_factor,interp_prob,interp_ext)
    _,deconv_interpolated_probability,interp_ext = \
        denormalize(extension_factor,deconv_interpolated_probability,interp_ext)
    return interp_ext,interp_prob,deconv_interpolated_probability
def smart_interpolation_factor(extension,bins,gaussian_stdev,**kw):
    """
    Choose an interpolation (upscale) factor from the data itself.

    Args:
        extension, bins: see get_extension_bins_and_distribution
        gaussian_stdev: see upscale_factor_by_stdev
        **kw: passed through to upscale_factor_by_stdev
    Returns:
        the 'smart' choice of interpolation factor, given the data and
        gaussian_stdev. Useful for avoiding convolution problems.
    """
    ext_bins, _ = get_extension_bins_and_distribution(extension, bins=bins)
    return upscale_factor_by_stdev(extension_bins=ext_bins,
                                   gaussian_stdev=gaussian_stdev, **kw)
def interpolate_output(output_bins,interp_ext,interp_prob,
                       prob_deconc,**kw):
    """
    Re-interpolate both probability curves onto a new x grid.

    Args:
        output_bins: the desired x grid
        interp_ext: the x grid the probabilities currently live on
        interp_prob: the interpolated (but not deconvolved) probability
        prob_deconc: the interpolated and deconvolved probability
        **kw: passed to scipy.interpolate.interp1d
    Returns:
        tuple of <output_bins, re-normalized interp_prob,
        re-normalized prob_deconc>
    """
    def _regrid(y):
        # map y from interp_ext onto output_bins, then re-normalize so the
        # result still integrates to one
        resampled = scipy.interpolate.interp1d(x=interp_ext,
                                               y=y, **kw)(output_bins)
        return resampled / np.trapz(y=resampled, x=output_bins)
    return output_bins, _regrid(interp_prob), _regrid(prob_deconc)
def run(gaussian_stdev,extension,bins,interpolate_kwargs=dict(),
        smart_interpolation=True,**kw):
    """
    Run the full deconvolution pipeline.

    Args:
        see extension_deconvolution, except:
        smart_interpolation: boolean; if True, choose the interpolation
            upscale factor from the gaussian standard deviation
        **kw: passed to extension_deconvolution
    Returns:
        tuple of <interpolated extension, probability, and deconvolved
        probability>
    """
    # bug fix: copy before mutating. Previously 'upscale' was written into
    # the caller's dict -- and into the shared mutable default argument,
    # leaking state across calls.
    interpolate_kwargs = dict(interpolate_kwargs)
    if (smart_interpolation):
        interpolate_kwargs['upscale'] = \
            smart_interpolation_factor(extension, bins, gaussian_stdev)
    return extension_deconvolution(gaussian_stdev,
                                   extension, bins,
                                   interpolate_kwargs=interpolate_kwargs,
                                   **kw)
def save_data(out_file,interp_ext,interp_prob,prob_deconc,output_interpolated,
              bins,delimiter=",",fmt=str("%.15g"),**kw):
    """
    Save <extension bin, raw probability, deconvolved probability> as csv,
    possibly interpolating back to an evenly-spaced grid first.

    Args:
        out_file: where to save (passed to np.savetxt)
        interp_ext, interp_prob: the non-deconvolved bins and probability
        prob_deconc: the deconvolved probability
        output_interpolated: boolean; if False, re-interpolate onto 'bins'
            evenly-spaced points spanning interp_ext before saving
        bins: number of points for the non-interpolated output grid
        delimiter, fmt, **kw: passed to np.savetxt
    """
    if (not output_interpolated):
        # then interpolate back onto an evenly-spaced grid of 'bins' points
        output_bins = np.linspace(min(interp_ext), max(interp_ext), num=bins,
                                  endpoint=True)
        interp_ext, interp_prob, prob_deconc = \
            interpolate_output(output_bins, interp_ext, interp_prob,
                               prob_deconc)
    header = "extension bin -- raw probability -- deconvolved probability" + \
             " (Inverse Boltzmann, (c) Patrick Heenan 2017)"
    # bug fix: the header string was previously built but never handed to
    # savetxt. savetxt prepends its own comment prefix ('# '), so the
    # redundant leading '# ' was dropped from the string itself.
    kw.setdefault('header', header)
    X = np.array((interp_ext, interp_prob, prob_deconc)).T
    np.savetxt(fname=out_file, X=X, delimiter=delimiter, fmt=fmt, **kw)
def run_and_save_data(gaussian_stdev,extension,bins,out_file,
                      run_kwargs=dict(interpolate_kwargs=dict()),
                      save_kwargs=dict(output_interpolated=True)):
    """
    Convenience wrapper: run a deconvolution, then write the result out.

    Args:
        gaussian_stdev, extension, bins: see run
        out_file: see save_data
        run_kwargs: extra keyword arguments forwarded to run
        save_kwargs: extra keyword arguments forwarded to save_data
    Returns:
        nothing
    """
    results = run(gaussian_stdev, extension, bins, **run_kwargs)
    save_data(out_file, *results, bins=bins, **save_kwargs)
def upscale_factor_by_stdev(extension_bins,gaussian_stdev,n_per_bin=25):
    """
    Upscale factor so that one gaussian_stdev spans ~n_per_bin bins.

    Args:
        extension_bins: the current bin grid
        gaussian_stdev: width of the gaussian psf (same units as the bins)
        n_per_bin: desired number of (upscaled) bins per gaussian_stdev
    Returns:
        max(1, n_per_bin * median bin spacing / gaussian_stdev)
    """
    typical_spacing = np.abs(np.median(np.diff(extension_bins)))
    return max(1, n_per_bin * (typical_spacing / gaussian_stdev))
def interpolate_and_deconvolve_gaussian_psf(gaussian_stdev,extension_bins,P_q,
                                            interp_kwargs=dict(),
                                            **deconvolve_kwargs):
    """
    Interpolate a distribution, then deconvolve a gaussian psf from it.

    Args:
        gaussian_stdev: standard deviation of the gaussian psf
        extension_bins, P_q: the x grid and probability to deconvolve
        interp_kwargs: passed to get_interpolated_probability
        **deconvolve_kwargs: passed to InverseBoltzmann.gaussian_deconvolve
    Returns:
        tuple of <interpolated ext, interpolated probability, deconvolved
        and interpolated probability>
    """
    fine_ext, fine_prob = get_interpolated_probability(ext=extension_bins,
                                                       raw_prob=P_q,
                                                       **interp_kwargs)
    deconvolved = InverseBoltzmann.\
        gaussian_deconvolve(extension_bins=fine_ext,
                            P_q=fine_prob,
                            gaussian_stdev=gaussian_stdev,
                            **deconvolve_kwargs)
    return fine_ext, fine_prob, deconvolved
def get_interpolated_probability(ext,raw_prob,
                                 upscale=10,kind='linear',
                                 interp_ext=None,**kwargs):
    """
    Interpolate a probability distribution (possibly onto a finer grid).

    Args:
        ext: the x values for raw_prob, size N
        raw_prob: the y values to interpolate, size N
        upscale: if interp_ext is None, interpolate onto upscale*N points
            spanning ext (set to 1 if no interpolation is desired)
        interp_ext: if not None, the grid to interpolate onto
        kind, **kwargs: passed to scipy.interpolate.interp1d
    Returns:
        tuple of <interpolation grid, interpolated probability, clipped at
        zero and normalized to integrate to one>
    """
    if (interp_ext is None):
        if (upscale > 1):
            # build the finer grid spanning the original extension range
            interp_ext = np.linspace(start=min(ext),
                                     stop=max(ext),
                                     num=ext.size * upscale)
        else:
            # no interpolation desired; evaluate on the original grid
            interp_ext = ext
    interpolator = scipy.interpolate.interp1d(x=ext,
                                              y=raw_prob,
                                              kind=kind, **kwargs)
    # evaluate, clip negative overshoot, and normalize
    smoothed = np.maximum(0, interpolator(interp_ext))
    return interp_ext, smoothed / np.trapz(y=smoothed, x=interp_ext)
|
prheenan/BioModel
|
EnergyLandscapes/Inverse_Boltzmann/Python/Code/InverseBoltzmannUtil.py
|
Python
|
gpl-2.0
| 13,659
|
[
"Gaussian"
] |
c1e53aae3dd330c650f9f5ceb96126b411fbc5d45a0d9544d57b6a78daaa0287
|
# shamelessly copied from pliExpertInfo (Vali, Mirakels, Littlesat)
from enigma import iServiceInformation, iPlayableService
from Components.Converter.Converter import Converter
from Components.Element import cached
from Components.config import config
from Tools.Transponder import ConvertToHumanReadable
from Tools.GetEcmInfo import GetEcmInfo
from Poll import Poll
from Components.Converter.ChannelNumbers import channelnumbers
def addspace(text):
	"""Return text with a trailing space appended, or unchanged if falsy."""
	return text + " " if text else text
class PliExtraInfo(Poll, Converter, object):
	"""
	Skin converter exposing extended service information: the CA system in
	use, transponder parameters, video resolution, PIDs, etc. Polls once a
	second so the displayed ECM/crypto information stays current.
	"""
	def __init__(self, type):
		# 'type' is the converter argument from the skin (e.g. "CryptoBar",
		# "TransponderInfo", "All"); it selects what getText/getBool render.
		Converter.__init__(self, type)
		Poll.__init__(self)
		self.type = type
		self.poll_interval = 1000
		self.poll_enabled = True
		# Tuple layout: (low caid, high caid, system name, bar tag,
		# show tag even when the system is unavailable) -- see
		# createCryptoBar / createCryptoSpecial for how each field is used.
		self.caid_data = (
			( "0x100", "0x1ff", "Seca", "S", True ),
			( "0x500", "0x5ff", "Via", "V", True ),
			( "0x600", "0x6ff", "Irdeto", "I", True ),
			( "0x900", "0x9ff", "NDS", "Nd", True ),
			( "0xb00", "0xbff", "Conax", "Co", True ),
			( "0xd00", "0xdff", "CryptoW", "Cw", True ),
			( "0xe00", "0xeff", "PowerVU", "P", False ),
			("0x1700", "0x17ff", "Beta", "B", True ),
			("0x1800", "0x18ff", "Nagra", "N", True ),
			("0x2600", "0x2600", "Biss", "Bi", False ),
			("0x4ae0", "0x4ae1", "Dre", "D", False ),
			("0x4aee", "0x4aee", "BulCrypt", "B1", False ),
			("0x5581", "0x5581", "BulCrypt", "B2", False )
		)
		# Tuple layout: (converter type string, caid_data bar tag,
		# True = test "selected", False = test "available") -- see getBool.
		self.ca_table = (
			("CryptoCaidSecaAvailable", "S", False),
			("CryptoCaidViaAvailable", "V", False),
			("CryptoCaidIrdetoAvailable", "I", False),
			("CryptoCaidNDSAvailable", "Nd", False),
			("CryptoCaidConaxAvailable", "Co", False),
			("CryptoCaidCryptoWAvailable", "Cw", False),
			("CryptoCaidPowerVUAvailable", "P", False),
			("CryptoCaidBetaAvailable", "B", False),
			("CryptoCaidNagraAvailable", "N", False),
			("CryptoCaidBissAvailable", "Bi", False),
			("CryptoCaidDreAvailable", "D", False),
			("CryptoCaidBulCrypt1Available","B1", False),
			("CryptoCaidBulCrypt2Available","B2", False),
			("CryptoCaidSecaSelected", "S", True),
			("CryptoCaidViaSelected", "V", True),
			("CryptoCaidIrdetoSelected", "I", True),
			("CryptoCaidNDSSelected", "Nd", True),
			("CryptoCaidConaxSelected", "Co", True),
			("CryptoCaidCryptoWSelected", "Cw", True),
			("CryptoCaidPowerVUSelected", "P", True),
			("CryptoCaidBetaSelected", "B", True),
			("CryptoCaidNagraSelected", "N", True),
			("CryptoCaidBissSelected", "Bi", True),
			("CryptoCaidDreSelected", "D", True),
			("CryptoCaidBulCrypt1Selected", "B1", True),
			("CryptoCaidBulCrypt2Selected", "B2", True),
		)
		self.ecmdata = GetEcmInfo()
		# frontend data caches, refreshed from changed()/getText()
		self.feraw = self.fedata = self.updateFEdata = None
def getCryptoInfo(self, info):
if info.getInfo(iServiceInformation.sIsCrypted) == 1:
data = self.ecmdata.getEcmData()
self.current_source = data[0]
self.current_caid = data[1]
self.current_provid = data[2]
self.current_ecmpid = data[3]
else:
self.current_source = ""
self.current_caid = "0"
self.current_provid = "0"
self.current_ecmpid = "0"
def createCryptoBar(self, info):
res = ""
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
for caid_entry in self.caid_data:
if int(caid_entry[0], 16) <= int(self.current_caid, 16) <= int(caid_entry[1], 16):
color="\c0000??00"
else:
color = "\c007?7?7?"
try:
for caid in available_caids:
if int(caid_entry[0], 16) <= caid <= int(caid_entry[1], 16):
color="\c00????00"
except:
pass
if color != "\c007?7?7?" or caid_entry[4]:
if res: res += " "
res += color + caid_entry[3]
res += "\c00??????"
return res
def createCryptoSeca(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0x100', 16) <= int(self.current_caid, 16) <= int('0x1ff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0x100', 16) <= caid <= int('0x1ff', 16):
color="\c00eeee00"
except:
pass
res = color + 'S'
res += "\c00??????"
return res
def createCryptoVia(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0x500', 16) <= int(self.current_caid, 16) <= int('0x5ff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0x500', 16) <= caid <= int('0x5ff', 16):
color="\c00eeee00"
except:
pass
res = color + 'V'
res += "\c00??????"
return res
def createCryptoIrdeto(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0x600', 16) <= int(self.current_caid, 16) <= int('0x6ff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0x600', 16) <= caid <= int('0x6ff', 16):
color="\c00eeee00"
except:
pass
res = color + 'I'
res += "\c00??????"
return res
def createCryptoNDS(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0x900', 16) <= int(self.current_caid, 16) <= int('0x9ff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0x900', 16) <= caid <= int('0x9ff', 16):
color="\c00eeee00"
except:
pass
res = color + 'NDS'
res += "\c00??????"
return res
def createCryptoConax(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0xb00', 16) <= int(self.current_caid, 16) <= int('0xbff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0xb00', 16) <= caid <= int('0xbff', 16):
color="\c00eeee00"
except:
pass
res = color + 'CO'
res += "\c00??????"
return res
def createCryptoCryptoW(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0xd00', 16) <= int(self.current_caid, 16) <= int('0xdff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0xd00', 16) <= caid <= int('0xdff', 16):
color="\c00eeee00"
except:
pass
res = color + 'CW'
res += "\c00??????"
return res
def createCryptoPowerVU(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0xe00', 16) <= int(self.current_caid, 16) <= int('0xeff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0xe00', 16) <= caid <= int('0xeff', 16):
color="\c00eeee00"
except:
pass
res = color + 'P'
res += "\c00??????"
return res
def createCryptoBeta(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0x1700', 16) <= int(self.current_caid, 16) <= int('0x17ff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0x1700', 16) <= caid <= int('0x17ff', 16):
color="\c00eeee00"
except:
pass
res = color + 'B'
res += "\c00??????"
return res
def createCryptoNagra(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0x1800', 16) <= int(self.current_caid, 16) <= int('0x18ff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0x1800', 16) <= caid <= int('0x18ff', 16):
color="\c00eeee00"
except:
pass
res = color + 'N'
res += "\c00??????"
return res
def createCryptoBiss(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0x2600', 16) <= int(self.current_caid, 16) <= int('0x26ff', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0x2600', 16) <= caid <= int('0x26ff', 16):
color="\c00eeee00"
except:
pass
res = color + 'BI'
res += "\c00??????"
return res
def createCryptoDre(self, info):
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
if int('0x4ae0', 16) <= int(self.current_caid, 16) <= int('0x4ae1', 16):
color="\c004c7d3f"
else:
color = "\c009?9?9?"
try:
for caid in available_caids:
if int('0x4ae0', 16) <= caid <= int('0x4ae1', 16):
color="\c00eeee00"
except:
pass
res = color + 'DC'
res += "\c00??????"
return res
def createCryptoSpecial(self, info):
caid_name = "FTA"
try:
for caid_entry in self.caid_data:
if int(caid_entry[0], 16) <= int(self.current_caid, 16) <= int(caid_entry[1], 16):
caid_name = caid_entry[2]
break
return caid_name + ":%04x:%04x:%04x:%04x" % (int(self.current_caid,16), int(self.current_provid,16), info.getInfo(iServiceInformation.sSID), int(self.current_ecmpid,16))
except:
pass
return ""
	def createResolution(self, info):
		"""
		Return a short video resolution string, e.g. '1920x1080p25'.
		Empty string when no video width is reported.
		"""
		xres = info.getInfo(iServiceInformation.sVideoWidth)
		if xres == -1:
			return ""
		yres = info.getInfo(iServiceInformation.sVideoHeight)
		# index 0 -> "i", 1 -> "p"; a -1 (unknown) picks the last tuple
		# entry (" ") via negative indexing
		mode = ("i", "p", "", " ")[info.getInfo(iServiceInformation.sProgressive)]
		# frame rate presumably reported in millihertz (+500 rounds to the
		# nearest Hz); relies on Python 2 integer division -- TODO confirm
		fps = str((info.getInfo(iServiceInformation.sFrameRate) + 500) / 1000)
		if int(fps) <= 0:
			fps = ""
		return str(xres) + "x" + str(yres) + mode + fps
def createVideoCodec(self, info):
return ("MPEG2", "MPEG4", "MPEG1", "MPEG4-II", "VC1", "VC1-SM", "")[info.getInfo(iServiceInformation.sVideoType)]
def createPIDInfo(self, info):
vpid = info.getInfo(iServiceInformation.sVideoPID)
apid = info.getInfo(iServiceInformation.sAudioPID)
pcrpid = info.getInfo(iServiceInformation.sPCRPID)
sidpid = info.getInfo(iServiceInformation.sSID)
tsid = info.getInfo(iServiceInformation.sTSID)
onid = info.getInfo(iServiceInformation.sONID)
if vpid < 0 : vpid = 0
if apid < 0 : apid = 0
if pcrpid < 0 : pcrpid = 0
if sidpid < 0 : sidpid = 0
if tsid < 0 : tsid = 0
if onid < 0 : onid = 0
return "%d-%d:%05d:%04d:%04d:%04d" % (onid, tsid, sidpid, vpid, apid, pcrpid)
def createTransponderInfo(self, fedata, feraw):
if "DVB-T" in feraw.get("tuner_type"):
tmp = addspace(self.createChannelNumber(fedata, feraw)) + self.createFrequency(fedata) + "/" + self.createPolarization(fedata)
else:
tmp = addspace(self.createFrequency(fedata)) + addspace(self.createPolarization(fedata))
return addspace(self.createTunerSystem(fedata)) + tmp + addspace(self.createSymbolRate(fedata, feraw)) + addspace(self.createFEC(fedata, feraw)) \
+ addspace(self.createModulation(fedata)) + self.createOrbPos(feraw)
def createFrequency(self, feraw):
frequency = feraw.get("frequency")
if frequency:
return str(frequency)
return ""
def createChannelNumber(self, fedata, feraw):
channel = channelnumbers.getChannelNumber(feraw.get("frequency"), feraw.get("tuner_number"))
if channel:
return _("CH") + "%s" % channel
return ""
def createSymbolRate(self, fedata, feraw):
if "DVB-T" in feraw.get("tuner_type"):
bandwidth = fedata.get("bandwidth")
if bandwidth:
return bandwidth
else:
symbolrate = fedata.get("symbol_rate")
if symbolrate:
return str(symbolrate)
return ""
def createPolarization(self, fedata):
polarization = fedata.get("polarization_abbreviation")
if polarization:
return polarization
return ""
def createFEC(self, fedata, feraw):
if "DVB-T" in feraw.get("tuner_type"):
code_rate_lp = fedata.get("code_rate_lp")
code_rate_hp = fedata.get("code_rate_hp")
if code_rate_lp and code_rate_hp:
return code_rate_lp + "-" + code_rate_hp
else:
fec = fedata.get("fec_inner")
if fec:
return fec
return ""
def createModulation(self, fedata):
if fedata.get("tuner_type") == _("Terrestrial"):
constellation = fedata.get("constellation")
if constellation:
return constellation
else:
modulation = fedata.get("modulation")
if modulation:
return modulation
return ""
def createTunerType(self, feraw):
tunertype = feraw.get("tuner_type")
if tunertype:
return tunertype
return ""
def createTunerSystem(self, fedata):
tunersystem = fedata.get("system")
if tunersystem:
return tunersystem
return ""
def createOrbPos(self, feraw):
orbpos = feraw.get("orbital_position")
if orbpos > 1800:
return str((float(3600 - orbpos)) / 10.0) + "\xc2\xb0 W"
elif orbpos > 0:
return str((float(orbpos)) / 10.0) + "\xc2\xb0 E"
return ""
def createOrbPosOrTunerSystem(self, fedata,feraw):
orbpos = self.createOrbPos(feraw)
if orbpos is not "":
return orbpos
return self.createTunerSystem(fedata)
def createTransponderName(self,feraw):
orb_pos = ""
orbpos = feraw.get("orbital_position")
if orbpos > 1800:
if orbpos == 3590:
orb_pos = 'Thor/Intelsat'
elif orbpos == 3560:
orb_pos = 'Amos (4'
elif orbpos == 3550:
orb_pos = 'Atlantic Bird'
elif orbpos == 3530:
orb_pos = 'Nilesat/Atlantic Bird'
elif orbpos == 3520:
orb_pos = 'Atlantic Bird'
elif orbpos == 3475:
orb_pos = 'Atlantic Bird'
elif orbpos == 3460:
orb_pos = 'Express'
elif orbpos == 3450:
orb_pos = 'Telstar'
elif orbpos == 3420:
orb_pos = 'Intelsat'
elif orbpos == 3380:
orb_pos = 'Nss'
elif orbpos == 3355:
orb_pos = 'Intelsat'
elif orbpos == 3325:
orb_pos = 'Intelsat'
elif orbpos == 3300:
orb_pos = 'Hispasat'
elif orbpos == 3285:
orb_pos = 'Intelsat'
elif orbpos == 3170:
orb_pos = 'Intelsat'
elif orbpos == 3150:
orb_pos = 'Intelsat'
elif orbpos == 3070:
orb_pos = 'Intelsat'
elif orbpos == 3045:
orb_pos = 'Intelsat'
elif orbpos == 3020:
orb_pos = 'Intelsat 9'
elif orbpos == 2990:
orb_pos = 'Amazonas'
elif orbpos == 2900:
orb_pos = 'Star One'
elif orbpos == 2880:
orb_pos = 'AMC 6 (72'
elif orbpos == 2875:
orb_pos = 'Echostar 6'
elif orbpos == 2860:
orb_pos = 'Horizons'
elif orbpos == 2810:
orb_pos = 'AMC5'
elif orbpos == 2780:
orb_pos = 'NIMIQ 4'
elif orbpos == 2690:
orb_pos = 'NIMIQ 1'
elif orbpos == 3592:
orb_pos = 'Thor/Intelsat'
elif orbpos == 2985:
orb_pos = 'Echostar 3,12'
elif orbpos == 2830:
orb_pos = 'Echostar 8'
elif orbpos == 2630:
orb_pos = 'Galaxy 19'
elif orbpos == 2500:
orb_pos = 'Echostar 10,11'
elif orbpos == 2502:
orb_pos = 'DirectTV 5'
elif orbpos == 2410:
orb_pos = 'Echostar 7 Anik F3'
elif orbpos == 2391:
orb_pos = 'Galaxy 23'
elif orbpos == 2390:
orb_pos = 'Echostar 9'
elif orbpos == 2412:
orb_pos = 'DirectTV 7S'
elif orbpos == 2310:
orb_pos = 'Galaxy 27'
elif orbpos == 2311:
orb_pos = 'Ciel 2'
elif orbpos == 2120:
orb_pos = 'Echostar 2'
else:
orb_pos = str((float(3600 - orbpos)) / 10.0) + "W"
elif orbpos > 0:
if orbpos == 192:
orb_pos = 'Astra 1F'
elif orbpos == 130:
orb_pos = 'Hot Bird 6,7A,8'
elif orbpos == 235:
orb_pos = 'Astra 1E'
elif orbpos == 1100:
orb_pos = 'BSat 1A,2A'
elif orbpos == 1101:
orb_pos = 'N-Sat 110'
elif orbpos == 1131:
orb_pos = 'KoreaSat 5'
elif orbpos == 1440:
orb_pos = 'SuperBird 7,C2'
elif orbpos == 1006:
orb_pos = 'AsiaSat 2'
elif orbpos == 1030:
orb_pos = 'Express A2'
elif orbpos == 1056:
orb_pos = 'Asiasat 3S'
elif orbpos == 1082:
orb_pos = 'NSS 11'
elif orbpos == 881:
orb_pos = 'ST1'
elif orbpos == 900:
orb_pos = 'Yamal 201'
elif orbpos == 917:
orb_pos = 'Mesat'
elif orbpos == 950:
orb_pos = 'Insat 4B'
elif orbpos == 951:
orb_pos = 'NSS 6'
elif orbpos == 765:
orb_pos = 'Telestar'
elif orbpos == 785:
orb_pos = 'ThaiCom 5'
elif orbpos == 800:
orb_pos = 'Express'
elif orbpos == 830:
orb_pos = 'Insat 4A'
elif orbpos == 850:
orb_pos = 'Intelsat 709'
elif orbpos == 750:
orb_pos = 'Abs'
elif orbpos == 720:
orb_pos = 'Intelsat'
elif orbpos == 705:
orb_pos = 'Eutelsat W5'
elif orbpos == 685:
orb_pos = 'Intelsat'
elif orbpos == 620:
orb_pos = 'Intelsat 902'
elif orbpos == 600:
orb_pos = 'Intelsat 904'
elif orbpos == 570:
orb_pos = 'Nss'
elif orbpos == 530:
orb_pos = 'Express AM22'
elif orbpos == 480:
orb_pos = 'Eutelsat 2F2'
elif orbpos == 450:
orb_pos = 'Intelsat'
elif orbpos == 420:
orb_pos = 'Turksat 2A'
elif orbpos == 400:
orb_pos = 'Express AM1'
elif orbpos == 390:
orb_pos = 'Hellas Sat 2'
elif orbpos == 380:
orb_pos = 'Paksat 1'
elif orbpos == 360:
orb_pos = 'Eutelsat Sesat'
elif orbpos == 335:
orb_pos = 'Astra 1M'
elif orbpos == 330:
orb_pos = 'Eurobird 3'
elif orbpos == 328:
orb_pos = 'Galaxy 11'
elif orbpos == 315:
orb_pos = 'Astra 5A'
elif orbpos == 310:
orb_pos = 'Turksat'
elif orbpos == 305:
orb_pos = 'Arabsat'
elif orbpos == 285:
orb_pos = 'Eurobird 1'
elif orbpos == 284:
orb_pos = 'Eurobird/Astra'
elif orbpos == 282:
orb_pos = 'Eurobird/Astra'
elif orbpos == 1220:
orb_pos = 'AsiaSat'
elif orbpos == 1380:
orb_pos = 'Telstar 18'
elif orbpos == 260:
orb_pos = 'Badr 3/4'
elif orbpos == 255:
orb_pos = 'Eurobird 2'
elif orbpos == 215:
orb_pos = 'Eutelsat'
elif orbpos == 216:
orb_pos = 'Eutelsat W6'
elif orbpos == 210:
orb_pos = 'AfriStar 1'
elif orbpos == 160:
orb_pos = 'Eutelsat W2'
elif orbpos == 100:
orb_pos = 'Eutelsat W1'
elif orbpos == 90:
orb_pos = 'Eurobird 9'
elif orbpos == 70:
orb_pos = 'Eutelsat W3A'
elif orbpos == 50:
orb_pos = 'Sirius 4'
elif orbpos == 48:
orb_pos = 'Sirius 4'
elif orbpos == 30:
orb_pos = 'Telecom 2'
else:
orb_pos = str((float(orbpos)) / 10.0) + "E"
return orb_pos
	def createProviderName(self,info):
		"""Service provider name as reported by the service information."""
		return info.getInfoString(iServiceInformation.sProvider)
@cached
def getText(self):
service = self.source.service
if service is None:
return ""
info = service and service.info()
if not info:
return ""
if self.type == "CryptoInfo":
self.getCryptoInfo(info)
if int(config.usage.show_cryptoinfo.value) > 0:
return addspace(self.createCryptoBar(info)) + self.createCryptoSpecial(info)
else:
return addspace(self.createCryptoBar(info)) + addspace(self.current_source) + self.createCryptoSpecial(info)
if self.type == "CryptoBar":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoBar(info)
else:
return ""
if self.type == "CryptoSeca":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoSeca(info)
else:
return ""
if self.type == "CryptoVia":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoVia(info)
else:
return ""
if self.type == "CryptoIrdeto":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoIrdeto(info)
else:
return ""
if self.type == "CryptoNDS":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoNDS(info)
else:
return ""
if self.type == "CryptoConax":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoConax(info)
else:
return ""
if self.type == "CryptoCryptoW":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoCryptoW(info)
else:
return ""
if self.type == "CryptoBeta":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoBeta(info)
else:
return ""
if self.type == "CryptoNagra":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoNagra(info)
else:
return ""
if self.type == "CryptoBiss":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoBiss(info)
else:
return ""
if self.type == "CryptoDre":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoDre(info)
else:
return ""
if self.type == "CryptoSpecial":
if int(config.usage.show_cryptoinfo.value) > 0:
self.getCryptoInfo(info)
return self.createCryptoSpecial(info)
else:
return ""
if self.type == "ResolutionString":
return self.createResolution(info)
if self.type == "VideoCodec":
return self.createVideoCodec(info)
if self.updateFEdata:
feinfo = service.frontendInfo()
if feinfo:
self.feraw = feinfo.getAll(config.usage.infobar_frontend_source.value == "settings")
if self.feraw:
self.fedata = ConvertToHumanReadable(self.feraw)
feraw = self.feraw
fedata = self.fedata
if not feraw or not fedata:
return ""
if self.type == "All":
self.getCryptoInfo(info)
if int(config.usage.show_cryptoinfo.value) > 0:
return addspace(self.createProviderName(info)) + self.createTransponderInfo(fedata,feraw) + addspace(self.createTransponderName(feraw)) + "\n"\
+ addspace(self.createCryptoBar(info)) + addspace(self.createCryptoSpecial(info)) + "\n"\
+ addspace(self.createPIDInfo(info)) + addspace(self.createVideoCodec(info)) + self.createResolution(info)
else:
return addspace(self.createProviderName(info)) + self.createTransponderInfo(fedata,feraw) + addspace(self.createTransponderName(feraw)) + "\n" \
+ addspace(self.createCryptoBar(info)) + self.current_source + "\n" \
+ addspace(self.createCryptoSpecial(info)) + addspace(self.createVideoCodec(info)) + self.createResolution(info)
if self.type == "ServiceInfo":
return addspace(self.createProviderName(info)) + addspace(self.createTunerSystem(fedata)) + addspace(self.createFrequency(feraw)) + addspace(self.createPolarization(fedata)) \
+ addspace(self.createSymbolRate(fedata, feraw)) + addspace(self.createFEC(fedata, feraw)) + addspace(self.createModulation(fedata)) + addspace(self.createOrbPos(feraw)) + addspace(self.createTransponderName(feraw))\
+ addspace(self.createVideoCodec(info)) + self.createResolution(info)
if self.type == "TransponderInfo2line":
return addspace(self.createProviderName(info)) + addspace(self.createTunerSystem(fedata)) + addspace(self.createTransponderName(feraw)) + '\n'\
+ addspace(self.createFrequency(fedata)) + addspace(self.createPolarization(fedata))\
+ addspace(self.createSymbolRate(fedata, feraw)) + self.createModulation(fedata) + '-' + addspace(self.createFEC(fedata, feraw))
if self.type == "TransponderInfo":
return self.createTransponderInfo(fedata,feraw)
if self.type == "TransponderFrequency":
return self.createFrequency(feraw)
if self.type == "TransponderSymbolRate":
return self.createSymbolRate(fedata, feraw)
if self.type == "TransponderPolarization":
return self.createPolarization(fedata)
if self.type == "TransponderFEC":
return self.createFEC(fedata, feraw)
if self.type == "TransponderModulation":
return self.createModulation(fedata)
if self.type == "OrbitalPosition":
return self.createOrbPos(feraw)
if self.type == "TunerType":
return self.createTunerType(feraw)
if self.type == "TunerSystem":
return self.createTunerSystem(fedata)
if self.type == "OrbitalPositionOrTunerSystem":
return self.createOrbPosOrTunerSystem(fedata,feraw)
if self.type == "PIDInfo":
return self.createPIDInfo(info)
if self.type == "TerrestrialChannelNumber":
return self.createChannelNumber(fedata, feraw)
return _("invalid type")
text = property(getText)
@cached
def getBool(self):
service = self.source.service
info = service and service.info()
if not info:
return False
request_caid = None
for x in self.ca_table:
if x[0] == self.type:
request_caid = x[1]
request_selected = x[2]
break
if request_caid is None:
return False
if info.getInfo(iServiceInformation.sIsCrypted) != 1:
return False
data = self.ecmdata.getEcmData()
if data is None:
return False
current_caid = data[1]
available_caids = info.getInfoObject(iServiceInformation.sCAIDs)
for caid_entry in self.caid_data:
if caid_entry[3] == request_caid:
if request_selected:
if int(caid_entry[0], 16) <= int(current_caid, 16) <= int(caid_entry[1], 16):
return True
else: # request available
try:
for caid in available_caids:
if int(caid_entry[0], 16) <= caid <= int(caid_entry[1], 16):
return True
except:
pass
return False
boolean = property(getBool)
	def changed(self, what):
		"""
		React to service events: mark frontend data for refresh on new
		program info, clear it on service end, and propagate poll ticks.
		"""
		if what[0] == self.CHANGED_SPECIFIC:
			self.updateFEdata = False
			if what[1] == iPlayableService.evNewProgramInfo:
				# getText will re-read the frontend data on the next render
				self.updateFEdata = True
			if what[1] == iPlayableService.evEnd:
				self.feraw = self.fedata = None
			Converter.changed(self, what)
		elif what[0] == self.CHANGED_POLL and self.updateFEdata is not None:
			self.updateFEdata = False
			Converter.changed(self, what)
|
kingvuplus/italysat-enigma2
|
lib/python/Components/Converter/PliExtraInfo.py
|
Python
|
gpl-2.0
| 24,998
|
[
"Galaxy"
] |
445b1f21cbb1f74f6a5ab925c1f7207d002bca5ff1630db9f998508670e2506c
|
"""The tasks module provides a simple light-weight alternative to threads.
When you have a long-running job you will want to run it in the background,
while the user does other things. There are four ways to do this:
- Use a new thread for each task.
- Use callbacks from an idle handler.
- Use a recursive mainloop.
- Use this module.
Using threads causes a number of problems. Some builds of pygtk/python don't
support them, they introduce race conditions, often lead to many subtle
bugs, and they require lots of resources (you probably wouldn't want 10,000
threads running at once). In particular, two threads can run at exactly the
same time (perhaps on different processors), so you have to be really careful
that they don't both try to update the same variable at the same time. This
requires lots of messy locking, which is hard to get right.
Callbacks work within a single thread. For example, you open a dialog box and
then tell the system to call one function if it's closed, and another if the
user clicks OK, etc. The function that opened the box then returns, and the
system calls one of the given callback functions later. Callbacks only
execute one at a time, so you don't have to worry about race conditions.
However, they are often very awkward to program with, because you have to
save state somewhere and then pass it to the functions when they're called.
A recursive mainloop only works with nested tasks (you can create a
sub-task, but the main task can't continue until the sub-task has
finished). We use these for, eg, rox.alert() boxes since you don't
normally want to do anything else until the box is closed, but it is not
appropriate for long-running jobs.
Tasks use python's generator API to provide a more pleasant interface to
callbacks. See the Task class (below) for more information.
"""
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
from zeroinstall import _
import sys
from logging import info, warn
import gobject
# The list of Blockers whose event has happened, in the order they were
# triggered
_run_queue = []
def check(blockers, reporter = None):
	"""See if any of the blockers have pending exceptions.
	@param blockers: a single Blocker or a sequence of Blockers
	@param reporter: invoke this function on each error
	If reporter is None, raise the first and log the rest."""
	ex = None
	# Accept a bare Blocker as well as a sequence of them
	if isinstance(blockers, Blocker):
		blockers = (blockers,)
	for b in blockers:
		if b.exception:
			# Mark as read so Blocker.__del__ won't warn about it later
			b.exception_read = True
			if reporter:
				try:
					reporter(*b.exception)
				except:
					warn("Failure reporting error! Error was: %s", repr(b.exception[0]))
					raise
			elif ex is None:
				# Remember the first exception; it is re-raised below
				ex = b.exception
			else:
				warn(_("Multiple exceptions waiting; skipping %s"), b.exception[0])
	if ex:
		# Python 2 three-expression raise: re-raise with original traceback
		raise ex[0], None, ex[1]
class Blocker:
	"""A Blocker object starts life with 'happened = False'. Tasks can
	ask to be suspended until 'happened = True'. The value is changed
	by a call to trigger().
	Example:
	>>> kettle_boiled = tasks.Blocker()
	>>> def make_tea():
		print "Get cup"
		print "Add tea leaves"
		yield kettle_boiled
		print "Pour water into cup"
		print "Brew..."
		yield tasks.TimeoutBlocker(120)
		print "Add milk"
		print "Ready!"
	>>> tasks.Task(make_tea())
	Then elsewhere, later::
		print "Kettle boiled!"
		kettle_boiled.trigger()
	You can also yield a list of Blockers. Your function will resume
	after any one of them is triggered. Use blocker.happened to
	find out which one(s). Yielding a Blocker that has already
	happened is the same as yielding None (gives any other Tasks a
	chance to run, and then continues).
	"""
	# (exception, traceback) pair when triggered with an error, else None
	exception = None
	def __init__(self, name):
		self.happened = False # False until event triggered
		self._zero_lib_tasks = set()  # Tasks waiting on this blocker
		self.name = name  # used only for debugging output
	def trigger(self, exception = None):
		"""The event has happened. Note that this cannot be undone;
		instead, create a new Blocker to handle the next occurrence
		of the event.
		@param exception: exception to raise in waiting tasks
		@type exception: (Exception, traceback)"""
		if self.happened: return # Already triggered
		self.happened = True
		self.exception = exception
		self.exception_read = False
		#assert self not in _run_queue # Slow
		# Queue was empty: arrange for the glib idle handler to drain it.
		# NOTE: _schedule() must be called before the append (see _schedule).
		if not _run_queue:
			_schedule()
		_run_queue.append(self)
		if exception:
			assert isinstance(exception, tuple), exception
			if not self._zero_lib_tasks:
				info(_("Exception from '%s', but nothing is waiting for it"), self)
			#import traceback
			#traceback.print_exception(exception[0], None, exception[1])
	def __del__(self):
		# Warn if an error was attached but nobody ever looked at it
		if self.exception and not self.exception_read:
			warn(_("Blocker %(blocker)s garbage collected without having it's exception read: %(exception)s"), {'blocker': self, 'exception': self.exception})
	def add_task(self, task):
		"""Called by the scheduler when a Task yields this
		Blocker. If you override this method, be sure to still
		call this method with Blocker.add_task(self)!"""
		self._zero_lib_tasks.add(task)
	def remove_task(self, task):
		"""Called by the scheduler when a Task that was waiting for
		this blocker is resumed."""
		self._zero_lib_tasks.remove(task)
	def __repr__(self):
		return "<Blocker:%s>" % self
	def __str__(self):
		return self.name
class IdleBlocker(Blocker):
	"""A Blocker that fires as soon as anything starts waiting on it.

	Used internally to implement "yield None": the yielding Task blocks
	on an IdleBlocker and is therefore resumed on the next idle pass."""
	def add_task(self, task):
		"""Register the task, then trigger immediately."""
		Blocker.add_task(self, task)
		self.trigger()
class TimeoutBlocker(Blocker):
	"""Triggers after a set number of seconds."""
	def __init__(self, timeout, name):
		"""Trigger after 'timeout' seconds (may be a fraction)."""
		Blocker.__init__(self, name)
		# glib wants milliseconds; 'long' is Python 2 only
		gobject.timeout_add(long(timeout * 1000), self._timeout)
	def _timeout(self):
		# Returning None (falsy) removes the glib timeout: one-shot
		self.trigger()
def _io_callback(src, cond, blocker):
	"""glib IO-watch callback shared by Input/OutputBlocker.
	Fires the associated blocker; returning False removes the watch."""
	blocker.trigger()
	return False
class InputBlocker(Blocker):
	"""Triggers when os.read(stream) would not block."""
	_tag = None     # glib source id while a watch is installed, else None
	_stream = None  # the fd / file object being watched
	def __init__(self, stream, name):
		Blocker.__init__(self, name)
		self._stream = stream
	def add_task(self, task):
		"""Install the glib read watch when the first task waits on us."""
		Blocker.add_task(self, task)
		if self._tag is None:
			self._tag = gobject.io_add_watch(self._stream, gobject.IO_IN | gobject.IO_HUP,
				_io_callback, self)
	def remove_task(self, task):
		"""Remove the watch once no tasks remain waiting."""
		Blocker.remove_task(self, task)
		if not self._zero_lib_tasks:
			gobject.source_remove(self._tag)
			self._tag = None
class OutputBlocker(Blocker):
	"""Triggers when os.write(stream) would not block."""
	_tag = None     # glib source id while a watch is installed, else None
	_stream = None  # the fd / file object being watched
	def __init__(self, stream, name):
		Blocker.__init__(self, name)
		self._stream = stream
	def add_task(self, task):
		"""Install the glib write watch when the first task waits on us."""
		Blocker.add_task(self, task)
		if self._tag is None:
			self._tag = gobject.io_add_watch(self._stream, gobject.IO_OUT | gobject.IO_HUP,
				_io_callback, self)
	def remove_task(self, task):
		"""Remove the watch once no tasks remain waiting."""
		Blocker.remove_task(self, task)
		if not self._zero_lib_tasks:
			gobject.source_remove(self._tag)
			self._tag = None
# Current idle blocker. A Blocker can only trigger once, so a fresh
# instance is created by _handle_run_queue each time this one fires.
_idle_blocker = IdleBlocker("(idle)")
class Task:
	"""Create a new Task when you have some long running function to
	run in the background, but which needs to do work in 'chunks'.
	Example:
	>>> from zeroinstall import tasks
	>>> def my_task(start):
		for x in range(start, start + 5):
			print "x =", x
			yield None
	>>> tasks.Task(my_task(0))
	>>> tasks.Task(my_task(10))
	>>> mainloop()
	Yielding None gives up control of the processor to another Task,
	causing the sequence printed to be interleaved. You can also yield a
	Blocker (or a list of Blockers) if you want to wait for some
	particular event before resuming (see the Blocker class for details).
	"""
	def __init__(self, iterator, name):
		"""Call iterator.next() from a glib idle function. This function
		can yield Blocker() objects to suspend processing while waiting
		for events. name is used only for debugging."""
		assert iterator.next, "Object passed is not an iterator!"
		self.iterator = iterator
		# Triggered when the task finishes (or raises)
		self.finished = Blocker(name)
		# Block new task on the idle handler...
		_idle_blocker.add_task(self)
		self._zero_blockers = (_idle_blocker,)
		info(_("Scheduling new task: %s"), self)
	def _resume(self):
		"""Advance the generator one step, then re-block on whatever
		it yields (or trigger 'finished' if it is done / crashed)."""
		# Remove from our blockers' queues
		for blocker in self._zero_blockers:
			blocker.remove_task(self)
		# Resume the task
		try:
			new_blockers = self.iterator.next()
		except StopIteration:
			# Task ended
			self.finished.trigger()
			return
		except SystemExit:
			raise
		except (Exception, KeyboardInterrupt), ex:
			# Task crashed
			info(_("Exception from '%(name)s': %(exception)s"), {'name': self.finished.name, 'exception': ex})
			#import traceback
			#traceback.print_exc()
			tb = sys.exc_info()[2]
			self.finished.trigger(exception = (ex, tb))
			return
		if new_blockers is None:
			# Just give up control briefly
			new_blockers = (_idle_blocker,)
		else:
			if isinstance(new_blockers, Blocker):
				# Wrap a single yielded blocker into a list
				new_blockers = (new_blockers,)
			# Are we blocking on something that already happened?
			for blocker in new_blockers:
				assert hasattr(blocker, 'happened'), "Not a Blocker: %s from %s" % (blocker, self)
				if blocker.happened:
					new_blockers = (_idle_blocker,)
					info(_("Task '%(task)s' waiting on ready blocker %(blocker)s!"), {'task': self, 'blocker': blocker})
					break
			else:
				# for-else: no yielded blocker had already happened
				info(_("Task '%(task)s' stopping and waiting for '%(new_blockers)s'"), {'task': self, 'new_blockers': new_blockers})
		# Add to new blockers' queues
		for blocker in new_blockers:
			blocker.add_task(self)
		self._zero_blockers = new_blockers
	def __repr__(self):
		return "Task(%s)" % self.finished.name
	def __str__(self):
		return self.finished.name
# Must append to _run_queue right after calling this!
def _schedule():
	"""Install the glib idle handler that drains _run_queue.
	Only called when the queue transitions from empty to non-empty."""
	assert not _run_queue
	gobject.idle_add(_handle_run_queue)
def _handle_run_queue():
	"""glib idle callback: process the first triggered Blocker in the
	queue, resuming every Task that was waiting on it.
	Returns True (keep the idle handler installed) while more remain."""
	global _idle_blocker
	assert _run_queue
	next = _run_queue[0]
	assert next.happened
	if next is _idle_blocker:
		# Since this blocker will never run again, create a
		# new one for future idling.
		_idle_blocker = IdleBlocker("(idle)")
	elif next._zero_lib_tasks:
		info(_("Running %(task)s due to triggering of '%(next)s'"), {'task': next._zero_lib_tasks, 'next': next})
	else:
		info(_("Running %s"), next)
	# Snapshot the waiters: _resume() mutates _zero_lib_tasks while we loop
	tasks = frozenset(next._zero_lib_tasks)
	if tasks:
		next.noticed = True
		for task in tasks:
			# Run 'task'.
			task._resume()
	del _run_queue[0]
	if _run_queue:
		return True
	return False
def named_async(name):
	"""Decorator that turns a generator function into a function that runs the
	generator as a Task and returns the Task's finished blocker.
	@param name: the name for the Task"""
	def deco(fn):
		def run(*args, **kwargs):
			task = Task(fn(*args, **kwargs), name)
			return task.finished
		run.__name__ = fn.__name__
		return run
	return deco
def async(fn):
	"""Decorator that turns a generator function into a function that runs the
	generator as a Task and returns the Task's finished blocker.
	NOTE(review): 'async' became a reserved keyword in Python 3.7; this
	module targets Python 2 (see named_async for the named variant)."""
	def run(*args, **kwargs):
		return Task(fn(*args, **kwargs), fn.__name__).finished
	run.__name__ = fn.__name__
	return run
|
pombredanne/zero-install
|
zeroinstall/support/tasks.py
|
Python
|
lgpl-2.1
| 11,127
|
[
"VisIt"
] |
11f34404497f8c5684b5337e90474bdc917eff258cffa199e1086a81f84604ef
|
# -*- coding: utf-8 -*-
import os
import sys
def check_cclib(cclib):
    """Make sure we are importing code from a subdirectory, which should exist
    and should have been updated just before running this script. Note that
    this script does not assume any version in the module and just takes
    what it finds... so an appropriate checkout should be done first."""
    cwd = os.getcwd()
    if not cclib.__file__.startswith(cwd):
        print("Do not seem to be importing from current directory")
        sys.exit(1)
|
cclib/cclib
|
doc/sphinx/docs_common.py
|
Python
|
bsd-3-clause
| 518
|
[
"cclib"
] |
867bb35f4f836a06b862595771a8f0cce94a2ec27e22359100b5a2fc78baf694
|
import os
import time
import tarfile
import xml.sax
import numpy as np
from gpaw.mpi import broadcast as mpi_broadcast
from gpaw.mpi import world
from gpaw.io import FileReference
# Byte sizes of the scalar types stored in .gpw archives, keyed by the
# type-name strings used in info.xml.
intsize = 4
floatsize = np.array([1], float).itemsize
complexsize = np.array([1], complex).itemsize
itemsizes = {'int': intsize, 'float': floatsize, 'complex': complexsize}
class Writer:
    """Write GPAW data to a tar archive.

    Arrays are stored as raw binary tar members; dimensions and scalar
    parameters are collected into an 'info.xml' member written by close().
    """
    def __init__(self, name, comm=world):
        self.comm = comm # for possible future use
        self.dims = {}   # dimension name -> length
        self.files = {}  # tar member name -> TarInfo
        self.xml1 = ['<gpaw_io version="0.1" endianness="%s">' %
                     ('big', 'little')[int(np.little_endian)]]
        self.xml2 = []
        # Keep a backup of any existing file with the same name
        if os.path.isfile(name):
            os.rename(name, name[:-4] + '.old'+name[-4:])
        self.tar = tarfile.open(name, 'w')
        self.mtime = int(time.time())
    def dimension(self, name, value):
        """Declare (or re-declare, with the same length) a named dimension."""
        if name in self.dims.keys() and self.dims[name] != value:
            raise Warning('Dimension %s changed from %s to %s' % \
                          (name, self.dims[name], value))
        self.dims[name] = value
    def __setitem__(self, name, value):
        """Record a scalar parameter in the info.xml header."""
        # repr() keeps full float precision in the XML text
        if isinstance(value, float):
            value = repr(value)
        self.xml1 += [' <parameter %-20s value="%s"/>' %
                      ('name="%s"' % name, value)]
    def add(self, name, shape, array=None, dtype=None, units=None,
            parallel=False, write=True):
        """Start a new array member with dimensions `shape` (names declared
        via dimension()); if `array` is given, write its data immediately.
        NOTE(review): `units`, `parallel` and `write` are accepted but
        unused here - presumably for interface compatibility; confirm."""
        if array is not None:
            array = np.asarray(array)
        # Note: local name 'type' shadows the builtin (kept as-is)
        self.dtype, type, itemsize = self.get_data_type(array, dtype)
        self.xml2 += [' <array name="%s" type="%s">' % (name, type)]
        self.xml2 += [' <dimension length="%s" name="%s"/>' %
                      (self.dims[dim], dim)
                      for dim in shape]
        self.xml2 += [' </array>']
        self.shape = [self.dims[dim] for dim in shape]
        size = itemsize * np.product([self.dims[dim] for dim in shape])
        self.write_header(name, size)
        if array is not None:
            self.fill(array)
    def get_data_type(self, array=None, dtype=None):
        """Map an array/dtype onto the canonical (dtype, name, itemsize)
        triple used in the archive (int32 / float64 / complex128)."""
        if dtype is None:
            dtype = array.dtype
        if dtype in [int, bool]:
            dtype = np.int32
        dtype = np.dtype(dtype)
        type = {np.int32: 'int',
                np.float64: 'float',
                np.complex128: 'complex'}[dtype.type]
        return dtype, type, dtype.itemsize
    def fill(self, array, *indices, **kwargs):
        """Write array data (converted to the declared dtype) into the
        currently open member; extra arguments are ignored here."""
        self.write(np.asarray(array, self.dtype).tostring())
    def write_header(self, name, size):
        """Add a tar header for a new member of `size` bytes; the data
        itself is supplied by subsequent write() calls."""
        assert name not in self.files.keys()
        tarinfo = tarfile.TarInfo(name)
        tarinfo.mtime = self.mtime
        tarinfo.size = size
        self.files[name] = tarinfo
        self.size = size
        self.n = 0  # bytes written so far for the current member
        self.tar.addfile(tarinfo)
    def write(self, string):
        """Append raw bytes to the current member, padding to the tar
        block size and fixing up the tarfile offset once complete."""
        self.tar.fileobj.write(string)
        self.n += len(string)
        if self.n == self.size:
            blocks, remainder = divmod(self.size, tarfile.BLOCKSIZE)
            if remainder > 0:
                self.tar.fileobj.write('\0' * (tarfile.BLOCKSIZE - remainder))
                blocks += 1
            self.tar.offset += blocks * tarfile.BLOCKSIZE
    def close(self):
        """Write the accumulated info.xml member and close the archive."""
        self.xml2 += ['</gpaw_io>\n']
        string = '\n'.join(self.xml1 + self.xml2)
        self.write_header('info.xml', len(string))
        self.write(string)
        self.tar.close()
class Reader(xml.sax.handler.ContentHandler):
    """Read a GPAW tar archive produced by Writer.

    Acts as its own SAX content handler: the constructor parses the
    archive's 'info.xml' member to recover dimensions, array shapes,
    dtypes and scalar parameters.
    """
    def __init__(self, name, comm=world):
        self.comm = comm # used for broadcasting replicated data
        self.master = (self.comm.rank == 0)
        self.dims = {}        # dimension name -> length
        self.shapes = {}      # array name -> list of lengths
        self.dtypes = {}      # array name -> 'int'/'float'/'complex'
        self.parameters = {}  # scalar parameters from info.xml
        xml.sax.handler.ContentHandler.__init__(self)
        self.tar = tarfile.open(name, 'r')
        f = self.tar.extractfile('info.xml')
        xml.sax.parse(f, self)
    def startElement(self, tag, attrs):
        """SAX callback: dispatch on the info.xml element type."""
        if tag == 'gpaw_io':
            # Need to swap bytes if file and host endianness differ
            self.byteswap = ((attrs['endianness'] == 'little')
                             != np.little_endian)
        elif tag == 'array':
            name = attrs['name']
            self.dtypes[name] = attrs['type']
            self.shapes[name] = []
            self.name = name  # current array, for nested <dimension> tags
        elif tag == 'dimension':
            n = int(attrs['length'])
            self.shapes[self.name].append(n)
            self.dims[attrs['name']] = n
        else:
            assert tag == 'parameter'
            # Parameters were written as repr(); eval with empty globals,
            # falling back to the raw string when it isn't a literal
            try:
                value = eval(attrs['value'], {})
            except (SyntaxError, NameError):
                value = attrs['value'].encode()
            self.parameters[attrs['name']] = value
    def dimension(self, name):
        """Return the length of a named dimension."""
        return self.dims[name]
    def __getitem__(self, name):
        """Return a scalar parameter by name."""
        return self.parameters[name]
    def has_array(self, name):
        """True if the archive contains an array member called `name`."""
        return name in self.shapes
    def get(self, name, *indices, **kwargs):
        """Read (a slice of) an array. With broadcast=True only the
        master rank reads; the result is broadcast to all ranks."""
        broadcast = kwargs.pop('broadcast', False)
        if self.master or not broadcast:
            fileobj, shape, size, dtype = self.get_file_object(name, indices)
            array = np.fromstring(fileobj.read(size), dtype)
            if self.byteswap:
                array = array.byteswap()
            if dtype == np.int32:
                array = np.asarray(array, int)
            array.shape = shape
            if shape == ():
                # Zero-dimensional result: return a plain Python scalar
                array = array.item()
        else:
            array = None
        if broadcast:
            array = mpi_broadcast(array, 0, self.comm)
        return array
    def get_reference(self, name, *indices):
        """Return a lazy TarFileReference instead of reading the data."""
        fileobj, shape, size, dtype = self.get_file_object(name, indices)
        assert dtype != np.int32
        return TarFileReference(fileobj, shape, dtype, self.byteswap)
    def get_file_object(self, name, indices):
        """Seek the member's file object to the element selected by the
        leading `indices`; return (fileobj, remaining shape, size, dtype)."""
        dtype, type, itemsize = self.get_data_type(name)
        fileobj = self.tar.extractfile(name)
        n = len(indices)
        shape = self.shapes[name]
        size = itemsize * np.prod(shape[n:], dtype=int)
        # Row-major offset of the selected sub-block
        offset = 0
        stride = size
        for i in range(n - 1, -1, -1):
            offset += indices[i] * stride
            stride *= shape[i]
        fileobj.seek(offset)
        return fileobj, shape[n:], size, dtype
    def get_data_type(self, name):
        """Map an array's stored type string to (dtype, name, itemsize)."""
        type = self.dtypes[name]
        dtype = np.dtype({'int': np.int32,
                          'float': float,
                          'complex': complex}[type])
        return dtype, type, dtype.itemsize
    def get_parameters(self):
        """Return the full parameter dictionary."""
        return self.parameters
    def close(self):
        self.tar.close()
class TarFileReference(FileReference):
    """Lazy, read-on-demand view of an array stored in a tar member.

    Only whole-array ([:]) access and indexing by leading integers is
    supported; data is read from the underlying file on each access.
    """
    def __init__(self, fileobj, shape, dtype, byteswap):
        self.fileobj = fileobj
        self.shape = tuple(shape)
        self.dtype = dtype
        self.itemsize = dtype.itemsize
        self.byteswap = byteswap
        # Remember where this member's data starts in the file
        self.offset = fileobj.tell()
    def __len__(self):
        return self.shape[0]
    def __getitem__(self, indices):
        # Normalize the index to a tuple of leading integer indices
        if isinstance(indices, slice):
            start, stop, step = indices.indices(len(self))
            if start != 0 or step != 1 or stop != len(self):
                raise NotImplementedError('You can only slice a TarReference '
                                          'with [:] or [int]')
            else:
                indices = ()
        elif isinstance(indices, int):
            indices = (indices,)
        else:  # Probably tuple or ellipsis
            raise NotImplementedError('You can only slice a TarReference '
                                      'with [:] or [int]')
        n = len(indices)
        size = np.prod(self.shape[n:], dtype=int) * self.itemsize
        # Row-major offset of the selected sub-block within the member
        offset = self.offset
        stride = size
        for i in range(n - 1, -1, -1):
            offset += indices[i] * stride
            stride *= self.shape[i]
        self.fileobj.seek(offset)
        array = np.fromstring(self.fileobj.read(size), self.dtype)
        if self.byteswap:
            array = array.byteswap()
        array.shape = self.shape[n:]
        return array
|
ajylee/gpaw-rtxs
|
gpaw/io/tar.py
|
Python
|
gpl-3.0
| 8,152
|
[
"GPAW"
] |
89a4dc57f72eda0c2a4233aa72d545a348cb48dc12520a700b8d73e685691e42
|
# -*- coding: utf-8 -*-
import os
import re
import uuid
import urllib
import logging
import datetime
import urlparse
from collections import OrderedDict
import warnings
import pytz
import blinker
from flask import request
from django.core.urlresolvers import reverse
from HTMLParser import HTMLParser
from modularodm import Q
from modularodm import fields
from modularodm.validators import MaxLengthValidator
from modularodm.exceptions import ValidationTypeError
from modularodm.exceptions import ValidationValueError
from api.base.utils import absolute_reverse
from framework import status
from framework.mongo import ObjectId
from framework.mongo import StoredObject
from framework.addons import AddonModelMixin
from framework.auth import get_user, User, Auth
from framework.auth import signals as auth_signals
from framework.exceptions import PermissionsError
from framework.guid.model import GuidStoredObject
from framework.auth.utils import privacy_info_handle
from framework.analytics import tasks as piwik_tasks
from framework.mongo.utils import to_mongo, to_mongo_key, unique_on
from framework.analytics import (
get_basic_counters, increment_user_activity_counters
)
from framework.sentry import log_exception
from framework.transactions.context import TokuTransaction
from website import language
from website import settings
from website.util import web_url_for
from website.util import api_url_for
from website.exceptions import NodeStateError
from website.citations.utils import datetime_to_csl
from website.identifiers.model import IdentifierMixin
from website.util.permissions import expand_permissions
from website.util.permissions import CREATOR_PERMISSIONS
from website.project.metadata.schemas import OSF_META_SCHEMAS
from website.util.permissions import DEFAULT_CONTRIBUTOR_PERMISSIONS
# Shared HTML parser instance and module-level logger
html_parser = HTMLParser()
logger = logging.getLogger(__name__)
def has_anonymous_link(node, auth):
    """check if the node is anonymous to the user
    :param Node node: Node which the user wants to visit
    :param str link: any view-only link in the current url
    :return bool anonymous: Whether the node is anonymous to the user or not
    """
    key = auth.private_key or request.args.get('view_only', '').strip('/')
    if not key:
        return False
    if node.is_public:
        return False
    for link in node.private_links_active:
        if link.key == key and link.anonymous:
            return True
    return False
# Blinker signals fired on contributor-related events
signals = blinker.Namespace()
contributor_added = signals.signal('contributor-added')
unreg_contributor_added = signals.signal('unreg-contributor-added')
write_permissions_revoked = signals.signal('write-permissions-revoked')
class MetaSchema(StoredObject):
    """A named, versioned metadata/registration schema definition."""
    _id = fields.StringField(default=lambda: str(ObjectId()))
    name = fields.StringField()
    # The schema body itself (pages, questions, ...)
    schema = fields.DictionaryField()
    category = fields.StringField()
    # Version of the Knockout metadata renderer to use (e.g. if data binds
    # change)
    metadata_version = fields.IntegerField()
    # Version of the schema to use (e.g. if questions, responses change)
    schema_version = fields.IntegerField()
def ensure_schemas(clear=True):
    """Import meta-data schemas from JSON to database, optionally clearing
    database first.

    :param clear: Clear schema database before import
    """
    if clear:
        try:
            MetaSchema.remove()
        except AttributeError:
            # Collection may not exist yet; only tolerate this in debug mode
            if not settings.DEBUG_MODE:
                raise
    for schema in OSF_META_SCHEMAS:
        try:
            MetaSchema.find_one(
                Q('name', 'eq', schema['name']) &
                Q('schema_version', 'eq', schema['schema_version'])
            )
        except Exception:
            # Schema not present yet: insert it. Was a bare ``except:``,
            # which also swallowed SystemExit/KeyboardInterrupt.
            # NOTE: mutates the source dict in OSF_META_SCHEMAS in place.
            schema['name'] = schema['name'].replace(' ', '_')
            schema_obj = MetaSchema(**schema)
            schema_obj.save()
class MetaData(GuidStoredObject):
    """Arbitrary guid-addressed metadata attached to another object."""
    _id = fields.StringField(primary=True)
    # The object this metadata describes (polymorphic reference)
    target = fields.AbstractForeignField(backref='metadata')
    data = fields.DictionaryField()
    date_created = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow)
    date_modified = fields.DateTimeField(auto_now=datetime.datetime.utcnow)
def validate_comment_reports(value, *args, **kwargs):
    """Validate the Comment.reports dictionary: keys must be existing
    user IDs and values must be dicts with 'category' and 'text' keys.
    :raises ValidationValueError / ValidationTypeError: on bad entries
    """
    for key, val in value.iteritems():  # Python 2 dict iteration
        if not User.load(key):
            raise ValidationValueError('Keys must be user IDs')
        if not isinstance(val, dict):
            raise ValidationTypeError('Values must be dictionaries')
        if 'category' not in val or 'text' not in val:
            raise ValidationValueError(
                'Values must include `category` and `text` keys'
            )
class Comment(GuidStoredObject):
    """A user comment on a node (or other commentable target).

    Every mutation (create/edit/delete/undelete) is mirrored into the
    owning node's activity log.
    """
    _id = fields.StringField(primary=True)
    user = fields.ForeignField('user', required=True, backref='commented')
    # Node the comment lives on; receives the log entries below
    node = fields.ForeignField('node', required=True, backref='comment_owner')
    # What the comment is attached to (polymorphic: node, comment, ...)
    target = fields.AbstractForeignField(required=True, backref='commented')
    date_created = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow)
    date_modified = fields.DateTimeField(auto_now=datetime.datetime.utcnow)
    # True once the comment has been edited at least once
    modified = fields.BooleanField()
    # Soft-delete flag: deleted comments are hidden, not removed
    is_deleted = fields.BooleanField(default=False)
    content = fields.StringField()
    # Dictionary field mapping user IDs to dictionaries of report details:
    # {
    #   'icpnw': {'category': 'hate', 'message': 'offensive'},
    #   'cdi38': {'category': 'spam', 'message': 'godwins law'},
    # }
    reports = fields.DictionaryField(validate=validate_comment_reports)
    @classmethod
    def create(cls, auth, **kwargs):
        """Create and save a new comment, logging it on the node.
        :param Auth auth: credentials to attribute the log entry to
        :returns: the saved Comment
        """
        comment = cls(**kwargs)
        comment.save()
        comment.node.add_log(
            NodeLog.COMMENT_ADDED,
            {
                'project': comment.node.parent_id,
                'node': comment.node._id,
                'user': comment.user._id,
                'comment': comment._id,
            },
            auth=auth,
            save=False,
        )
        comment.node.save()
        return comment
    def edit(self, content, auth, save=False):
        """Replace the comment text, mark it modified and log the edit."""
        self.content = content
        self.modified = True
        self.node.add_log(
            NodeLog.COMMENT_UPDATED,
            {
                'project': self.node.parent_id,
                'node': self.node._id,
                'user': self.user._id,
                'comment': self._id,
            },
            auth=auth,
            save=False,
        )
        if save:
            self.save()
    def delete(self, auth, save=False):
        """Soft-delete the comment and log the removal."""
        self.is_deleted = True
        self.node.add_log(
            NodeLog.COMMENT_REMOVED,
            {
                'project': self.node.parent_id,
                'node': self.node._id,
                'user': self.user._id,
                'comment': self._id,
            },
            auth=auth,
            save=False,
        )
        if save:
            self.save()
    def undelete(self, auth, save=False):
        """Restore a soft-deleted comment and log it as added again."""
        self.is_deleted = False
        self.node.add_log(
            NodeLog.COMMENT_ADDED,
            {
                'project': self.node.parent_id,
                'node': self.node._id,
                'user': self.user._id,
                'comment': self._id,
            },
            auth=auth,
            save=False,
        )
        if save:
            self.save()
    def report_abuse(self, user, save=False, **kwargs):
        """Report that a comment is abuse.
        :param User user: User submitting the report
        :param bool save: Save changes
        :param dict kwargs: Report details
        :raises: ValueError if the user submitting abuse is the same as the
            user who posted the comment
        """
        if user == self.user:
            raise ValueError
        self.reports[user._id] = kwargs
        if save:
            self.save()
    def unreport_abuse(self, user, save=False):
        """Revoke report of abuse.
        :param User user: User who submitted the report
        :param bool save: Save changes
        :raises: ValueError if user has not reported comment as abuse
        """
        try:
            self.reports.pop(user._id)
        except KeyError:
            raise ValueError('User has not reported comment as abuse')
        if save:
            self.save()
class ApiKey(StoredObject):
    """A legacy API key, owned by a user or a node (via backrefs)."""
    # The key is also its primary key
    _id = fields.StringField(
        primary=True,
        default=lambda: str(ObjectId()) + str(uuid.uuid4())
    )
    # A display name
    label = fields.StringField()
    @property
    def user(self):
        """The User this key belongs to, or None."""
        return self.user__keyed[0] if self.user__keyed else None
    @property
    def node(self):
        """The Node this key belongs to, or None."""
        return self.node__keyed[0] if self.node__keyed else None
@unique_on(['params.node', '_id'])
class NodeLog(StoredObject):
    """A single entry in a node's activity log.

    Log records can be shared between related nodes (parents, forks,
    registrations), so consumers must use `resolve_node` / `can_view`
    to find the node relevant for permission checks.
    """
    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
    date = fields.DateTimeField(default=datetime.datetime.utcnow, index=True)
    # One of the action constants defined below
    action = fields.StringField(index=True)
    params = fields.DictionaryField()
    should_hide = fields.BooleanField(default=False)
    was_connected_to = fields.ForeignField('node', list=True)
    user = fields.ForeignField('user', backref='created')
    api_key = fields.ForeignField('apikey', backref='created')
    # Set instead of `user` for actions performed by non-OSF identities
    foreign_user = fields.StringField()
    DATE_FORMAT = '%m/%d/%Y %H:%M UTC'
    # Log action constants
    CREATED_FROM = 'created_from'
    PROJECT_CREATED = 'project_created'
    PROJECT_REGISTERED = 'project_registered'
    PROJECT_DELETED = 'project_deleted'
    NODE_CREATED = 'node_created'
    NODE_FORKED = 'node_forked'
    NODE_REMOVED = 'node_removed'
    POINTER_CREATED = 'pointer_created'
    POINTER_FORKED = 'pointer_forked'
    POINTER_REMOVED = 'pointer_removed'
    WIKI_UPDATED = 'wiki_updated'
    WIKI_DELETED = 'wiki_deleted'
    WIKI_RENAMED = 'wiki_renamed'
    CONTRIB_ADDED = 'contributor_added'
    CONTRIB_REMOVED = 'contributor_removed'
    CONTRIB_REORDERED = 'contributors_reordered'
    PERMISSIONS_UPDATED = 'permissions_updated'
    MADE_PRIVATE = 'made_private'
    MADE_PUBLIC = 'made_public'
    TAG_ADDED = 'tag_added'
    TAG_REMOVED = 'tag_removed'
    EDITED_TITLE = 'edit_title'
    EDITED_DESCRIPTION = 'edit_description'
    UPDATED_FIELDS = 'updated_fields'
    FILE_MOVED = 'addon_file_moved'
    FILE_COPIED = 'addon_file_copied'
    FOLDER_CREATED = 'folder_created'
    FILE_ADDED = 'file_added'
    FILE_UPDATED = 'file_updated'
    FILE_REMOVED = 'file_removed'
    FILE_RESTORED = 'file_restored'
    ADDON_ADDED = 'addon_added'
    ADDON_REMOVED = 'addon_removed'
    COMMENT_ADDED = 'comment_added'
    COMMENT_REMOVED = 'comment_removed'
    COMMENT_UPDATED = 'comment_updated'
    MADE_CONTRIBUTOR_VISIBLE = 'made_contributor_visible'
    MADE_CONTRIBUTOR_INVISIBLE = 'made_contributor_invisible'
    EXTERNAL_IDS_ADDED = 'external_ids_added'
    def __repr__(self):
        return ('<NodeLog({self.action!r}, params={self.params!r}) '
                'with id {self._id!r}>').format(self=self)
    @property
    def node(self):
        """Return the :class:`Node` associated with this log."""
        return (
            Node.load(self.params.get('node')) or
            Node.load(self.params.get('project'))
        )
    @property
    def tz_date(self):
        '''Return the timezone-aware date.
        '''
        # Date should always be defined, but a few logs in production are
        # missing dates; return None and log error if date missing
        if self.date:
            return self.date.replace(tzinfo=pytz.UTC)
        logger.error('Date missing on NodeLog {}'.format(self._primary_key))
    @property
    def formatted_date(self):
        '''Return the timezone-aware, ISO-formatted string representation of
        this log's date.
        '''
        if self.tz_date:
            return self.tz_date.isoformat()
    def resolve_node(self, node):
        """A single `NodeLog` record may be attached to multiple `Node` records
        (parents, forks, registrations, etc.), so the node that the log refers
        to may not be the same as the node the user is viewing. Use
        `resolve_node` to determine the relevant node to use for permission
        checks.
        :param Node node: Node being viewed
        :returns: the relevant Node, or False if none applies
        """
        if self.node == node or self.node in node.nodes:
            return self.node
        if node.is_fork_of(self.node) or node.is_registration_of(self.node):
            return node
        for child in node.nodes:
            # Bug fix: previously tested `node.is_registration_of(...)`
            # here, which is always False at this point (handled above);
            # the intent, mirroring `child.is_fork_of`, is to test `child`.
            if child.is_fork_of(self.node) or child.is_registration_of(self.node):
                return child
        return False
    def can_view(self, node, auth):
        """Whether `auth` may view this log in the context of `node`."""
        node_to_check = self.resolve_node(node)
        if node_to_check:
            return node_to_check.can_view(auth)
        return False
    def _render_log_contributor(self, contributor, anonymous=False):
        """Serialize a contributor id for log display, honoring anonymity."""
        user = User.load(contributor)
        if not user:
            return None
        if self.node:
            fullname = user.display_full_name(node=self.node)
        else:
            fullname = user.fullname
        return {
            'id': privacy_info_handle(user._primary_key, anonymous),
            'fullname': privacy_info_handle(fullname, anonymous, name=True),
            'registered': user.is_registered,
        }
class Tag(StoredObject):
    """A free-form tag; the tag text itself is the primary key."""
    _id = fields.StringField(primary=True, validate=MaxLengthValidator(128))
    def __repr__(self):
        return '<Tag() with id %r>' % (self._id,)
    @property
    def url(self):
        """Search URL listing everything carrying this tag."""
        return '/search/?tags=%s' % (self._id,)
class Pointer(StoredObject):
    """A link to a Node. The Pointer delegates all but a few methods to its
    contained Node. Forking and registration are overridden such that the
    link is cloned, but its contained Node is not.
    """
    #: Whether this is a pointer or not
    primary = False
    _id = fields.StringField()
    node = fields.ForeignField('node', backref='_pointed')
    _meta = {'optimistic': True}
    def _clone(self):
        """Return a saved copy of this pointer (pointing at the same
        node), or None when there is no target node."""
        if self.node:
            clone = self.clone()
            clone.node = self.node
            clone.save()
            return clone
    def fork_node(self, *args, **kwargs):
        # Forking clones only the link, never the pointed-at node
        return self._clone()
    def register_node(self, *args, **kwargs):
        # Registration likewise copies only the link
        return self._clone()
    def use_as_template(self, *args, **kwargs):
        return self._clone()
    def resolve(self):
        """Return the Node this pointer refers to."""
        return self.node
    def __getattr__(self, item):
        """Delegate attribute access to the node being pointed to.
        """
        # Prevent backref lookups from being overriden by proxied node
        try:
            return super(Pointer, self).__getattr__(item)
        except AttributeError:
            pass
        if self.node:
            return getattr(self.node, item)
        raise AttributeError(
            'Pointer object has no attribute {0}'.format(
                item
            )
        )
def get_pointer_parent(pointer):
    """Given a `Pointer` object, return its parent node.
    """
    # The `parent_node` property of the `Pointer` schema refers to the
    # parents of the pointed-at `Node`, not the parents of the `Pointer`;
    # walk the back-reference instead.
    parents = pointer.node__parent
    assert len(parents) == 1, 'Pointer must have exactly one parent'
    return parents[0]
def validate_category(value):
    """Validator for Node#category. Makes sure that the value is one of the
    categories defined in CATEGORY_MAP.
    """
    # Membership in a dict is membership in its keys
    if value not in Node.CATEGORY_MAP:
        raise ValidationValueError('Invalid value for category.')
    return True
def validate_title(value):
    """Validator for Node#title. Makes sure that the value exists.
    """
    if value is None or value.strip() == '':
        raise ValidationValueError('Title cannot be blank.')
    return True
def validate_user(value):
    """Validate a {user_id: ...} mapping: when non-empty, its first key
    must refer to exactly one existing User.
    NOTE(review): only the first key is checked - presumably callers
    pass single-entry dicts; confirm against usage (Node.expanded).
    """
    if value != {}:
        user_id = value.iterkeys().next()  # Python 2: first key
        if User.find(Q('_id', 'eq', user_id)).count() != 1:
            raise ValidationValueError('User does not exist.')
    return True
class NodeUpdateError(Exception):
    """Raised when an update to a Node field is rejected.

    `key` names the offending field; `reason` explains the rejection.
    """
    def __init__(self, reason, key, *args, **kwargs):
        super(NodeUpdateError, self).__init__(*args, **kwargs)
        self.reason = reason
        self.key = key
class Node(GuidStoredObject, AddonModelMixin, IdentifierMixin):
#: Whether this is a pointer or not
primary = True
# Node fields that trigger an update to Solr on save
SOLR_UPDATE_FIELDS = {
'title',
'category',
'description',
'visible_contributor_ids',
'tags',
'is_fork',
'is_registration',
'is_public',
'is_deleted',
'wiki_pages_current',
}
# Maps category identifier => Human-readable representation for use in
# titles, menus, etc.
# Use an OrderedDict so that menu items show in the correct order
CATEGORY_MAP = OrderedDict([
('', 'Uncategorized'),
('project', 'Project'),
('hypothesis', 'Hypothesis'),
('methods and measures', 'Methods and Measures'),
('procedure', 'Procedure'),
('instrumentation', 'Instrumentation'),
('data', 'Data'),
('analysis', 'Analysis'),
('communication', 'Communication'),
('other', 'Other'),
])
WRITABLE_WHITELIST = [
'title',
'description',
'category',
]
_id = fields.StringField(primary=True)
date_created = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow, index=True)
# Privacy
is_public = fields.BooleanField(default=False, index=True)
# User mappings
permissions = fields.DictionaryField()
visible_contributor_ids = fields.StringField(list=True)
# Project Organization
is_dashboard = fields.BooleanField(default=False, index=True)
is_folder = fields.BooleanField(default=False, index=True)
# Expanded: Dictionary field mapping user IDs to expand state of this node:
# {
# 'icpnw': True,
# 'cdi38': False,
# }
expanded = fields.DictionaryField(default={}, validate=validate_user)
is_deleted = fields.BooleanField(default=False, index=True)
deleted_date = fields.DateTimeField(index=True)
is_registration = fields.BooleanField(default=False, index=True)
registered_date = fields.DateTimeField(index=True)
registered_user = fields.ForeignField('user', backref='registered')
registered_schema = fields.ForeignField('metaschema', backref='registered')
registered_meta = fields.DictionaryField()
is_fork = fields.BooleanField(default=False, index=True)
forked_date = fields.DateTimeField(index=True)
title = fields.StringField(validate=validate_title)
description = fields.StringField()
category = fields.StringField(validate=validate_category, index=True)
# One of 'public', 'private'
# TODO: Add validator
comment_level = fields.StringField(default='private')
wiki_pages_current = fields.DictionaryField()
wiki_pages_versions = fields.DictionaryField()
# Dictionary field mapping node wiki page to sharejs private uuid.
# {<page_name>: <sharejs_id>}
wiki_private_uuids = fields.DictionaryField()
file_guid_to_share_uuids = fields.DictionaryField()
creator = fields.ForeignField('user', backref='created')
contributors = fields.ForeignField('user', list=True, backref='contributed')
users_watching_node = fields.ForeignField('user', list=True, backref='watched')
logs = fields.ForeignField('nodelog', list=True, backref='logged')
tags = fields.ForeignField('tag', list=True, backref='tagged')
# Tags for internal use
system_tags = fields.StringField(list=True)
nodes = fields.AbstractForeignField(list=True, backref='parent')
forked_from = fields.ForeignField('node', backref='forked', index=True)
registered_from = fields.ForeignField('node', backref='registrations', index=True)
# The node (if any) used as a template for this node's creation
template_node = fields.ForeignField('node', backref='template_node', index=True)
api_keys = fields.ForeignField('apikey', list=True, backref='keyed')
piwik_site_id = fields.StringField()
# Dictionary field mapping user id to a list of nodes in node.nodes which the user has subscriptions for
# {<User.id>: [<Node._id>, <Node2._id>, ...] }
child_node_subscriptions = fields.DictionaryField(default=dict)
_meta = {
'optimistic': True,
}
    def __init__(self, *args, **kwargs):
        """Initialize the node.

        On first creation (not when hydrated from the database), add the
        creator as a visible contributor with default creator permissions.
        """
        super(Node, self).__init__(*args, **kwargs)
        # `_is_loaded` is passed by the ODM when re-loading an existing
        # record; skip creation-time bookkeeping in that case.
        if kwargs.get('_is_loaded', False):
            return
        if self.creator:
            self.contributors.append(self.creator)
            self.set_visible(self.creator, visible=True, log=False)
            # Add default creator permissions
            for permission in CREATOR_PERMISSIONS:
                self.add_permission(self.creator, permission, save=False)
def __repr__(self):
return ('<Node(title={self.title!r}, category={self.category!r}) '
'with _id {self._id!r}>').format(self=self)
# For Django compatibility
    @property
    def pk(self):
        """Primary-key alias for Django compatibility."""
        return self._id
    @property
    def category_display(self):
        """The human-readable representation of this node's category."""
        # CATEGORY_MAP is defined on the class, outside this view.
        return self.CATEGORY_MAP[self.category]
    @property
    def private_links(self):
        """All PrivateLink objects sharing this node (via back-reference)."""
        return self.privatelink__shared
@property
def private_links_active(self):
return [x for x in self.private_links if not x.is_deleted]
@property
def private_link_keys_active(self):
return [x.key for x in self.private_links if not x.is_deleted]
@property
def private_link_keys_deleted(self):
return [x.key for x in self.private_links if x.is_deleted]
def path_above(self, auth):
parents = self.parents
return '/' + '/'.join([p.title if p.can_view(auth) else '-- private project --' for p in reversed(parents)])
@property
def ids_above(self):
parents = self.parents
return {p._id for p in parents}
def can_edit(self, auth=None, user=None):
"""Return if a user is authorized to edit this node.
Must specify one of (`auth`, `user`).
:param Auth auth: Auth object to check
:param User user: User object to check
:returns: Whether user has permission to edit this node.
"""
if not auth and not user:
raise ValueError('Must pass either `auth` or `user`')
if auth and user:
raise ValueError('Cannot pass both `auth` and `user`')
user = user or auth.user
if auth:
is_api_node = auth.api_node == self
else:
is_api_node = False
return (
(user and self.has_permission(user, 'write'))
or is_api_node
)
def is_admin_parent(self, user):
if self.has_permission(user, 'admin', check_parent=False):
return True
if self.parent_node:
return self.parent_node.is_admin_parent(user)
return False
def can_view(self, auth):
if not auth and not self.is_public:
return False
return (
self.is_public or
(auth.user and self.has_permission(auth.user, 'read')) or
auth.private_key in self.private_link_keys_active or
self.is_admin_parent(auth.user)
)
def is_expanded(self, user=None):
"""Return if a user is has expanded the folder in the dashboard view.
Must specify one of (`auth`, `user`).
:param User user: User object to check
:returns: Boolean if the folder is expanded.
"""
if user._id in self.expanded:
return self.expanded[user._id]
else:
return False
def expand(self, user=None):
self.expanded[user._id] = True
self.save()
def collapse(self, user=None):
self.expanded[user._id] = False
self.save()
def is_derived_from(self, other, attr):
derived_from = getattr(self, attr)
while True:
if derived_from is None:
return False
if derived_from == other:
return True
derived_from = getattr(derived_from, attr)
    def is_fork_of(self, other):
        """True if this node is a (possibly indirect) fork of `other`."""
        return self.is_derived_from(other, 'forked_from')
    def is_registration_of(self, other):
        """True if this node is a (possibly indirect) registration of `other`."""
        return self.is_derived_from(other, 'registered_from')
    @property
    def forks(self):
        """List of non-deleted, non-registration forks of this node."""
        return list(self.node__forked.find(Q('is_deleted', 'eq', False) &
            Q('is_registration', 'ne', True)))
def add_permission(self, user, permission, save=False):
"""Grant permission to a user.
:param User user: User to grant permission to
:param str permission: Permission to grant
:param bool save: Save changes
:raises: ValueError if user already has permission
"""
if user._id not in self.permissions:
self.permissions[user._id] = [permission]
else:
if permission in self.permissions[user._id]:
raise ValueError('User already has permission {0}'.format(permission))
self.permissions[user._id].append(permission)
if save:
self.save()
def remove_permission(self, user, permission, save=False):
"""Revoke permission from a user.
:param User user: User to revoke permission from
:param str permission: Permission to revoke
:param bool save: Save changes
:raises: ValueError if user does not have permission
"""
try:
self.permissions[user._id].remove(permission)
except (KeyError, ValueError):
raise ValueError('User does not have permission {0}'.format(permission))
if save:
self.save()
def clear_permission(self, user, save=False):
"""Clear all permissions for a user.
:param User user: User to revoke permission from
:param bool save: Save changes
:raises: ValueError if user not in permissions
"""
try:
self.permissions.pop(user._id)
except KeyError:
raise ValueError(
'User {0} not in permissions list for node {1}'.format(
user._id, self._id,
)
)
if save:
self.save()
def set_permissions(self, user, permissions, save=False):
self.permissions[user._id] = permissions
if save:
self.save()
def has_permission(self, user, permission, check_parent=True):
"""Check whether user has permission.
:param User user: User to test
:param str permission: Required permission
:returns: User has required permission
"""
if user is None:
logger.warn('User is ``None``.')
return False
if permission in self.permissions.get(user._id, []):
return True
if permission == 'read' and check_parent:
return self.is_admin_parent(user)
return False
def can_read_children(self, user):
"""Checks if the given user has read permissions on any child nodes
that are not registrations or deleted
"""
if self.has_permission(user, 'read'):
return True
for node in self.nodes:
if not node.primary or node.is_deleted:
continue
if node.can_read_children(user):
return True
return False
    def get_permissions(self, user):
        """Get list of permissions for user.

        :param User user: User to check
        :returns: List of permissions; an empty list when the user has no
            entry (despite the historical docstring, this never raises).
        """
        return self.permissions.get(user._id, [])
def adjust_permissions(self):
for key in self.permissions.keys():
if key not in self.contributors:
self.permissions.pop(key)
@property
def visible_contributors(self):
return [
User.load(_id)
for _id in self.visible_contributor_ids
]
@property
def parents(self):
if self.parent_node:
return [self.parent_node] + self.parent_node.parents
return []
@property
def admin_contributor_ids(self, contributors=None):
contributor_ids = self.contributors._to_primary_keys()
admin_ids = set()
for parent in self.parents:
admins = [
user for user, perms in parent.permissions.iteritems()
if 'admin' in perms
]
admin_ids.update(set(admins).difference(contributor_ids))
return admin_ids
@property
def admin_contributors(self):
return sorted(
[User.load(_id) for _id in self.admin_contributor_ids],
key=lambda user: user.family_name,
)
def get_visible(self, user):
if not self.is_contributor(user):
raise ValueError(u'User {0} not in contributors'.format(user))
return user._id in self.visible_contributor_ids
def update_visible_ids(self, save=False):
"""Update the order of `visible_contributor_ids`. Updating on making
a contributor visible is more efficient than recomputing order on
accessing `visible_contributors`.
"""
self.visible_contributor_ids = [
contributor._id
for contributor in self.contributors
if contributor._id in self.visible_contributor_ids
]
if save:
self.save()
def set_visible(self, user, visible, log=True, auth=None, save=False):
if not self.is_contributor(user):
raise ValueError(u'User {0} not in contributors'.format(user))
if visible and user._id not in self.visible_contributor_ids:
self.visible_contributor_ids.append(user._id)
self.update_visible_ids(save=False)
elif not visible and user._id in self.visible_contributor_ids:
self.visible_contributor_ids.remove(user._id)
else:
return
message = (
NodeLog.MADE_CONTRIBUTOR_VISIBLE
if visible
else NodeLog.MADE_CONTRIBUTOR_INVISIBLE
)
if log:
self.add_log(
message,
params={
'parent': self.parent_id,
'node': self._id,
'contributors': [user._id],
},
auth=auth,
save=False,
)
if save:
self.save()
def can_comment(self, auth):
if self.comment_level == 'public':
return auth.logged_in and (
self.is_public or
(auth.user and self.has_permission(auth.user, 'read'))
)
return self.is_contributor(auth.user)
    def update(self, fields, auth=None, save=True):
        """Update whitelisted attributes from a ``{name: value}`` mapping,
        log the changes, and return the list of saved field names.

        :param dict fields: Attribute names mapped to new values; keys not
            in ``WRITABLE_WHITELIST`` are silently skipped.
        :param Auth auth: Consolidated authorization (used for the log entry)
        :param bool save: Persist before logging
        :raises NodeUpdateError: if this node is a registration or a value
            is rejected by a field setter
        """
        if self.is_registration:
            raise NodeUpdateError(reason="Registered content cannot be updated")
        values = {}
        for key, value in fields.iteritems():
            if key not in self.WRITABLE_WHITELIST:
                continue
            with warnings.catch_warnings():
                try:
                    # This is in place because historically projects and components
                    # live on different ElasticSearch indexes, and at the time of Node.save
                    # there is no reliable way to check what the old Node.category
                    # value was. When the category changes it is possible to have duplicate/dead
                    # search entries, so always delete the ES doc on category change
                    # TODO: consolidate Node indexes into a single index, refactor search
                    if key == 'category':
                        self.delete_search_entry()
                    ###############
                    values[key] = {
                        'old': getattr(self, key),
                        'new': value,
                    }
                    setattr(self, key, value)
                except AttributeError:
                    raise NodeUpdateError(reason="Invalid value for attribute '{0}'".format(key), key=key)
                except warnings.Warning:
                    # NOTE(review): catching a Warning as an exception only
                    # fires if a filter escalates warnings to errors; none is
                    # installed by `catch_warnings()` alone -- confirm intent.
                    raise NodeUpdateError(reason="Attribute '{0}' doesn't exist on the Node class".format(key), key=key)
        if save:
            updated = self.save()
        else:
            updated = []
        for key in values:
            # Re-read after save in case setters/validators normalized values.
            values[key]['new'] = getattr(self, key)
        self.add_log(NodeLog.UPDATED_FIELDS,
            params={
                'node': self._id,
                'updated_fields': {
                    key: {
                        'old': values[key]['old'],
                        'new': values[key]['new']
                    }
                    for key in values
                }
            },
            auth=auth)
        return updated
    def save(self, *args, **kwargs):
        """Persist the node, with creation-time side effects.

        On first save of an original (non-fork, non-registration) node:
        enables default addons, appends itself to its parent's `nodes`,
        and writes a PROJECT_CREATED log. Also enforces the one-dashboard-
        per-user invariant, conditionally refreshes the search index, and
        notifies Piwik.

        :returns: list of saved field names (StoredObject contract)
        """
        update_piwik = kwargs.pop('update_piwik', True)
        self.adjust_permissions()
        # `_is_loaded` is False only for brand-new records.
        first_save = not self._is_loaded
        if first_save and self.is_dashboard:
            existing_dashboards = self.creator.node__contributed.find(
                Q('is_dashboard', 'eq', True)
            )
            if existing_dashboards.count() > 0:
                raise NodeStateError("Only one dashboard allowed per user.")
        is_original = not self.is_registration and not self.is_fork
        if 'suppress_log' in kwargs.keys():
            suppress_log = kwargs['suppress_log']
            del kwargs['suppress_log']
        else:
            suppress_log = False
        saved_fields = super(Node, self).save(*args, **kwargs)
        if first_save and is_original and not suppress_log:
            # TODO: This logic also exists in self.use_as_template()
            for addon in settings.ADDONS_AVAILABLE:
                if 'node' in addon.added_default:
                    self.add_addon(addon.short_name, auth=None, log=False)
            # Define log fields for non-component project
            log_action = NodeLog.PROJECT_CREATED
            log_params = {
                'node': self._primary_key,
            }
            if getattr(self, 'parent', None):
                # Append log to parent
                self.parent.nodes.append(self)
                self.parent.save()
                log_params.update({'parent_node': self.parent._primary_key})
            # Add log with appropriate fields
            self.add_log(
                log_action,
                params=log_params,
                auth=Auth(user=self.creator),
                log_date=self.date_created,
                save=True,
            )
        # Only update Solr if at least one stored field has changed, and if
        # public or privacy setting has changed
        need_update = bool(self.SOLR_UPDATE_FIELDS.intersection(saved_fields))
        if not self.is_public:
            if first_save or 'is_public' not in saved_fields:
                need_update = False
        if self.is_folder:
            need_update = False
        if need_update:
            self.update_search()
        # This method checks what has changed.
        if settings.PIWIK_HOST and update_piwik:
            piwik_tasks.update_node(self._id, saved_fields)
        # Return expected value for StoredObject::save
        return saved_fields
######################################
# Methods that return a new instance #
######################################
    def use_as_template(self, auth, changes=None, top_level=True):
        """Create a new project, using an existing project as a template.

        :param auth: The user to be assigned as creator
        :param changes: A dictionary of changes, keyed by node id, which
                        override the attributes of the template project or its
                        children.
        :param bool top_level: True for the root call; child recursions pass
                        False so the title prefix is applied only once.
        :return: The `Node` instance created.
        """
        changes = changes or dict()
        # build the dict of attributes to change for the new node
        try:
            attributes = changes[self._id]
            # TODO: explicitly define attributes which may be changed.
        except (AttributeError, KeyError):
            attributes = dict()
        new = self.clone()
        # clear permissions, which are not cleared by the clone method
        new.permissions = {}
        new.visible_contributor_ids = []
        # Clear quasi-foreign fields
        new.wiki_pages_current = {}
        new.wiki_pages_versions = {}
        new.wiki_private_uuids = {}
        new.file_guid_to_share_uuids = {}
        # set attributes which may be overridden by `changes`
        new.is_public = False
        new.description = None
        # apply `changes`
        for attr, val in attributes.iteritems():
            setattr(new, attr, val)
        # set attributes which may NOT be overridden by `changes`
        new.creator = auth.user
        new.add_contributor(contributor=auth.user, log=False, save=False)
        new.template_node = self
        new.is_fork = False
        new.is_registration = False
        new.piwik_site_id = None
        # If that title hasn't been changed, apply the default prefix (once)
        if (new.title == self.title
                and top_level
                and language.TEMPLATED_FROM_PREFIX not in new.title):
            new.title = ''.join((language.TEMPLATED_FROM_PREFIX, new.title, ))
        # Slight hack - date_created is a read-only field.
        new._fields['date_created'].__set__(
            new,
            datetime.datetime.utcnow(),
            safe=True
        )
        new.save(suppress_log=True)
        # Log the creation
        new.add_log(
            NodeLog.CREATED_FROM,
            params={
                'node': new._primary_key,
                'template_node': {
                    'id': self._primary_key,
                    'url': self.url,
                },
            },
            auth=auth,
            log_date=new.date_created,
            save=False,
        )
        # add mandatory addons
        # TODO: This logic also exists in self.save()
        for addon in settings.ADDONS_AVAILABLE:
            if 'node' in addon.added_default:
                new.add_addon(addon.short_name, auth=None, log=False)
        # deal with the children of the node, if any
        new.nodes = [
            x.use_as_template(auth, changes, top_level=False)
            for x in self.nodes
            if x.can_view(auth)
        ]
        new.save()
        return new
############
# Pointers #
############
    def add_pointer(self, node, auth, save=True):
        """Add a pointer to a node.

        :param Node node: Node to add
        :param Auth auth: Consolidated authorization
        :param bool save: Save changes
        :return: Created pointer
        :raises: ValueError for duplicates, folders already pointed at, or
            attempts to point at the dashboard
        """
        # Fail if node already in nodes / pointers. Note: cast node and node
        # to primary keys to test for conflicts with both nodes and pointers
        # contained in `self.nodes`.
        if node._id in self.node_ids:
            raise ValueError(
                'Pointer to node {0} already in list'.format(node._id)
            )
        # If a folder, prevent more than one pointer to that folder. This will prevent infinite loops on the Dashboard.
        # Also, no pointers to the dashboard project, which could cause loops as well.
        already_pointed = node.pointed
        if node.is_folder and len(already_pointed) > 0:
            raise ValueError(
                'Pointer to folder {0} already exists. Only one pointer to any given folder allowed'.format(node._id)
            )
        if node.is_dashboard:
            raise ValueError(
                'Pointer to dashboard ({0}) not allowed.'.format(node._id)
            )
        # Append pointer
        pointer = Pointer(node=node)
        pointer.save()
        self.nodes.append(pointer)
        # Add log
        self.add_log(
            action=NodeLog.POINTER_CREATED,
            params={
                'parent_node': self.parent_id,
                'node': self._primary_key,
                'pointer': {
                    'id': pointer.node._id,
                    'url': pointer.node.url,
                    'title': pointer.node.title,
                    'category': pointer.node.category,
                },
            },
            auth=auth,
            save=False,
        )
        # Optionally save changes
        if save:
            self.save()
        return pointer
def rm_pointer(self, pointer, auth):
"""Remove a pointer.
:param Pointer pointer: Pointer to remove
:param Auth auth: Consolidated authorization
"""
if pointer not in self.nodes:
raise ValueError
# Remove `Pointer` object; will also remove self from `nodes` list of
# parent node
Pointer.remove_one(pointer)
# Add log
self.add_log(
action=NodeLog.POINTER_REMOVED,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'pointer': {
'id': pointer.node._id,
'url': pointer.node.url,
'title': pointer.node.title,
'category': pointer.node.category,
},
},
auth=auth,
save=False,
)
@property
def node_ids(self):
return [
node._id if node.primary else node.node._id
for node in self.nodes
]
@property
def nodes_primary(self):
return [
node
for node in self.nodes
if node.primary
]
    @property
    def depth(self):
        """Number of ancestors above this node (0 for a root)."""
        return len(self.parents)
def next_descendants(self, auth, condition=lambda auth, node: True):
"""
Recursively find the first set of descedants under a given node that meet a given condition
returns a list of [(node, [children]), ...]
"""
ret = []
for node in self.nodes:
if condition(auth, node):
# base case
ret.append((node, []))
else:
ret.append((node, node.next_descendants(auth, condition)))
ret = [item for item in ret if item[1] or condition(auth, item[0])] # prune empty branches
return ret
def get_descendants_recursive(self, include=lambda n: True):
for node in self.nodes:
if include(node):
yield node
if node.primary:
for descendant in node.get_descendants_recursive(include):
if include(descendant):
yield descendant
    def get_aggregate_logs_queryset(self, auth):
        """Queryset of logs for this node and all viewable descendants,
        newest first.
        """
        ids = [self._id] + [n._id
                            for n in self.get_descendants_recursive()
                            if n.can_view(auth)]
        query = Q('__backrefs.logged.node.logs', 'in', ids)
        return NodeLog.find(query).sort('-_id')
@property
def nodes_pointer(self):
return [
node
for node in self.nodes
if not node.primary
]
@property
def has_pointers_recursive(self):
"""Recursively checks whether the current node or any of its nodes
contains a pointer.
"""
if self.nodes_pointer:
return True
for node in self.nodes_primary:
if node.has_pointers_recursive:
return True
return False
    @property
    def pointed(self):
        """Pointers that point at this node; empty list if none recorded."""
        return getattr(self, '_pointed', [])
def pointing_at(self, pointed_node_id):
"""This node is pointed at another node.
:param Node pointed_node_id: The node id of the node being pointed at.
:return: pointer_id
"""
for pointer in self.nodes_pointer:
node_id = pointer.node._id
if node_id == pointed_node_id:
return pointer._id
return None
def get_points(self, folders=False, deleted=False, resolve=True):
ret = []
for each in self.pointed:
pointer_node = get_pointer_parent(each)
if not folders and pointer_node.is_folder:
continue
if not deleted and pointer_node.is_deleted:
continue
if resolve:
ret.append(pointer_node)
else:
ret.append(each)
return ret
    def resolve(self):
        """A real node resolves to itself (pointers resolve to their target)."""
        return self
    def fork_pointer(self, pointer, auth, save=True):
        """Replace a pointer with a fork. If the pointer points to a project,
        fork the project and replace the pointer with a new pointer pointing
        to the fork. If the pointer points to a component, fork the component
        and add it to the current node.

        :param Pointer pointer:
        :param Auth auth:
        :param bool save:
        :return: Forked node
        :raises: ValueError if the pointer is not in `nodes` or the fork fails
        """
        # Fail if pointer not contained in `nodes`
        try:
            index = self.nodes.index(pointer)
        except ValueError:
            raise ValueError('Pointer {0} not in list'.format(pointer._id))
        # Get pointed node
        node = pointer.node
        # Fork into current node and replace pointer with forked component
        forked = node.fork_node(auth)
        if forked is None:
            raise ValueError('Could not fork node')
        self.nodes[index] = forked
        # Add log
        self.add_log(
            NodeLog.POINTER_FORKED,
            params={
                'parent_node': self.parent_id,
                'node': self._primary_key,
                'pointer': {
                    'id': pointer.node._id,
                    'url': pointer.node.url,
                    'title': pointer.node.title,
                    'category': pointer.node.category,
                },
            },
            auth=auth,
            save=False,
        )
        # Optionally save changes
        if save:
            self.save()
        # Garbage-collect pointer. Note: Must save current node before
        # removing pointer, else remove will fail when trying to remove
        # backref from self to pointer.
        Pointer.remove_one(pointer)
        # Return forked content
        return forked
def get_recent_logs(self, n=10):
"""Return a list of the n most recent logs, in reverse chronological
order.
:param int n: Number of logs to retrieve
"""
return list(reversed(self.logs)[:n])
@property
def date_modified(self):
'''The most recent datetime when this node was modified, based on
the logs.
'''
try:
return self.logs[-1].date
except IndexError:
return None
def set_title(self, title, auth, save=False):
"""Set the title of this Node and log it.
:param str title: The new title.
:param auth: All the auth information including user, API key.
"""
if title is None or not title.strip():
raise ValidationValueError('Title cannot be blank.')
original_title = self.title
self.title = title
self.add_log(
action=NodeLog.EDITED_TITLE,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'title_new': self.title,
'title_original': original_title,
},
auth=auth,
save=False,
)
if save:
self.save()
return None
def set_description(self, description, auth, save=False):
"""Set the description and log the event.
:param str description: The new description
:param auth: All the auth informtion including user, API key.
:param bool save: Save self after updating.
"""
original = self.description
self.description = description
self.add_log(
action=NodeLog.EDITED_DESCRIPTION,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'description_new': self.description,
'description_original': original
},
auth=auth,
save=False,
)
if save:
self.save()
return None
    def update_search(self):
        """Push this node's document to the search index; search outages are
        logged rather than propagated.
        """
        # Imported locally to avoid a circular import at module load time.
        from website import search
        try:
            search.search.update_node(self)
        except search.exceptions.SearchUnavailableError as e:
            logger.exception(e)
            log_exception()
    def delete_search_entry(self):
        """Remove this node's document from the search index; search outages
        are logged rather than propagated.
        """
        # Imported locally to avoid a circular import at module load time.
        from website import search
        try:
            search.search.delete_node(self)
        except search.exceptions.SearchUnavailableError as e:
            logger.exception(e)
            log_exception()
    def remove_node(self, auth, date=None):
        """Marks a node as deleted.

        TODO: Call a hook on addons
        Adds a log to the parent node if applicable

        :param auth: an instance of :class:`Auth`.
        :param date: Date node was removed
        :type date: `datetime.datetime` or `None`
        :raises NodeStateError: for dashboards or nodes with live children
        :raises PermissionsError: if `auth` cannot edit this node
        """
        # TODO: rename "date" param - it's shadowing a global
        if self.is_dashboard:
            raise NodeStateError("Dashboards may not be deleted.")
        if not self.can_edit(auth):
            raise PermissionsError('{0!r} does not have permission to modify this {1}'.format(auth.user, self.category or 'node'))
        #if this is a folder, remove all the folders that this is pointing at.
        if self.is_folder:
            for pointed in self.nodes_pointer:
                if pointed.node.is_folder:
                    pointed.node.remove_node(auth=auth)
        if [x for x in self.nodes_primary if not x.is_deleted]:
            raise NodeStateError("Any child components must be deleted prior to deleting this project.")
        # After delete callback
        for addon in self.get_addons():
            message = addon.after_delete(self, auth.user)
            if message:
                status.push_status_message(message)
        log_date = date or datetime.datetime.utcnow()
        # Add log to parent
        if self.node__parent:
            self.node__parent[0].add_log(
                NodeLog.NODE_REMOVED,
                params={
                    'project': self._primary_key,
                },
                auth=auth,
                log_date=log_date,
                save=True,
            )
        else:
            self.add_log(
                NodeLog.PROJECT_DELETED,
                params={
                    'project': self._primary_key,
                },
                auth=auth,
                log_date=log_date,
                save=True,
            )
        self.is_deleted = True
        # NOTE(review): this stores the raw `date` argument, which may be
        # None even though the log uses `log_date` -- confirm intent.
        self.deleted_date = date
        self.save()
        auth_signals.node_deleted.send(self)
        return True
    def fork_node(self, auth, title='Fork of '):
        """Recursively fork a node.

        :param Auth auth: Consolidated authorization
        :param str title: Optional text to prepend to forked title
        :return: Forked node
        :raises PermissionsError: if the user may not read this node
        :raises NodeStateError: if the node has been deleted
        """
        user = auth.user
        # Non-contributors can't fork private nodes
        if not (self.is_public or self.has_permission(user, 'read')):
            raise PermissionsError('{0!r} does not have permission to fork node {1!r}'.format(user, self._id))
        when = datetime.datetime.utcnow()
        # Re-load a pristine copy of this record to clone from.
        original = self.load(self._primary_key)
        if original.is_deleted:
            raise NodeStateError('Cannot fork deleted node.')
        # Note: Cloning a node copies its `wiki_pages_current` and
        # `wiki_pages_versions` fields, but does not clone the underlying
        # database objects to which these dictionaries refer. This means that
        # the cloned node must pass itself to its wiki objects to build the
        # correct URLs to that content.
        forked = original.clone()
        forked.logs = self.logs
        forked.tags = self.tags
        # Recursively fork child nodes
        for node_contained in original.nodes:
            if not node_contained.is_deleted:
                forked_node = None
                try:  # Catch the potential PermissionsError above
                    forked_node = node_contained.fork_node(auth=auth, title='')
                except PermissionsError:
                    pass  # If this exception is thrown omit the node from the result set
                if forked_node is not None:
                    forked.nodes.append(forked_node)
        forked.title = title + forked.title
        forked.is_fork = True
        forked.is_registration = False
        forked.forked_date = when
        forked.forked_from = original
        forked.creator = user
        forked.piwik_site_id = None
        # Forks default to private status
        forked.is_public = False
        # Clear permissions before adding users
        forked.permissions = {}
        forked.visible_contributor_ids = []
        forked.add_contributor(contributor=user, log=False, save=False)
        forked.add_log(
            action=NodeLog.NODE_FORKED,
            params={
                'parent_node': original.parent_id,
                'node': original._primary_key,
                'registration': forked._primary_key,
            },
            auth=auth,
            log_date=when,
            save=False,
        )
        forked.save()
        # After fork callback
        for addon in original.get_addons():
            _, message = addon.after_fork(original, forked, user)
            if message:
                status.push_status_message(message)
        return forked
    def register_node(self, schema, auth, template, data):
        """Make a frozen copy of a node.

        :param schema: Schema object
        :param auth: All the auth information including user, API key.
        :param template: Template name
        :param data: Form data
        :return: The registration `Node`
        :raises PermissionsError: if the user may not register this node
        :raises NodeStateError: for folders or deleted nodes
        """
        # NOTE: Admins can register child nodes even if they don't have write access them
        if not self.can_edit(auth=auth) and not self.is_admin_parent(user=auth.user):
            raise PermissionsError(
                'User {} does not have permission '
                'to register this node'.format(auth.user._id)
            )
        if self.is_folder:
            raise NodeStateError("Folders may not be registered")
        # Normalize the template name for use as a dictionary key.
        template = urllib.unquote_plus(template)
        template = to_mongo(template)
        when = datetime.datetime.utcnow()
        original = self.load(self._primary_key)
        # Note: Cloning a node copies its `wiki_pages_current` and
        # `wiki_pages_versions` fields, but does not clone the underlying
        # database objects to which these dictionaries refer. This means that
        # the cloned node must pass itself to its wiki objects to build the
        # correct URLs to that content.
        if original.is_deleted:
            raise NodeStateError('Cannot register deleted node.')
        registered = original.clone()
        registered.is_registration = True
        registered.registered_date = when
        registered.registered_user = auth.user
        registered.registered_schema = schema
        registered.registered_from = original
        if not registered.registered_meta:
            registered.registered_meta = {}
        registered.registered_meta[template] = data
        registered.contributors = self.contributors
        registered.forked_from = self.forked_from
        registered.creator = self.creator
        registered.logs = self.logs
        registered.tags = self.tags
        registered.piwik_site_id = None
        registered.save()
        # After register callback
        for addon in original.get_addons():
            _, message = addon.after_register(original, registered, auth.user)
            if message:
                status.push_status_message(message)
        # Recursively register non-deleted children.
        registered.nodes = []
        for node_contained in original.nodes:
            if not node_contained.is_deleted:
                registered_node = node_contained.register_node(
                    schema, auth, template, data
                )
                if registered_node is not None:
                    registered.nodes.append(registered_node)
        original.add_log(
            action=NodeLog.PROJECT_REGISTERED,
            params={
                'parent_node': original.parent_id,
                'node': original._primary_key,
                'registration': registered._primary_key,
            },
            auth=auth,
            log_date=when,
            save=False,
        )
        original.save()
        registered.save()
        for node in registered.nodes:
            node.update_search()
        return registered
def remove_tag(self, tag, auth, save=True):
if tag in self.tags:
self.tags.remove(tag)
self.add_log(
action=NodeLog.TAG_REMOVED,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'tag': tag,
},
auth=auth,
save=False,
)
if save:
self.save()
def add_tag(self, tag, auth, save=True):
if tag not in self.tags:
new_tag = Tag.load(tag)
if not new_tag:
new_tag = Tag(_id=tag)
new_tag.save()
self.tags.append(new_tag)
self.add_log(
action=NodeLog.TAG_ADDED,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'tag': tag,
},
auth=auth,
save=False,
)
if save:
self.save()
def add_log(self, action, params, auth, foreign_user=None, log_date=None, save=True):
user = auth.user if auth else None
api_key = auth.api_key if auth else None
params['node'] = params.get('node') or params.get('project')
log = NodeLog(
action=action,
user=user,
foreign_user=foreign_user,
api_key=api_key,
params=params,
)
if log_date:
log.date = log_date
log.save()
self.logs.append(log)
if save:
self.save()
if user:
increment_user_activity_counters(user._primary_key, action, log.date)
return log
    @property
    def url(self):
        """Relative web URL for this node."""
        return '/{}/'.format(self._primary_key)
    def web_url_for(self, view_name, _absolute=False, _guid=False, *args, **kwargs):
        """Build a web URL for `view_name` with this node's id as `pid`."""
        return web_url_for(view_name, pid=self._primary_key, _absolute=_absolute, _guid=_guid, *args, **kwargs)
    def api_url_for(self, view_name, _absolute=False, *args, **kwargs):
        """Build an API URL for `view_name` with this node's id as `pid`."""
        return api_url_for(view_name, pid=self._primary_key, _absolute=_absolute, *args, **kwargs)
@property
def absolute_url(self):
if not self.url:
logger.error('Node {0} has a parent that is not a project'.format(self._id))
return None
return urlparse.urljoin(settings.DOMAIN, self.url)
@property
def display_absolute_url(self):
url = self.absolute_url
if url is not None:
return re.sub(r'https?:', '', url).strip('/')
    @property
    def api_v2_url(self):
        """Relative v2 API URL for this node."""
        return reverse('nodes:node-detail', kwargs={'node_id': self._id})
    @property
    def absolute_api_v2_url(self):
        """Absolute v2 API URL for this node."""
        return absolute_reverse('nodes:node-detail', kwargs={'node_id': self._id})
# used by django and DRF
    def get_absolute_url(self):
        """Django/DRF hook; delegates to `absolute_api_v2_url`."""
        return self.absolute_api_v2_url
    @property
    def api_url(self):
        """Relative v1 API URL, or ``None`` when `url` is unavailable."""
        if not self.url:
            logger.error('Node {0} has a parent that is not a project'.format(self._id))
            return None
        return '/api/v1{0}'.format(self.deep_url)
    @property
    def deep_url(self):
        """Canonical '/project/<id>/' URL regardless of nesting."""
        return '/project/{}/'.format(self._primary_key)
@property
def csl(self): # formats node information into CSL format for citation parsing
"""a dict in CSL-JSON schema
For details on this schema, see:
https://github.com/citation-style-language/schema#csl-json-schema
"""
csl = {
'id': self._id,
'title': html_parser.unescape(self.title),
'author': [
contributor.csl_name # method in auth/model.py which parses the names of authors
for contributor in self.visible_contributors
],
'publisher': 'Open Science Framework',
'type': 'webpage',
'URL': self.display_absolute_url,
}
doi = self.get_identifier_value('doi')
if doi:
csl['DOI'] = doi
if self.logs:
csl['issued'] = datetime_to_csl(self.logs[-1].date)
return csl
def author_list(self, and_delim='&'):
author_names = [
author.biblio_name
for author in self.visible_contributors
if author
]
if len(author_names) < 2:
return ' {0} '.format(and_delim).join(author_names)
if len(author_names) > 7:
author_names = author_names[:7]
author_names.append('et al.')
return ', '.join(author_names)
return u'{0}, {1} {2}'.format(
', '.join(author_names[:-1]),
and_delim,
author_names[-1]
)
@property
def templated_list(self):
return [
x
for x in self.node__template_node
if not x.is_deleted
]
@property
def parent_node(self):
"""The parent node, if it exists, otherwise ``None``. Note: this
property is named `parent_node` rather than `parent` to avoid a
conflict with the `parent` back-reference created by the `nodes`
field on this schema.
"""
try:
if not self.node__parent[0].is_deleted:
return self.node__parent[0]
except IndexError:
pass
return None
@property
def root(self):
if self.parent_node:
return self.parent_node.root
else:
return self
    @property
    def watch_url(self):
        """API URL used to (un)watch this node."""
        return os.path.join(self.api_url, "watch/")
@property
def parent_id(self):
if self.node__parent:
return self.node__parent[0]._primary_key
return None
@property
def project_or_component(self):
return 'project' if self.category == 'project' else 'component'
def is_contributor(self, user):
return (
user is not None
and (
user._id in self.contributors
)
)
def add_addon(self, addon_name, auth, log=True, *args, **kwargs):
    """Add an add-on to the node. Do nothing if the addon is already
    enabled.

    :param str addon_name: Name of add-on
    :param Auth auth: Consolidated authorization object
    :param bool log: Add a log after adding the add-on
    :return: A boolean, whether the addon was added
    """
    # Delegate to the mixin; falsy return means the addon was already enabled.
    ret = AddonModelMixin.add_addon(self, addon_name, auth=auth,
                                    *args, **kwargs)
    if ret and log:
        config = settings.ADDONS_AVAILABLE_DICT[addon_name]
        self.add_log(
            action=NodeLog.ADDON_ADDED,
            params={
                'project': self.parent_id,
                'node': self._primary_key,
                'addon': config.full_name,
            },
            auth=auth,
            save=False,
        )
        # Log is created with save=False, so persist node and log together.
        self.save()  # TODO: here, or outside the conditional? @mambocab
    return ret
def delete_addon(self, addon_name, auth, _force=False):
    """Delete an add-on from the node.

    :param str addon_name: Name of add-on
    :param Auth auth: Consolidated authorization object
    :param bool _force: For migration testing ONLY. Do not set to True
        in the application, or else projects will be allowed to delete
        mandatory add-ons!
    :return bool: Add-on was deleted
    """
    # Superclass handles the actual removal (and the mandatory-addon guard).
    ret = super(Node, self).delete_addon(addon_name, auth, _force)
    if ret:
        config = settings.ADDONS_AVAILABLE_DICT[addon_name]
        self.add_log(
            action=NodeLog.ADDON_REMOVED,
            params={
                'project': self.parent_id,
                'node': self._primary_key,
                'addon': config.full_name,
            },
            auth=auth,
            save=False,
        )
        # Log is created with save=False, so persist node and log together.
        self.save()
        # TODO: save here or outside the conditional? @mambocab
    return ret
def callback(self, callback, recursive=False, *args, **kwargs):
    """Invoke the named callback on every attached add-on and collect
    any non-empty messages they return.

    :param str callback: Name of callback method to invoke
    :param bool recursive: Also apply the callback to non-deleted children
    :return list: List of callback messages
    """
    collected = []
    for addon in self.get_addons():
        handler = getattr(addon, callback)
        result = handler(self, *args, **kwargs)
        if result:
            collected.append(result)
    if recursive:
        live_children = (child for child in self.nodes if not child.is_deleted)
        for child in live_children:
            collected.extend(
                child.callback(callback, recursive, *args, **kwargs)
            )
    return collected
def replace_contributor(self, old, new):
    """Replace contributor ``old`` with ``new`` in place, migrating
    permissions and visibility.

    :return bool: Whether ``old`` was found (and therefore replaced)
    """
    for i, contrib in enumerate(self.contributors):
        if contrib._primary_key == old._primary_key:
            # Swap in the new user at the same position.
            self.contributors[i] = new
            # Remove unclaimed record for the project
            if self._primary_key in old.unclaimed_records:
                del old.unclaimed_records[self._primary_key]
                old.save()
            # Copy each of the old user's permissions to the new user,
            # then drop the old user's permission entry entirely.
            for permission in self.get_permissions(old):
                self.add_permission(new, permission)
            self.permissions.pop(old._id)
            if old._id in self.visible_contributor_ids:
                # Preserve the old user's position in the visible list.
                self.visible_contributor_ids[self.visible_contributor_ids.index(old._id)] = new._id
            return True
    return False
def remove_contributor(self, contributor, auth, log=True):
    """Remove a contributor from this node.

    :param contributor: User object, the contributor to be removed
    :param auth: All the auth information including user, API key.
    :param bool log: Whether to record a CONTRIB_REMOVED log entry
    :return bool: False if the removal would leave no registered admin
        contributor, otherwise True
    """
    # remove unclaimed record if necessary
    if self._primary_key in contributor.unclaimed_records:
        del contributor.unclaimed_records[self._primary_key]
    self.contributors.remove(contributor._id)
    self.clear_permission(contributor)
    if contributor._id in self.visible_contributor_ids:
        self.visible_contributor_ids.remove(contributor._id)
    # Node must have at least one registered admin user
    # TODO: Move to validator or helper
    admins = [
        user for user in self.contributors
        if self.has_permission(user, 'admin')
        and user.is_registered
    ]
    if not admins:
        # NOTE(review): the contributor was already removed from
        # `contributors`/permissions above and nothing is saved on this
        # path, so a False return relies on callers discarding the
        # unsaved changes — confirm callers do not save afterwards.
        return False
    # Clear permissions for removed user
    self.permissions.pop(contributor._id, None)
    # After remove callback
    for addon in self.get_addons():
        message = addon.after_remove_contributor(self, contributor, auth)
        if message:
            status.push_status_message(message)
    if log:
        self.add_log(
            action=NodeLog.CONTRIB_REMOVED,
            params={
                'project': self.parent_id,
                'node': self._primary_key,
                'contributor': contributor._id,
            },
            auth=auth,
            save=False,
        )
    self.save()
    # send signal to remove this user from project subscriptions
    auth_signals.contributor_removed.send(contributor, node=self)
    return True
def remove_contributors(self, contributors, auth=None, log=True, save=False):
    """Remove several contributors, optionally recording a single
    CONTRIB_REMOVED log entry for the whole batch.

    :param contributors: Iterable of contributor User objects
    :param auth: All the auth information including user, API key
    :param bool log: Record one log entry covering all removals
    :param bool save: Save after removing
    :return bool: True only if every individual removal succeeded
    """
    outcomes = []
    removed_ids = []
    for contributor in contributors:
        ok = self.remove_contributor(
            contributor=contributor, auth=auth, log=False,
        )
        outcomes.append(ok)
        removed_ids.append(contributor._id)
    if log:
        self.add_log(
            action=NodeLog.CONTRIB_REMOVED,
            params={
                'project': self.parent_id,
                'node': self._primary_key,
                'contributors': removed_ids,
            },
            auth=auth,
            save=False,
        )
    if save:
        self.save()
    return False not in outcomes
def manage_contributors(self, user_dicts, auth, save=False):
    """Reorder and remove contributors.

    :param list user_dicts: Ordered list of contributors represented as
        dictionaries of the form:
        {'id': <id>, 'permission': <One of 'read', 'write', 'admin'>, 'visible': bool}
    :param Auth auth: Consolidated authentication information
    :param bool save: Save changes
    :raises: ValueError if any users in `users` not in contributors or if
        no admin contributors remaining
    """
    with TokuTransaction():
        users = []
        user_ids = []
        permissions_changed = {}
        to_retain = []
        to_remove = []
        for user_dict in user_dicts:
            user = User.load(user_dict['id'])
            if user is None:
                raise ValueError('User not found')
            if user not in self.contributors:
                raise ValueError(
                    'User {0} not in contributors'.format(user.fullname)
                )
            # Expand shorthand permission (e.g. 'write') to the full list.
            permissions = expand_permissions(user_dict['permission'])
            if set(permissions) != set(self.get_permissions(user)):
                self.set_permissions(user, permissions, save=False)
                permissions_changed[user._id] = permissions
            self.set_visible(user, user_dict['visible'], auth=auth)
            users.append(user)
            user_ids.append(user_dict['id'])
        # Partition current contributors into those kept and those dropped.
        for user in self.contributors:
            if user._id in user_ids:
                to_retain.append(user)
            else:
                to_remove.append(user)
        # TODO: Move to validator or helper @jmcarp
        admins = [
            user for user in users
            if self.has_permission(user, 'admin')
            and user.is_registered
        ]
        if users is None or not admins:
            raise ValueError(
                'Must have at least one registered admin contributor'
            )
        # Order differs from the requested order => a reorder happened.
        if to_retain != users:
            self.add_log(
                action=NodeLog.CONTRIB_REORDERED,
                params={
                    'project': self.parent_id,
                    'node': self._id,
                    'contributors': [
                        user._id
                        for user in users
                    ],
                },
                auth=auth,
                save=False,
            )
        if to_remove:
            self.remove_contributors(to_remove, auth=auth, save=False)
        self.contributors = users
        if permissions_changed:
            self.add_log(
                action=NodeLog.PERMISSIONS_UPDATED,
                params={
                    'project': self.parent_id,
                    'node': self._id,
                    'contributors': permissions_changed,
                },
                auth=auth,
                save=False,
            )
        # Update list of visible IDs
        self.update_visible_ids()
        if save:
            self.save()
    with TokuTransaction():
        # Fire when anyone was removed, or anyone was demoted to read-only.
        # (Precedence: `to_remove or (permissions_changed and ...)`.)
        if to_remove or permissions_changed and ['read'] in permissions_changed.values():
            write_permissions_revoked.send(self)
def add_contributor(self, contributor, permissions=None, visible=True,
                    auth=None, log=True, save=False):
    """Add a contributor to the project.

    :param User contributor: The contributor to be added
    :param list permissions: Permissions to grant to the contributor
    :param bool visible: Contributor is visible in project dashboard
    :param Auth auth: All the auth information including user, API key
    :param bool log: Add log to self
    :param bool save: Save after adding contributor
    :returns: Whether contributor was added
    """
    # Cap on the length of a user's "recently added collaborators" list.
    MAX_RECENT_LENGTH = 15

    # If user is merged into another account, use master account
    contrib_to_add = contributor.merged_by if contributor.is_merged else contributor
    if contrib_to_add not in self.contributors:
        self.contributors.append(contrib_to_add)
        if visible:
            self.set_visible(contrib_to_add, visible=True, log=False)

        # Add default contributor permissions
        permissions = permissions or DEFAULT_CONTRIBUTOR_PERMISSIONS
        for permission in permissions:
            self.add_permission(contrib_to_add, permission, save=False)

        # Add contributor to recently added list for user
        if auth is not None:
            user = auth.user
            # Move to the front of the list (most recent first), dedup'd.
            if contrib_to_add in user.recently_added:
                user.recently_added.remove(contrib_to_add)
            user.recently_added.insert(0, contrib_to_add)
            while len(user.recently_added) > MAX_RECENT_LENGTH:
                user.recently_added.pop()

        if log:
            self.add_log(
                action=NodeLog.CONTRIB_ADDED,
                params={
                    'project': self.parent_id,
                    'node': self._primary_key,
                    'contributors': [contrib_to_add._primary_key],
                },
                auth=auth,
                save=False,
            )
        if save:
            self.save()

        contributor_added.send(self, contributor=contributor, auth=auth)
        return True

    # Permissions must be overridden if changed when contributor is added to parent he/she is already on a child of.
    elif contrib_to_add in self.contributors and permissions is not None:
        self.set_permissions(contrib_to_add, permissions)
        if save:
            self.save()
        return False
    else:
        return False
def add_contributors(self, contributors, auth=None, log=True, save=False):
    """Add multiple contributors.

    :param contributors: A list of dicts with 'user', 'permissions' and
        'visible' keys describing each contributor to add.
    :param auth: All the auth information including user, API key.
    :param log: Record a single CONTRIB_ADDED log entry for the batch
    :param save: Save after adding contributors
    """
    for entry in contributors:
        self.add_contributor(
            contributor=entry['user'],
            permissions=entry['permissions'],
            visible=entry['visible'],
            auth=auth,
            log=False,
            save=False,
        )
    if log and contributors:
        added_ids = [entry['user']._id for entry in contributors]
        self.add_log(
            action=NodeLog.CONTRIB_ADDED,
            params={
                'project': self.parent_id,
                'node': self._primary_key,
                'contributors': added_ids,
            },
            auth=auth,
            save=False,
        )
    if save:
        self.save()
def add_unregistered_contributor(self, fullname, email, auth,
                                 permissions=None, save=False):
    """Add a non-registered contributor to the project.

    :param str fullname: The full name of the person.
    :param str email: The email address of the person.
    :param Auth auth: Auth object for the user adding the contributor.
    :param list permissions: Permissions to grant to the contributor
    :param bool save: (unused here; node is saved unconditionally below)
    :returns: The added contributor
    :raises: DuplicateEmailError if user with given email is already in the database.
    """
    # Create a new user record
    contributor = User.create_unregistered(fullname=fullname, email=email)

    contributor.add_unclaimed_record(node=self, referrer=auth.user,
                                     given_name=fullname, email=email)
    try:
        contributor.save()
    except ValidationValueError:  # User with same email already exists
        contributor = get_user(email=email)
        # Unregistered users may have multiple unclaimed records, so
        # only raise error if user is registered.
        if contributor.is_registered or self.is_contributor(contributor):
            raise
        # Reattach the unclaimed record to the existing unregistered user.
        contributor.add_unclaimed_record(node=self, referrer=auth.user,
                                         given_name=fullname, email=email)
        contributor.save()

    self.add_contributor(
        contributor, permissions=permissions, auth=auth,
        log=True, save=False,
    )
    self.save()
    return contributor
def set_privacy(self, permissions, auth=None):
    """Set the privacy (public/private) for this node.

    :param permissions: A string, either 'public' or 'private'
    :param auth: All the auth information including user, API key.
    :return bool: False if the node is already in the requested state,
        otherwise True
    """
    if permissions == 'public' and not self.is_public:
        self.is_public = True
    elif permissions == 'private' and self.is_public:
        self.is_public = False
    else:
        # Already in the requested state (or unrecognized value): no-op.
        return False

    # After set permissions callback
    for addon in self.get_addons():
        message = addon.after_set_privacy(self, permissions)
        if message:
            status.push_status_message(message)

    action = NodeLog.MADE_PUBLIC if permissions == 'public' else NodeLog.MADE_PRIVATE
    self.add_log(
        action=action,
        params={
            'project': self.parent_id,
            'node': self._primary_key,
        },
        auth=auth,
        save=False,
    )
    self.save()
    return True
# TODO: Move to wiki add-on
def get_wiki_page(self, name=None, version=None, id=None):
    """Look up a wiki page by name (with optional version) or by id.

    :param name: Display name of the wiki page; resolved via to_mongo_key
    :param version: Int/str version number, 'previous', 'current', or None
        (None behaves like 'current')
    :param id: NodeWikiPage primary key; used directly when name is falsy
    :return: The NodeWikiPage, or None when not found / invalid version
    """
    from website.addons.wiki.model import NodeWikiPage

    if name:
        name = (name or '').strip()
        key = to_mongo_key(name)
        try:
            if version and (isinstance(version, int) or version.isdigit()):
                # Versions are stored 1-based in wiki_pages_versions.
                id = self.wiki_pages_versions[key][int(version) - 1]
            elif version == 'previous':
                id = self.wiki_pages_versions[key][-2]
            elif version == 'current' or version is None:
                id = self.wiki_pages_current[key]
            else:
                return None
        except (KeyError, IndexError):
            # Unknown page name or out-of-range version.
            return None
    return NodeWikiPage.load(id)
# TODO: Move to wiki add-on
def update_node_wiki(self, name, content, auth):
    """Update the node's wiki page with new content, creating a new
    version (and the page itself if needed).

    :param name: A string, the page's name, e.g. ``"home"``.
    :param content: A string, the posted content.
    :param auth: All the auth information including user, API key.
    """
    from website.addons.wiki.model import NodeWikiPage

    name = (name or '').strip()
    key = to_mongo_key(name)

    if key not in self.wiki_pages_current:
        if key in self.wiki_pages_versions:
            # Page existed before and was deleted; continue its version count.
            version = len(self.wiki_pages_versions[key]) + 1
        else:
            version = 1
    else:
        # Retire the current version before creating its successor.
        current = NodeWikiPage.load(self.wiki_pages_current[key])
        current.is_current = False
        version = current.version + 1
        current.save()

    new_page = NodeWikiPage(
        page_name=name,
        version=version,
        user=auth.user,
        is_current=True,
        node=self,
        content=content
    )
    new_page.save()

    # check if the wiki page already exists in versions (existed once and is now deleted)
    if key not in self.wiki_pages_versions:
        self.wiki_pages_versions[key] = []
    self.wiki_pages_versions[key].append(new_page._primary_key)
    self.wiki_pages_current[key] = new_page._primary_key

    self.add_log(
        action=NodeLog.WIKI_UPDATED,
        params={
            'project': self.parent_id,
            'node': self._primary_key,
            'page': new_page.page_name,
            'page_id': new_page._primary_key,
            'version': new_page.version,
        },
        auth=auth,
        log_date=new_page.date,
        save=False,
    )
    self.save()
# TODO: Move to wiki add-on
def rename_node_wiki(self, name, new_name, auth):
    """Rename the node's wiki page with new name.

    :param name: A string, the page's name, e.g. ``"My Page"``.
    :param new_name: A string, the new page's name, e.g. ``"My Renamed Page"``.
    :param auth: All the auth information including user, API key.
    :raises PageCannotRenameError: when renaming the home page
    :raises PageNotFoundError: when no page with ``name`` exists
    :raises PageConflictError: when ``new_name`` is taken (or is 'home')
    """
    # TODO: Fix circular imports
    from website.addons.wiki.exceptions import (
        PageCannotRenameError,
        PageConflictError,
        PageNotFoundError,
    )

    name = (name or '').strip()
    key = to_mongo_key(name)
    new_name = (new_name or '').strip()
    new_key = to_mongo_key(new_name)
    page = self.get_wiki_page(name)

    if key == 'home':
        raise PageCannotRenameError('Cannot rename wiki home page')
    if not page:
        raise PageNotFoundError('Wiki page not found')
    # Conflict if target key is taken by another page, or target is 'home'.
    if (new_key in self.wiki_pages_current and key != new_key) or new_key == 'home':
        raise PageConflictError(
            'Page already exists with name {0}'.format(
                new_name,
            )
        )

    # rename the page first in case we hit a validation exception.
    old_name = page.page_name
    page.rename(new_name)

    # TODO: merge historical records like update (prevents log breaks)
    # transfer the old page versions/current keys to the new name.
    if key != new_key:
        self.wiki_pages_versions[new_key] = self.wiki_pages_versions[key]
        del self.wiki_pages_versions[key]
        self.wiki_pages_current[new_key] = self.wiki_pages_current[key]
        del self.wiki_pages_current[key]
        if key in self.wiki_private_uuids:
            self.wiki_private_uuids[new_key] = self.wiki_private_uuids[key]
            del self.wiki_private_uuids[key]

    self.add_log(
        action=NodeLog.WIKI_RENAMED,
        params={
            'project': self.parent_id,
            'node': self._primary_key,
            'page': page.page_name,
            'page_id': page._primary_key,
            'old_page': old_name,
            'version': page.version,
        },
        auth=auth,
        save=False,
    )
    self.save()
def delete_node_wiki(self, name, auth):
    """Delete the current version of the named wiki page.

    Only the ``wiki_pages_current`` pointer is removed; historical
    versions remain in ``wiki_pages_versions`` so the page can be
    recreated later (see update_node_wiki).

    :param name: A string, the page's display name
    :param auth: All the auth information including user, API key.
    """
    name = (name or '').strip()
    key = to_mongo_key(name)

    # NOTE(review): other wiki methods pass the display name to
    # get_wiki_page, which applies to_mongo_key itself; passing `key`
    # here only works if to_mongo_key is idempotent — confirm.
    page = self.get_wiki_page(key)

    del self.wiki_pages_current[key]

    self.add_log(
        action=NodeLog.WIKI_DELETED,
        params={
            'project': self.parent_id,
            'node': self._primary_key,
            'page': page.page_name,
            'page_id': page._primary_key,
        },
        auth=auth,
        save=False,
    )
    self.save()
def get_stats(self, detailed=False):
    """Return basic hit counters for this node.

    :param bool detailed: Detailed stats are not implemented yet; passing
        True raises NotImplementedError.
    """
    if not detailed:
        return get_basic_counters('node:%s' % self._primary_key)
    raise NotImplementedError(
        'Detailed stats exist, but are not yet implemented.'
    )
# TODO: Deprecate this; it duplicates much of what serialize_project already
# does
def serialize(self, auth=None):
    """Dictionary representation of node that is nested within a NodeLog's
    representation.

    :param auth: Auth object used to compute the viewer-specific path
    :return dict: JSON-serializable summary of this node
    """
    # TODO: incomplete implementation
    return {
        'id': str(self._primary_key),
        'category': self.category_display,
        'node_type': self.project_or_component,
        'url': self.url,
        # TODO: Titles shouldn't contain escaped HTML in the first place
        'title': html_parser.unescape(self.title),
        'path': self.path_above(auth),
        'api_url': self.api_url,
        'is_public': self.is_public,
        'is_registration': self.is_registration,
    }
@Node.subscribe('before_save')
def validate_permissions(schema, instance):
    """Ensure that user IDs in `contributors` and `permissions` match.

    :param schema: The schema emitting the signal (unused)
    :param instance: The Node about to be saved
    :raises ValidationValueError: if either collection references a user
        missing from the other.
    """
    node = instance
    contributor_ids = set([user._id for user in node.contributors])
    permission_ids = set(node.permissions.keys())
    mismatched_contributors = contributor_ids.difference(permission_ids)
    if mismatched_contributors:
        raise ValidationValueError(
            'Contributors {0} missing from `permissions` on node {1}'.format(
                ', '.join(mismatched_contributors),
                node._id,
            )
        )
    mismatched_permissions = permission_ids.difference(contributor_ids)
    if mismatched_permissions:
        raise ValidationValueError(
            # BUG FIX: previously joined `mismatched_contributors`, which is
            # always empty on this branch, producing a blank ID list in the
            # error message; join the offending permission keys instead.
            'Permission keys {0} missing from `contributors` on node {1}'.format(
                ', '.join(mismatched_permissions),
                node._id,
            )
        )
@Node.subscribe('before_save')
def validate_visible_contributors(schema, instance):
    """Ensure that user IDs in `contributors` and `visible_contributor_ids`
    match.

    :param schema: The schema emitting the signal (unused)
    :param instance: The Node about to be saved
    :raises ValidationValueError: if a visible ID has no matching contributor
    """
    node = instance
    for user_id in node.visible_contributor_ids:
        # NOTE(review): `contributors` holds User objects while `user_id` is
        # a string; this membership test presumably relies on the ODM list
        # comparing by primary key — confirm.
        if user_id not in node.contributors:
            raise ValidationValueError(
                ('User {0} is in `visible_contributor_ids` but not in '
                 '`contributors` on node {1}').format(
                    user_id,
                    node._id,
                )
            )
class WatchConfig(StoredObject):
    """Per-node watch settings — presumably a user's notification
    preferences for a watched node (TODO confirm against callers)."""

    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
    node = fields.ForeignField('Node', backref='watched')
    # Batch updates into a digest rather than individual notifications.
    digest = fields.BooleanField(default=False)
    # Deliver updates immediately.
    immediate = fields.BooleanField(default=False)

    def __repr__(self):
        return '<WatchConfig(node="{self.node}")>'.format(self=self)
class PrivateLink(StoredObject):
    """A shareable key granting access to one or more nodes, optionally
    anonymizing contributor identities."""

    _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
    date_created = fields.DateTimeField(auto_now_add=datetime.datetime.utcnow)
    # The secret token embedded in the shared URL.
    key = fields.StringField(required=True)
    name = fields.StringField()
    is_deleted = fields.BooleanField(default=False)
    # When True, contributor names are hidden from link visitors.
    anonymous = fields.BooleanField(default=False)

    nodes = fields.ForeignField('node', list=True, backref='shared')
    creator = fields.ForeignField('user', backref='created')

    @property
    def node_ids(self):
        # Primary keys of every node shared through this link.
        node_ids = [node._id for node in self.nodes]
        return node_ids

    def node_scale(self, node):
        """Pixel indentation for displaying ``node`` in the link's node tree:
        each ancestor level within the link adds 20px; roots start at -20px
        overall (-40 base plus one +20 step)."""
        # node may be None if previous node's parent is deleted
        if node is None or node.parent_id not in self.node_ids:
            return -40
        else:
            offset = 20 if node.parent_node is not None else 0
            return offset + self.node_scale(node.parent_node)

    def to_json(self):
        """Serialize link metadata for template/API consumption."""
        return {
            "id": self._id,
            "date_created": self.date_created.strftime('%m/%d/%Y %I:%M %p UTC'),
            "key": self.key,
            "name": self.name,
            "creator": {'fullname': self.creator.fullname, 'url': self.creator.profile_url},
            "nodes": [{'title': x.title, 'url': x.url, 'scale': str(self.node_scale(x)) + 'px', 'category': x.category}
                      for x in self.nodes if not x.is_deleted],
            "anonymous": self.anonymous
        }
|
barbour-em/osf.io
|
website/project/model.py
|
Python
|
apache-2.0
| 89,823
|
[
"VisIt"
] |
d3eaaf5b7f66fde6c856be16676c94826a37a56de7b56abe17b27b8b431c0ae3
|
from nutils import *
from nutils.testing import *
import math, re
@parametrize
class gauss(TestCase):
    # Gaussian quadrature and exact integration on different element types.
    # Parametrized per element type by the gauss(...) calls at module level;
    # each instantiation supplies ndims and istensor (and optional overrides).
    maxdegree=7
    exclude=frozenset()

    def setUp(self):
        super().setUp()
        # All monomial exponent tuples with each exponent in [0, maxdegree).
        self.monomials = numpy.mgrid[ (slice(self.maxdegree),)*self.ndims ].reshape(self.ndims,-1).T
        if self.istensor:
            # Tensor-product element (line/quad/hex): exact integrals of
            # x^a y^b ... over [0,1]^n factorize to prod(1/(exp+1)).
            self.ref = element.getsimplex(1)**self.ndims
            self.integrals = numpy.reciprocal((self.monomials+1.).prod(-1))
        else:
            # Simplex (tri/tet): Dirichlet integral formula via gamma functions.
            self.ref = element.getsimplex(self.ndims)
            gamma = numpy.vectorize(math.gamma)
            self.integrals = gamma(self.monomials+1.).prod(-1) / gamma(self.ndims+1+self.monomials.sum(-1))

    def test_gauss(self):
        # A degree-d Gauss scheme must integrate exactly every monomial it
        # guarantees, and (for these reference cases) inexactly beyond that.
        for degree in range(1, self.maxdegree+1):
            with self.subTest(degree=degree):
                points = self.ref.getpoints('gauss', degree)
                for monomial, integral in zip(self.monomials, self.integrals):
                    result = numpy.dot(points.weights, numpy.prod(points.coords**monomial, axis=-1))
                    expect_exact = degree // 2 >= max(monomial) // 2 if self.istensor else degree >= sum(monomial)
                    if expect_exact:
                        self.assertAlmostEqual(result/integral, 1, msg='integration should be exact', places=12)
                    else:
                        self.assertNotAlmostEqual(result/integral, 1, msg='integration should not be exact', places=12)
                        # Counterexamples can be constructed, but in the case of monomials with maxdegree<8 this assert is verified

    def test_weights(self):
        # Quadrature weights of every supported scheme must sum to the
        # reference element's volume (partition of unity for constants).
        for ischeme in {'gauss', 'uniform', 'bezier'} - self.exclude:
            for degree in range(1, self.maxdegree+1):
                with self.subTest(ischeme=ischeme, degree=degree):
                    points = self.ref.getpoints(ischeme, degree)
                    self.assertAlmostEqual(points.weights.sum(), self.ref.volume, places=14)
# Instantiate the parametrized test case once per reference element type.
# 'tet' raises the degree bound and skips the 'uniform' scheme.
gauss('line', ndims=1, istensor=True)
gauss('quad', ndims=2, istensor=True)
gauss('hex', ndims=3, istensor=True)
gauss('tri', ndims=2, istensor=False)
gauss('tet', ndims=3, istensor=False, maxdegree=8, exclude={'uniform'})
|
wijnandhoitinga/nutils
|
tests/test_quadrature.py
|
Python
|
mit
| 2,061
|
[
"Gaussian"
] |
75fb0a6b551668869e934a180e0bdd5a7b2251def037a94fc4ab21593cb1dbba
|
from ovito import *
from ovito.io import *
from ovito.data import *
import numpy
# Load a LAMMPS data file that includes bond topology.
node = import_file("../../files/LAMMPS/bonds.data.gz", atom_style = 'bond')
print(node.source)

# Create a standard bond color property plus two user-defined bond properties,
# the second initialized from an explicit per-bond value array.
node.source.create_bond_property(BondProperty.Type.Color)
node.source.create_user_bond_property("MyProperty", "int", 2)
values = numpy.ones(node.source.number_of_half_bonds)
node.source.create_user_bond_property("MyProperty2", "float", 1, values)

# Exercise the bond-property collection's container interface and data access.
print("Number of data objects: ", len(node.source))
print(node.source.bond_properties)
print(node.source.bond_properties.bond_type)
print(list(node.source.bond_properties.keys()))
print(list(node.source.bond_properties.values()))
print(node.source.bond_properties["Bond Type"])
print(node.source.bond_properties.bond_type.array)
print(node.source.bond_properties["MyProperty2"].array)
|
srinath-chakravarthy/ovito
|
tests/scripts/test_suite/bond_properties.py
|
Python
|
gpl-3.0
| 812
|
[
"LAMMPS",
"OVITO"
] |
991344e19594edff0f844968e251833e73f72f362ef0a1da3cf2b74d20c1363c
|
# -*- coding: utf-8 -*-
# Author: Óscar Nájera
# License: 3-clause BSD
"""
========================
Backreferences Generator
========================
Reviews generated example files in order to keep track of used modules
"""
from __future__ import print_function
import ast
import os
# Try Python 2 first, otherwise load from Python 3
try:
import cPickle as pickle
except ImportError:
import pickle
class NameFinder(ast.NodeVisitor):
    """Finds the longest form of variable names and their imports in code

    Only retains names from imported modules.
    """

    def __init__(self):
        super(NameFinder, self).__init__()
        self.imported_names = {}
        self.accessed_names = set()

    def visit_Import(self, node, prefix=''):
        # Record each imported module under the local name it is bound to.
        for alias in node.names:
            bound_name = alias.asname if alias.asname else alias.name
            self.imported_names[bound_name] = prefix + alias.name

    def visit_ImportFrom(self, node):
        # ``from pkg import x`` resolves like ``import pkg.x``.
        self.visit_Import(node, node.module + '.')

    def visit_Name(self, node):
        self.accessed_names.add(node.id)

    def visit_Attribute(self, node):
        # Collect a dotted chain like a.b.c by walking down the value side.
        parts = []
        while isinstance(node, ast.Attribute):
            parts.append(node.attr)
            node = node.value
        if isinstance(node, ast.Name):
            # This is a.b, not e.g. a().b — record the full dotted name.
            parts.append(node.id)
            self.accessed_names.add('.'.join(reversed(parts)))
        else:
            # need to get a in a().b
            self.visit(node)

    def get_mapping(self):
        # Yield (name-as-written, fully-resolved name) for every accessed
        # name whose leading component was imported.
        for accessed in self.accessed_names:
            root = accessed.split('.', 1)[0]
            tail = accessed[len(root):]
            if root in self.imported_names:
                # Join import path to relative path
                yield accessed, self.imported_names[root] + tail
def get_short_module_name(module_name, obj_name):
    """Return the shortest prefix of ``module_name`` from which
    ``obj_name`` can still be imported."""
    parts = module_name.split('.')
    short_name = module_name
    for i in range(len(parts) - 1, 0, -1):
        candidate = '.'.join(parts[:i])
        try:
            exec('from %s import %s' % (candidate, obj_name))
        except Exception:  # libraries can throw all sorts of exceptions...
            # Import failed: keep the last prefix that worked and stop.
            short_name = '.'.join(parts[:(i + 1)])
            break
        short_name = candidate
    return short_name
def identify_names(code):
    """Builds a codeobj summary by identifying and resolving used names

    >>> code = '''
    ... from a.b import c
    ... import d as e
    ... print(c)
    ... e.HelloWorld().f.g
    ... '''
    >>> for name, o in sorted(identify_names(code).items()):
    ...     print(name, o['name'], o['module'], o['module_short'])
    c c a.b a.b
    e.HelloWorld HelloWorld d d
    """
    finder = NameFinder()
    try:
        finder.visit(ast.parse(code))
    except SyntaxError:
        # Unparsable example code: nothing to resolve.
        return {}

    example_code_obj = {}
    for written_name, full_name in finder.get_mapping():
        # written_name is as it appears in the file (e.g. np.asarray);
        # full_name carries the resolved import path (e.g. numpy.asarray).
        pieces = full_name.rsplit('.', 1)
        if len(pieces) == 1:
            # A bare module reference is useless for backreferences.
            continue
        module, attribute = pieces
        example_code_obj[written_name] = {
            'name': attribute,
            'module': module,
            'module_short': get_short_module_name(module, attribute),
        }
    return example_code_obj
def scan_used_functions(example_file, gallery_conf):
    """save variables so we can later add links to the documentation

    :param example_file: Path to the example script to scan
    :param gallery_conf: Gallery configuration dict; only names under
        ``gallery_conf['doc_module']`` are kept as backreferences
    :return set: ``module_short.name`` strings for the documented module
    """
    example_code_obj = identify_names(open(example_file).read())
    if example_code_obj:
        # Persist the mapping next to the example so the HTML build can
        # resolve backreferences without re-parsing the source.
        codeobj_fname = example_file[:-3] + '_codeobj.pickle'
        with open(codeobj_fname, 'wb') as fid:
            pickle.dump(example_code_obj, fid, pickle.HIGHEST_PROTOCOL)

    backrefs = set('{module_short}.{name}'.format(**entry)
                   for entry in example_code_obj.values()
                   if entry['module'].startswith(gallery_conf['doc_module']))
    return backrefs
# XXX This figure:: uses a forward slash even on Windows, but the op.join's
# elsewhere will use backslashes...
THUMBNAIL_TEMPLATE = """
.. raw:: html
<div class="sphx-glr-thumbcontainer" tooltip="{snippet}">
.. only:: html
.. figure:: /{thumbnail}
:ref:`sphx_glr_{ref_name}`
.. raw:: html
</div>
"""
BACKREF_THUMBNAIL_TEMPLATE = THUMBNAIL_TEMPLATE + """
.. only:: not html
* :ref:`sphx_glr_{ref_name}`
"""
def _thumbnail_div(full_dir, fname, snippet, is_backref=False):
    """Generates RST to place a thumbnail in a gallery"""
    # Thumbnails live under <dir>/images/thumb/ with a fixed name prefix;
    # fname[:-3] strips the '.py' extension.
    thumb = os.path.join(full_dir, 'images', 'thumb',
                         'sphx_glr_%s_thumb.png' % fname[:-3])

    # Sphinx reference labels must not contain path separators.
    ref_name = os.path.join(full_dir, fname).replace(os.path.sep, '_')

    if is_backref:
        template = BACKREF_THUMBNAIL_TEMPLATE
    else:
        template = THUMBNAIL_TEMPLATE
    return template.format(snippet=snippet, thumbnail=thumb, ref_name=ref_name)
def write_backreferences(seen_backrefs, gallery_conf,
                         target_dir, fname, snippet):
    """Writes down back reference files, which include a thumbnail list
    of examples using a certain module

    :param seen_backrefs: Mutable set of backrefs already written this build;
        used to decide append-vs-truncate and updated in place
    :param gallery_conf: Gallery configuration dict
    :param target_dir: Directory containing the generated example
    :param fname: Example file name
    :param snippet: Tooltip text for the thumbnail
    """
    example_file = os.path.join(target_dir, fname)
    backrefs = scan_used_functions(example_file, gallery_conf)
    for backref in backrefs:
        include_path = os.path.join(gallery_conf['mod_example_dir'],
                                    '%s.examples' % backref)
        seen = backref in seen_backrefs
        # First sighting truncates the file and writes the heading;
        # later sightings append further thumbnails.
        with open(include_path, 'a' if seen else 'w') as ex_file:
            if not seen:
                heading = '\n\nExamples using ``%s``' % backref
                ex_file.write(heading + '\n')
                ex_file.write('^' * len(heading) + '\n')
            ex_file.write(_thumbnail_div(target_dir, fname, snippet,
                                         is_backref=True))
            seen_backrefs.add(backref)
|
emmanuelle/multi-diffusion
|
doc/ext/sphinx_gallery/backreferences.py
|
Python
|
bsd-3-clause
| 6,128
|
[
"VisIt"
] |
f1d9280eda20357479b3d8441f830c10f8a606dd57418042c3d670ab42971421
|
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from bigdl.orca.automl.model.base_pytorch_model import PytorchModelBuilder
from bigdl.orca.automl.auto_estimator import AutoEstimator
from bigdl.chronos.model.tcn import model_creator
from .base_automodel import BasePytorchAutomodel
class AutoTCN(BasePytorchAutomodel):
    """Automated hyperparameter search wrapper around the Chronos TCN
    (Temporal Convolutional Network) forecasting model."""

    def __init__(self,
                 input_feature_num,
                 output_target_num,
                 past_seq_len,
                 future_seq_len,
                 optimizer,
                 loss,
                 metric,
                 metric_mode=None,
                 hidden_units=None,
                 levels=None,
                 num_channels=None,
                 kernel_size=7,
                 lr=0.001,
                 dropout=0.2,
                 backend="torch",
                 logs_dir="/tmp/auto_tcn",
                 cpus_per_trial=1,
                 name="auto_tcn",
                 remote_dir=None,
                 ):
        """
        Create an AutoTCN.

        :param input_feature_num: Int. The number of features in the input
        :param output_target_num: Int. The number of targets in the output
        :param past_seq_len: Int. The number of historical steps used for forecasting.
        :param future_seq_len: Int. The number of future steps to forecast.
        :param optimizer: String or pyTorch optimizer creator function or
            tf.keras optimizer instance.
        :param loss: String or pytorch/tf.keras loss instance or pytorch loss creator function.
        :param metric: String or customized evaluation metric function.
            If string, metric is the evaluation metric name to optimize, e.g. "mse".
            If callable function, it signature should be func(y_true, y_pred), where y_true and
            y_pred are numpy ndarray. The function should return a float value as evaluation result.
        :param metric_mode: One of ["min", "max"]. "max" means greater metric value is better.
            You have to specify metric_mode if you use a customized metric function.
            You don't have to specify metric_mode if you use the built-in metric in
            bigdl.orca.automl.metrics.Evaluator.
        :param hidden_units: Int or hp sampling function from an integer space. The number of hidden
            units or filters for each convolutional layer. It is similar to `units` for LSTM.
            It defaults to 30. We will omit the hidden_units value if num_channels is specified.
            For hp sampling, see bigdl.orca.automl.hp for more details.
            e.g. hp.grid_search([32, 64]).
        :param levels: Int or hp sampling function from an integer space. The number of levels of
            TemporalBlocks to use. It defaults to 8. We will omit the levels value if
            num_channels is specified.
        :param num_channels: List of integers. A list of hidden_units for each level. You could
            specify num_channels if you want different hidden_units for different levels.
            By default, num_channels equals to
            [hidden_units] * (levels - 1) + [output_target_num].
        :param kernel_size: Int or hp sampling function from an integer space.
            The size of the kernel to use in each convolutional layer.
        :param lr: float or hp sampling function from a float space. Learning rate.
            e.g. hp.choice([0.001, 0.003, 0.01])
        :param dropout: float or hp sampling function from a float space. Dropout
            rate. e.g. hp.uniform(0.1, 0.3)
        :param backend: The backend of the TCN model. We only support backend as "torch" for now.
        :param logs_dir: Local directory to save logs and results. It defaults to "/tmp/auto_tcn"
        :param cpus_per_trial: Int. Number of cpus for each trial. It defaults to 1.
        :param name: name of the AutoTCN. It defaults to "auto_tcn"
        :param remote_dir: String. Remote directory to sync training results and checkpoints. It
            defaults to None and doesn't take effects while running in local. While running in
            cluster, it defaults to "hdfs:///tmp/{name}".
        """
        super().__init__()
        # todo: support search for past_seq_len.
        # todo: add input check.
        if backend != "torch":
            raise ValueError(f"We only support backend as torch. Got {backend}")
        # Fixed values and hp sampling functions alike go into the search
        # space consumed by the AutoEstimator; `nhid` is the model_creator's
        # name for hidden_units.
        self.search_space = dict(
            input_feature_num=input_feature_num,
            output_feature_num=output_target_num,
            past_seq_len=past_seq_len,
            future_seq_len=future_seq_len,
            nhid=hidden_units,
            levels=levels,
            num_channels=num_channels,
            kernel_size=kernel_size,
            lr=lr,
            dropout=dropout,
        )
        self.metric = metric
        self.metric_mode = metric_mode
        model_builder = PytorchModelBuilder(model_creator=model_creator,
                                            optimizer_creator=optimizer,
                                            loss_creator=loss,
                                            )
        self.auto_est = AutoEstimator(model_builder=model_builder,
                                      logs_dir=logs_dir,
                                      resources_per_trial={"cpu": cpus_per_trial},
                                      remote_dir=remote_dir,
                                      name=name)
|
intel-analytics/BigDL
|
python/chronos/src/bigdl/chronos/autots/model/auto_tcn.py
|
Python
|
apache-2.0
| 6,024
|
[
"ORCA"
] |
5687c07d08750c212cbff81c24f4a1f0be2ff41c50976440ea8173fdbccac6e3
|
# This file is part of cclib (http://cclib.github.io), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2006-2014, the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.
"""Parser for Gaussian output files"""
from __future__ import print_function
import re
import numpy
# CJS changed . to cclib.parser
from cclib.parser import logfileparser
from cclib.parser import utils
from chemlab.db import ChemlabDB
# Chemlab's periodic-table symbol list; Gausscom.extract() calls
# symbols.index(<element label>) to turn atom names into numeric indices
# (presumably atomic numbers, given the list is indexed by symbol — verify).
cdb = ChemlabDB()
symbols = cdb.get('data', 'symbols')
class Gausscom(logfileparser.Logfile):
    """A Gaussian 98/03 com file.

    Parses a Gaussian *input* deck (route section, title/comment,
    charge+multiplicity line, then the molecule specification), filling
    the standard cclib attributes: charge, mult, atomnos, natom,
    atomcoords.
    """

    def __init__(self, *args, **kwargs):
        # Call the __init__ method of the superclass
        super(Gausscom, self).__init__(logname="Gausscom", *args, **kwargs)

    def __str__(self):
        """Return a string representation of the object."""
        return "Gaussian com file %s" % (self.filename)

    def __repr__(self):
        """Return a representation of the object."""
        return 'Gausscom("%s")' % (self.filename)

    def before_parsing(self):
        # State flags for the section-by-section walk through the deck;
        # extract() advances them in order: route ('#' line) -> comment
        # -> charge/mult + geometry.  Blank lines are the delimiters.
        self.found_options = False
        self.found_comment = False
        self.found_charge = False
        #self.found_geometry = False
        self.found_e_correl = False
        return

    def after_parsing(self):
        # No post-processing needed for .com files.
        return

    def extract(self, inputfile, line):
        """Extract information from the file object inputfile.

        Called once per line.  A '#' line marks the route section; each
        subsequent blank line moves the state machine forward one
        section (comment, then charge/multiplicity + geometry).
        """
        if line[0] == '#' and not self.found_options:
            self.found_options = True
        elif line.strip() == '' and self.found_options:
            if not self.found_comment:
                self.found_comment = True
            elif not self.found_charge:
                self.found_charge = True
                # Line after this blank separator holds "<charge> <mult>".
                line = next(inputfile)
                self.set_attribute('charge', int(line.split()[0]))
                self.set_attribute('mult', int(line.split()[1]))
                if not hasattr(self, "atomcoords"):
                    self.atomcoords = []
                    self.inputatoms = []
                atomnames = []
                atomcoords = []
                # Geometry block: one atom per line until the next blank.
                line = next(inputfile)
                while line.strip() != '':
                    broken = line.split()
                    # Handles labels with fragment specs, e.g. Cl(Fragment=2):
                    # everything before the '(' is the element symbol.
                    atomnames.append(symbols.index(re.split('[(]', broken[0])[0]))
                    atomcoords.append(list(map(float, broken[1:4])))
                    line = next(inputfile)
                self.atomcoords.append(atomcoords)
                self.inputatoms = atomnames[:]
                self.set_attribute('atomnos', self.inputatoms)
                self.set_attribute('natom', len(atomnames))
            else:
                # Any further blank-delimited section is treated as extra input.
                self.found_e_correl = True
if __name__ == "__main__":
    # Standalone usage:
    #   no args        -> run the module doctests
    #   <file>         -> parse the given .com file
    #   <file> <attrs> -> additionally print each requested attribute
    import doctest
    import sys

    import gausscomparser

    argc = len(sys.argv)
    if argc == 1:
        doctest.testmod(gausscomparser, verbose=False)
    if argc >= 2:
        data = gausscomparser.Gausscom(sys.argv[1]).parse()
    if argc > 2:
        for attrname in sys.argv[2:]:
            if hasattr(data, attrname):
                print(getattr(data, attrname))
|
chrisjsewell/PyGauss
|
pygauss/cclib_patch/parser/gausscomparser.py
|
Python
|
gpl-3.0
| 3,617
|
[
"Gaussian",
"cclib"
] |
4119716d8da58b36fcf737ae743f7331fc756ee32ffc61eeb1442cbb02130fcf
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Autosuspend udev rule generator
This script is executed at build time to generate udev rules. The
resulting rules file is installed on the device, the script itself
is not.
"""
from __future__ import print_function
# Device allowlists rendered into the udev rule template below.  Each entry
# is a lowercase hex 'vendorid:productid' string; main() splits on ':'.

# List of USB devices (vendorid:productid) for which it is safe to enable
# autosuspend.
USB_IDS = []

# Host Controllers and internal hubs
USB_IDS += [
    # Linux Host Controller (UHCI) (most older x86 boards)
    '1d6b:0001',
    # Linux Host Controller (EHCI) (all boards)
    '1d6b:0002',
    # Linux Host Controller (XHCI) (most newer boards)
    '1d6b:0003',
    # SMSC (Internal HSIC Hub) (most Exynos boards)
    '0424:3503',
    # NOTE(review): the vendor prefixes below (05e3 = Genesys Logic,
    # 8087 = Intel) do not match the following three labels; the labels
    # appear rotated by one entry.  Verify against the usb.ids database.
    # Intel (Rate Matching Hub) (all x86 boards)
    '05e3:0610',
    # Intel (Internal Hub?) (peppy, falco)
    '8087:0024',
    # Genesys Logic (Internal Hub) (rambi)
    '8087:8000',
    # Microchip (Composite HID + CDC) (kefka)
    '04d8:0b28',
]

# Webcams
USB_IDS += [
    # Chicony (zgb)
    '04f2:b1d8',
    # Chicony (mario)
    '04f2:b262',
    # Chicony (stout)
    '04f2:b2fe',
    # Chicony (butterfly)
    '04f2:b35f',
    # Chicony (rambi)
    '04f2:b443',
    # Chicony (glados)
    '04f2:b552',
    # LiteOn (spring)
    '058f:b001',
    # Foxlink? (butterfly)
    '05c8:0351',
    # Foxlink? (butterfly)
    '05c8:0355',
    # Cheng Uei? (falco)
    '05c8:036e',
    # SuYin (parrot)
    '064e:d251',
    # Realtek (falco)
    '0bda:571c',
    # IMC Networks (squawks)
    '13d3:5657',
    # Sunplus (parrot)
    '1bcf:2c17',
    # (C-13HDO10B39N) (alex)
    '2232:1013',
    # (C-10HDP11538N) (lumpy)
    '2232:1017',
    # (Namuga) (link)
    '2232:1033',
    # (C-03FFM12339N) (daisy)
    '2232:1037',
    # (C-10HDO13531N) (peach)
    '2232:1056',
    # (NCM-G102) (samus)
    '2232:6001',
    # Acer (stout)
    '5986:0299',
]

# Bluetooth Host Controller
USB_IDS += [
    # Hon-hai (parrot)
    '0489:e04e',
    # Hon-hai (peppy)
    '0489:e056',
    # Hon-hai (Kahlee)
    '0489:e09f',
    # QCA6174A (delan)
    '0489:e0a2',
    # LiteOn (parrot)
    '04ca:3006',
    # LiteOn (aleena)
    '04ca:3016',
    # LiteOn (scarlet)
    '04ca:301a',
    # Realtek (blooglet)
    '0bda:b00c',
    # Atheros (stumpy, stout)
    '0cf3:3004',
    # Atheros (AR3011) (mario, alex, zgb)
    '0cf3:3005',
    # Atheros (stumpy)
    '0cf3:3007',
    # Atheros (butterfly)
    '0cf3:311e',
    # Atheros (scarlet)
    '0cf3:e300',
    # Marvell (rambi)
    '1286:2046',
    # Marvell (gru)
    '1286:204e',
    # Intel (rambi, samus)
    '8087:07dc',
    # Intel (strago, glados)
    '8087:0a2a',
    # Intel (octopus)
    '8087:0aaa',
    # Intel (hatch)
    '8087:0026',
    # Intel (atlas)
    '8087:0025',
]

# WWAN (LTE)
USB_IDS += [
    # Huawei (ME936) (kip)
    '12d1:15bb',
    # Fibocom (L850-GL) (coral, nautilus, sarien)
    '2cb7:0007',
    # Fibocom (NL668, NL652)
    '2cb7:01a0',
]

# Mass Storage
USB_IDS += [
    # Genesys (SD card reader) (lumpy, link, peppy)
    '05e3:0727',
    # Realtek (SD card reader) (mario, alex)
    '0bda:0138',
    # Realtek (SD card reader) (helios)
    '0bda:0136',
    # Realtek (SD card reader) (falco)
    '0bda:0177',
]

# Security Key
USB_IDS += [
    # Yubico.com
    '1050:0211',
    # Yubico.com (HID firmware)
    '1050:0200',
    # Google Titan key
    '18d1:5026',
]

# USB Audio devices
USB_IDS += [
    # Google USB-C to 3.5mm Digital Headphone Jack Adapter 'Mir'
    '18d1:5025',
    # Google USB-C to 3.5mm Digital Headphone Jack Adapter 'Mir' (HID only)
    '18d1:5029',
    # Google USB-C to 3.5mm Digital Headphone Jack Adapter 2018 'Condor'
    '18d1:5034',
    # Google Pixel USB-C Earbuds 'Blackbird'
    '18d1:5033',
    # Libratone Q Adapt In-Ear USB-C Earphones, Made for Google
    '03eb:2433',
    # Moshi USB-C to 3.5 mm Adapter/Charger, Made for Google
    '282b:48f0',
    # Moshi USB-C to 3.5 mm Adapter/Charger, Made for Google (HID only)
    '282b:0026',
    # AiAiAi TMA-2 C60 Cable, Made for Google
    '0572:1a08',
    # Apple USB-C to 3.5mm Headphone Jack Adapter
    '05ac:110a',
]

# List of PCI devices (vendorid:deviceid) for which it is safe to enable
# autosuspend.
PCI_IDS = []

# Intel
PCI_IDS += [
    # Host bridge
    '8086:590c',
    # i915
    '8086:591e',
    # proc_thermal
    '8086:1903',
    # SPT PCH xHCI controller
    '8086:9d2f',
    # CNP PCH xHCI controller
    '8086:9ded',
    # intel_pmc_core
    '8086:9d21',
    # i801_smbus
    '8086:9d23',
    # iwlwifi
    '8086:095a',
    # GMM
    '8086:1911',
    # Thermal
    '8086:9d31',
    # MME
    '8086:9d3a',
    # CrOS EC
    '8086:9d4b',
    # PCH SPI
    '8086:9d24',
    # SATA
    '8086:02d3',
    # RAM memory
    '8086:02ef',
    # ISA bridge
    '8086:0284',
    # Communication controller
    '8086:02e0',
    # Network controller
    '8086:02f0',
    # Serial bus controller
    '8086:02a4',
    # USB controller
    '8086:02ed',
    # Volteer xHCI controller
    '8086:a0ed',
    # Graphics
    '8086:9b41',
    # DSP
    '8086:02f9',
    # Host bridge
    '8086:9b61',
    # Host bridge
    '8086:9b71',
    # PCI Bridge
    '8086:02b0',
    # i915 (atlas)
    '8086:591c',
    # iwlwifi (atlas)
    '8086:2526',
    # i915 (kefka)
    '8086:22b1',
    # proc_thermal (kefka)
    '8086:22dc',
    # xchi_hdc (kefka)
    '8086:22b5',
    # snd_hda (kefka)
    '8086:2284',
    # pcieport (kefka)
    '8086:22c8',
    '8086:22cc',
    # lpc_ich (kefka)
    '8086:229c',
    # iosf_mbi_pci (kefka)
    '8086:2280',
]

# Samsung
PCI_IDS += [
    # NVMe KUS030205M-B001
    '144d:a806',
    # NVMe MZVLB256HAHQ
    '144d:a808',
]

# Lite-on
PCI_IDS += [
    # 3C07110288
    '14a4:9100',
]

# Seagate
PCI_IDS += [
    # ZP256CM30011
    '7089:5012',
]

# Kingston
PCI_IDS += [
    # RBUSNS8154P3128GJ3
    '2646:5008',
]

# Do not edit below this line. #################################################

# Template for the generated rules file; main() fills %(pci_rules)s and
# %(usb_rules)s with one GOTO="autosuspend_enable" match line per device.
UDEV_RULE = """\
ACTION!="add", GOTO="autosuspend_end"
SUBSYSTEM!="i2c|pci|usb", GOTO="autosuspend_end"
SUBSYSTEM=="i2c", GOTO="autosuspend_i2c"
SUBSYSTEM=="pci", GOTO="autosuspend_pci"
SUBSYSTEM=="usb", GOTO="autosuspend_usb"
# I2C rules
LABEL="autosuspend_i2c"
ATTR{name}=="cyapa", ATTR{power/control}="on", GOTO="autosuspend_end"
GOTO="autosuspend_end"
# PCI rules
LABEL="autosuspend_pci"
%(pci_rules)s\
GOTO="autosuspend_end"
# USB rules
LABEL="autosuspend_usb"
%(usb_rules)s\
GOTO="autosuspend_end"
# Enable autosuspend
LABEL="autosuspend_enable"
TEST=="power/control", ATTR{power/control}="auto", GOTO="autosuspend_end"
LABEL="autosuspend_end"
"""
def main():
    """Print the complete udev rules file for the allowlists above.

    Expands the UDEV_RULE template with one autosuspend-enable match
    line per PCI and USB device ID.
    """
    pci_lines = []
    for dev_ids in PCI_IDS:
        vendor, device = dev_ids.split(':')
        pci_lines.append('ATTR{vendor}=="0x%s", ATTR{device}=="0x%s", '
                         'GOTO="autosuspend_enable"\n' % (vendor, device))

    usb_lines = []
    for dev_ids in USB_IDS:
        vid, pid = dev_ids.split(':')
        usb_lines.append('ATTR{idVendor}=="%s", ATTR{idProduct}=="%s", '
                         'GOTO="autosuspend_enable"\n' % (vid, pid))

    print(UDEV_RULE % {'pci_rules': ''.join(pci_lines),
                       'usb_rules': ''.join(usb_lines)})


if __name__ == '__main__':
    main()
|
phomes/systemd
|
tools/chromiumos/gen_autosuspend_rules.py
|
Python
|
gpl-2.0
| 7,260
|
[
"Octopus"
] |
c3af8118b0c6baf94910359059cabdec00d7dbd4c94ce422a231ce9e604ec82a
|
#!/usr/bin/python
# Python 2 script.  Generates two PyMOL driver scripts (dsb_script.py and
# dsb_script_gl.py) by combining the templates dsb_temp_script.py and
# dsb_temp_glmol.py with the salt-bridge atom list read from the file
# 'detect_saltbridge_2', all resolved relative to the current directory.
import os
cwd=os.getcwd()
cwd = cwd + '/'
# Output scripts (w+) and input templates (r).
f1= open(cwd + 'dsb_script.py',"w+")
f2= open(cwd +'dsb_temp_script.py',"r")
f5= open(cwd +'dsb_temp_glmol.py',"r")
f4= open(cwd +'dsb_script_gl.py',"w+")
inf_t=[]
inp= open(cwd +'inputfile.txt',"r")
# inf_t ends up holding the last (typically only) line of inputfile.txt,
# i.e. the PDB filename to load.
with inp as ins:
    ins = [line.rstrip('\n') for line in ins]
    for line in ins:
        inf_t=line
# NOTE(review): rstrip('.pdb') strips any trailing '.', 'p', 'd', 'b'
# characters, not the literal suffix — a name like 'abd.pdb' loses more
# than its extension.  Verify intent.
inf_t2=inf_t.rstrip('.pdb')
inp.close()
# Prefer a literal 'inputfile.pdb' in the working directory for the GLmol
# output; otherwise fall back to the filename from inputfile.txt.
flag_inputfile=0
if os.path.isfile("inputfile.pdb"):
    flag_inputfile=1
if(flag_inputfile==1):
    gl_input="inputfile.pdb"
if(flag_inputfile==0):
    gl_input=inf_t
#print inf_t, inf_t2
ar=[]
data=[]
ar=f5.read()
# Header of dsb_script.py: PyMOL boilerplate plus the load of the input PDB.
f1.write('\nimport pymol \nfrom pymol import stored\nfrom pymol import cmd, CmdException\ncmd=pymol.cmd\ninput_file=\'%s\'\ncmd.load( input_file , \'%s\')\n'%(inf_t,inf_t2))
ar1=[]
data1=[]
ar1=f2.read()
# Header of dsb_script_gl.py: same boilerplate plus the GLmol exporter hook.
f4.write('#!/usr/bin/python\nimport pymol \nfrom pymol import stored\nfrom pymol import cmd, CmdException\nimport export_to_gl as glmol\ncmd=pymol.cmd\ndef out_atoms(modelName):\n\tprint modelName\n')
input_file ='detect_saltbridge_2'
f3= open(cwd + input_file,"r")
data=f3.readlines()
k=0
data_len= len(data)
print data_len
# Emit the a_data list literal into both generated scripts; the first line
# of detect_saltbridge_2 is skipped (header), the rest become string items.
if(data_len==1):
    f1.write('a_data=[]\n')
    f4.write('\n\ta_data=[]\n')
if(data_len>1):
    f1.write('a_data=[')
    f4.write('\n\ta_data=[')
#f1.write(data)
t=0;
for i in data[1:]:
    i=i.rstrip('\n')
    f1.write('\'%s\''%(str(i)))
    f4.write('\'%s\''%(str(i)))
    # NOTE(review): this comma rule skips the separator after the first
    # element (t==0), which would produce adjacent string literals that
    # Python concatenates — confirm that is the intended output.
    if(0 < t < data_len-2):
        f1.write(',')
        f4.write(',')
    t=t+1
f1.write(']\n')
f4.write(']\n')
# dsb_script.py: wrap the template body in out_atoms() and invoke it.
f1.write('def out_atoms():\n\tmodelName=\'%s\'\n'%(inf_t2))
f1.write(ar1)
f1.write('out_atoms()')
f4.write('\n')
f4.write(ar)
# dsb_script_gl.py: append t_run(), which loads the model, runs out_atoms,
# and dumps the GLmol representation to these output files.
sst = 'dsb_glmol_sst.png'
impf = 'dsb_glmol.html'
f4.write('\ndef t_run():')
f4.write("\n\tinput_file=\'%s\'\n\tmodelName=\'inputfile\'\n\tcmd.load( input_file , \'inputfile\')\n\tout_atoms(modelName)\n\tglmol.dump_rep(modelName, \'%s\', \'%s\')\n\tpymol.cmd.quit()\nt_run()\n"%(gl_input,sst,impf))
f4.close()
f1.close()
f2.close()
f3.close()
f5.close()
|
S-John-S/MAT
|
dsb_main.py
|
Python
|
mit
| 1,968
|
[
"PyMOL"
] |
e380e5f9be9b30ed5d0d328522d24afc90b1472d98e5a3cc6414ba39e4b2b92d
|
"""
Sheet classes.
A Sheet is a two-dimensional arrangement of processing units,
typically modeling a neural region or a subset of cells in a neural
region. Any new Sheet classes added to this directory will
automatically become available for any model.
$Id$
"""
__version__='$Revision$'
# Imported here so that all Sheets will be in the same package
from topo.base.sheet import Sheet
from topo.base.projection import ProjectionSheet # pyflakes:ignore (API import)
from topo.base.cf import CFSheet
from topo.misc.generatorsheet import GeneratorSheet
# Imported here for ease of access by users
from topo.base.boundingregion import BoundingBox # pyflakes:ignore (API import)
from topo.base.sheet import activity_type # pyflakes:ignore (API import)
import numpy
import topo
import param
from topo.base.cf import MaskedCFIter
from topo.base.projection import Projection
from topo.base.simulation import FunctionEvent, PeriodicEventSequence
class ActivityCopy(Sheet):
    """
    Mirrors incoming Activity patterns onto its own activity matrix
    and output port.

    A minimal Sheet, useful chiefly as a stand-in for activity that is
    computed elsewhere (e.g. when wrapping an external simulation) but
    should still be visible as an ordinary Sheet.
    """

    dest_ports=['Activity']
    src_ports=['Activity']

    def input_event(self, conn, data):
        # Stash the incoming pattern; it is applied on the next
        # process_current_time() call.
        self.input_data = data

    def process_current_time(self):
        if not hasattr(self, 'input_data'):
            return
        # Replace (not accumulate into) the previous activity in place.
        self.activity *= 0
        self.activity += self.input_data
        self.send_output(src_port='Activity', data=self.activity)
        del self.input_data
class SequenceGeneratorSheet(GeneratorSheet):
    """
    GeneratorSheet that presents a timed sequence of input patterns.

    The configured input_sequence is replayed every self.period time
    units, each (onset, generator) pair firing at its onset within the
    period.  If the sequence is longer than self.period, a warning is
    issued and the sequence restarts as soon as it completes.
    """

    input_sequence = param.List(default=[],
        doc="""The sequence of patterns to generate. Must be a list of
        (onset,generator) tuples. An empty list defaults to the
        single tuple: (0,self.input_generator), resulting in
        identical behavior to an ordinary GeneratorSheet.""")

    def __init__(self, **params):
        super(SequenceGeneratorSheet, self).__init__(**params)
        # No explicit sequence: behave exactly like a plain GeneratorSheet.
        if not self.input_sequence:
            self.input_sequence = [(0, self.input_generator)]

    def start(self):
        assert self.simulation
        to_time = self.simulation.convert_to_time_type
        # For each step: first swap in the generator at its onset, then
        # immediately generate output from it.
        event_seq = []
        for onset, generator in self.input_sequence:
            event_seq.extend([
                FunctionEvent(to_time(onset), self.set_input_generator, generator),
                FunctionEvent(0, self.generate),
            ])
        now = self.simulation.time()
        self.event = PeriodicEventSequence(now + to_time(self.phase),
                                           to_time(self.period),
                                           event_seq)
        self.simulation.enqueue_event(self.event)
def compute_joint_norm_totals(projlist, active_units_mask=True):
    """
    Set each CF's norm_total to the sum of the corresponding CFs'
    norm_totals across the given group of jointly normalized
    projections.
    """
    # Assumes every Projection in the group shares the same r,c CF grid.
    assert len(projlist) >= 1
    cf_iter = MaskedCFIter(projlist[0], active_units_mask=active_units_mask)
    for _, idx in cf_iter():
        joint_sum = numpy.add.reduce([proj.flatcfs[idx].norm_total
                                      for proj in projlist])
        for proj in projlist:
            proj.flatcfs[idx].norm_total = joint_sum
class JointNormalizingCFSheet(CFSheet):
    """
    CFSheet variant supporting joint sum-based normalization.

    Under joint L1 normalization, the normalized quantity is the sum of
    (the absolute values of) all weights across a group of corresponding
    CFs in several Projections, rather than the weights of a single CF.

    Projections are grouped via their dest_port: declaring

        dest_port=('Activity','JointNormalize', 'AfferentGroup1'),

    on each incoming connection places them in one group, and learn()
    then computes the joint sums for that group.  This also relies on
    ConnectionField exposing a per-neuron norm_total and on a TransferFn
    that honors it (e.g. CFPOF_DivisiveNormalizeL1 or its _opt variant);
    other TransferFns can be extended similarly if required.
    """

    joint_norm_fn = param.Callable(default=compute_joint_norm_totals,doc="""
        Function to use to compute the norm_total for each CF in each
        projection from a group to be normalized jointly.""")

    # JABALERT: Should check that whenever a connection is added to a
    # group, it has the same no of cfs as the existing connections.

    def start(self):
        # Normalize once up front, over every unit, before any activity.
        self._normalize_weights(active_units_mask=False)

    # CEBALERT: rename active_units_mask and default to False
    def _normalize_weights(self, active_units_mask=True):
        """
        Apply the weights_output_fns for every group of Projections,
        first computing joint norm totals for genuinely grouped
        (JointNormalize) connections.

        If active_units_mask is True, only active units have their
        weights normalized.
        """
        for group_key, group in self._grouped_in_projections('JointNormalize'):
            if group_key == None:
                normtype = 'Individually'
            else:
                normtype = 'Jointly'
                self.joint_norm_fn(group, active_units_mask)

            self.debug(normtype + " normalizing:")
            for proj in group:
                proj.apply_learn_output_fns(active_units_mask=active_units_mask)
                self.debug(' ', proj.name)

    def learn(self):
        """
        Have every incoming Projection learn, then apply the output
        functions (jointly where grouped).
        """
        # Each projection learns independently first.
        for conn in self.in_connections:
            if isinstance(conn, Projection):
                conn.learn()
            else:
                self.debug("Skipping non-Projection " + conn.name)
        # Then apply output functions per JointNormalize group.
        self._normalize_weights()
class JointNormalizingCFSheet_Continuous(JointNormalizingCFSheet):
    """
    Continuously running CFSheet with no 'resting' periods between
    pattern presentations.

    Activation happens on every new input; learning is restricted to
    whole-numbered simulation times.
    """

    def process_current_time(self):
        if not self.new_input:
            return
        self.new_input = False
        # Learn only at integer simulation times (and only if plastic).
        if float(topo.sim.time()) % 1.0 == 0.0 and self.plastic:
            self.learn()
        self.activate()
# Public API: every Sheet subclass defined above, plus selected re-exports.
_public = list(set([_k for _k,_v in locals().items() if isinstance(_v,type) and issubclass(_v,Sheet)]))
_public += [
    "compute_joint_norm_totals",
    "BoundingBox",
    "ProjectionSheet",
    "activity_type",
]

# Automatically discover all .py files in this directory.
import os,fnmatch
__all__ = _public + [f.split('.py')[0] for f in os.listdir(__path__[0])
                     if fnmatch.fnmatch(f,'[!._]*.py')]
# Fix: under Python 3 the comprehension variable 'f' does not leak into
# module scope, so 'del f' would raise NameError (it also failed under
# Python 2 whenever no .py file matched); delete only the helper modules.
del os,fnmatch

# By default, avoid loading modules that rely on external libraries
# that might not be present on this system.
# Guarded so a checkout without ptztracker.py does not break import.
if 'ptztracker' in __all__:
    __all__.remove('ptztracker')
|
ioam/svn-history
|
topo/sheet/__init__.py
|
Python
|
bsd-3-clause
| 7,841
|
[
"NEURON"
] |
0e34e4c74e7ad6ddad9e76ba403e18099f00c6281a87eaa9038789d3e69ecbe4
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: drop the redundant 'time' column from
    IndicatorData and tighten its uniqueness to (indicator, feature).
    (Per-time values live on IndicatorPart instead.)"""

    def forwards(self, orm):
        """Apply the migration."""
        # Removing unique constraint on 'IndicatorData', fields ['indicator', 'feature', 'time']
        db.delete_unique('profiles_indicatordata', ['indicator_id', 'feature_id', 'time'])

        # Deleting field 'IndicatorData.time'
        db.delete_column('profiles_indicatordata', 'time')

        # Adding unique constraint on 'IndicatorData', fields ['indicator', 'feature']
        db.create_unique('profiles_indicatordata', ['indicator_id', 'feature_id'])

    def backwards(self, orm):
        """Reverse the migration (restores 'time' with an empty default)."""
        # Removing unique constraint on 'IndicatorData', fields ['indicator', 'feature']
        db.delete_unique('profiles_indicatordata', ['indicator_id', 'feature_id'])

        # Adding field 'IndicatorData.time'
        db.add_column('profiles_indicatordata', 'time', self.gf('django.db.models.fields.CharField')(default='', max_length=20), keep_default=False)

        # Adding unique constraint on 'IndicatorData', fields ['indicator', 'feature', 'time']
        db.create_unique('profiles_indicatordata', ['indicator_id', 'feature_id', 'time'])

    # Frozen ORM snapshot used by South at migration time; auto-generated,
    # do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'profiles.datadomain': {
            'Meta': {'object_name': 'DataDomain'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicators': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.Indicator']", 'through': "orm['profiles.IndicatorDomain']", 'symmetrical': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '20', 'db_index': 'True'})
        },
        'profiles.datasource': {
            'Meta': {'object_name': 'DataSource'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'implementation': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
        },
        'profiles.geolevel': {
            'Meta': {'object_name': 'GeoLevel'},
            'data_sources': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.DataSource']", 'symmetrical': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoLevel']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'})
        },
        'profiles.geomapping': {
            'Meta': {'object_name': 'GeoMapping'},
            'from_record': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'mappings_as_from'", 'to': "orm['profiles.GeoRecord']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'to_record': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'mappings_as_to'", 'symmetrical': 'False', 'to': "orm['profiles.GeoRecord']"})
        },
        'profiles.georecord': {
            'Meta': {'unique_together': "(('level', 'geo_id', 'custom_name', 'owner'),)", 'object_name': 'GeoRecord'},
            'components': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'components_rel_+'", 'blank': 'True', 'to': "orm['profiles.GeoRecord']"}),
            'custom_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'geo_id': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoLevel']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoRecord']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
        },
        'profiles.indicator': {
            'Meta': {'object_name': 'Indicator'},
            'data_source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.DataSource']"}),
            'formula': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'levels': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.GeoLevel']", 'symmetrical': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
        },
        'profiles.indicatordata': {
            'Meta': {'unique_together': "(('indicator', 'feature'),)", 'object_name': 'IndicatorData'},
            'feature': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.GeoRecord']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Indicator']"}),
            'moe': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
            'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'})
        },
        'profiles.indicatordomain': {
            'Meta': {'object_name': 'IndicatorDomain'},
            'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.DataDomain']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Indicator']"})
        },
        'profiles.indicatorpart': {
            'Meta': {'object_name': 'IndicatorPart'},
            'data_source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.DataSource']"}),
            'formula': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'indicator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Indicator']"}),
            'time': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profiles.Time']"})
        },
        'profiles.time': {
            'Meta': {'object_name': 'Time'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'sort': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '1'})
        }
    }

# App whose frozen models are included above.
complete_apps = ['profiles']
|
ProvidencePlan/Profiles
|
communityprofiles/profiles/oldmigrations/0004_remove_indicatordata_time.py
|
Python
|
mit
| 10,759
|
[
"MOE"
] |
97edf714757b42a233d72d1ed8f0861142207162cff092de8c128644d13d747b
|
# -*- coding: utf-8 -*-
# This file is part of MOOSE simulator: http://moose.ncbs.res.in.
# MOOSE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# MOOSE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Foobar. If not, see <http://www.gnu.org/licenses/>.
"""multiscale_config.py:
Last modified: Sat Jan 18, 2014 05:01PM
"""
__author__ = "Dilawar Singh"
__copyright__ = "Copyright 2013, NCBS Bangalore"
__credits__ = ["NCBS Bangalore", "Bhalla Lab"]
__license__ = "GNU GPL"
__version__ = "1.0.0"
__maintainer__ = "Dilawar Singh"
__email__ = "dilawars@ncbs.res.in"
__status__ = "Development"
import logging
import datetime
import time
import os
# Universal paths inside the MOOSE element tree.
nmlPath = '/neuroml'
nmlCellPath = os.path.join(nmlPath, 'cells')
libraryPath = os.path.join(nmlPath, 'cells')
cellPath = nmlCellPath
elecPath = os.path.join(nmlPath, 'electrical')
mumblePath = '/mumbl'

# Timestamp suffix used to archive any pre-existing log file.
st = time.time()
st = datetime.datetime.fromtimestamp(st).strftime('%Y-%m-%d-%H%M')

logDir = os.environ['HOME']
logFile = os.path.join(logDir, ".moose", 'mumble.log')
# Bug fix: ensure the directory that actually holds the log file
# (~/.moose) exists.  The old code tested $HOME itself, so
# logging.basicConfig() could fail when ~/.moose was missing.
if not os.path.isdir(os.path.dirname(logFile)):
    os.makedirs(os.path.dirname(logFile))
# Archive a previous log instead of appending to it.
if os.path.exists(logFile):
    os.rename(logFile, '{}_{}'.format(logFile, st))
logging.basicConfig(filename=logFile, level=logging.DEBUG)
mooseLogger = logging.getLogger()

# NOTE: name kept (including its typo) for backward compatibility with
# existing importers.
disbleCopyingOfObject = True
|
dharmasam9/moose-core
|
python/moose/moose_config.py
|
Python
|
gpl-3.0
| 1,820
|
[
"MOOSE"
] |
275f063802ddc4cd1dc4503c0e27af52b9fb495774f468aa43108348095cf665
|
import sys
import pysam
samfile = pysam.Samfile( "ex1.bam", "rb" )
print "###################"
# check different ways to iterate
print len(list(samfile.fetch()))
print len(list(samfile.fetch( "chr1", 10, 200 )))
print len(list(samfile.fetch( region="chr1:10-200" )))
print len(list(samfile.fetch( "chr1" )))
print len(list(samfile.fetch( region="chr1")))
print len(list(samfile.fetch( "chr2" )))
print len(list(samfile.fetch( region="chr2")))
print len(list(samfile.fetch()))
print len(list(samfile.fetch( "chr1" )))
print len(list(samfile.fetch( region="chr1")))
print len(list(samfile.fetch()))
print len(list(samfile.pileup( "chr1", 10, 200 )))
print len(list(samfile.pileup( region="chr1:10-200" )))
print len(list(samfile.pileup( "chr1" )))
print len(list(samfile.pileup( region="chr1")))
print len(list(samfile.pileup( "chr2" )))
print len(list(samfile.pileup( region="chr2")))
print len(list(samfile.pileup()))
print len(list(samfile.pileup()))
print "########### fetch with callback ################"
def my_fetch_callback( alignment ): print str(alignment)
samfile.fetch( region="chr1:10-200", callback=my_fetch_callback )
print "########## pileup with callback ################"
def my_pileup_callback( column ): print str(column)
samfile.pileup( region="chr1:10-200", callback=my_pileup_callback )
print "##########iterator row #################"
iter = pysam.IteratorRow( samfile, 0, 10, 200)
for x in iter: print str(x)
print "##########iterator col #################"
iter = pysam.IteratorColumn( samfile, 0, 10, 200 )
for x in iter: print str(x)
print "#########row all##################"
iter = pysam.IteratorRowAll( samfile )
for x in iter: print str(x)
print "###################"
class Counter:
mCounts = 0
def __call__(self, alignment):
self.mCounts += 1
c = Counter()
samfile.fetch( "chr1:10-200", c )
print "counts=", c.mCounts
sys.exit(0)
print samfile.getTarget( 0 )
print samfile.getTarget( 1 )
for p in pysam.pileup( "-c", "ex1.bam" ):
print str(p)
print pysam.pileup.getMessages()
for p in pysam.pileup( "-c", "ex1.bam", raw=True ):
print str(p),
print "###########################"
samfile = pysam.Samfile( "ex2.sam.gz", "r" )
print "num targets=", samfile.getNumTargets()
iter = pysam.IteratorRowAll( samfile )
for x in iter: print str(x)
samfile.close()
print "###########################"
samfile = pysam.Samfile( "ex2.sam.gz", "r" )
def my_fetch_callback( alignment ):
print str(alignment)
try:
samfile.fetch( "chr1:10-20", my_fetch_callback )
except AssertionError:
print "caught fetch exception"
samfile.close()
print "###########################"
samfile = pysam.Samfile( "ex2.sam.gz", "r" )
def my_pileup_callback( pileups ):
print str(pileups)
try:
samfile.pileup( "chr1:10-20", my_pileup_callback )
except NotImplementedError:
print "caught pileup exception"
# playing arount with headers
samfile = pysam.Samfile( "ex3.sam", "r" )
print samfile.targets
print samfile.lengths
print samfile.text
print samdile.header
header = samfile.header
samfile.close()
header["HD"]["SO"] = "unsorted"
outfile = pysam.Samfile( "out.sam", "wh",
header = header )
outfile.close()
|
genome-vendor/chimerascan
|
chimerascan/pysam/tests/example.py
|
Python
|
gpl-3.0
| 3,206
|
[
"pysam"
] |
fe0b1fb87e899b4cceea4f742ee7d379255615a6b60fcf51b364fde6bcc859a6
|
"""
This module implements the base functionality for MCMC-based samplers for NNs.
"""
import logging
import numpy as np
import scipy as sp
import tensorflow as tf
from sampler import Sampler, SampleStats
GRADIENT_CLIP_VALUE = 1e5
class MCMC_sampler(Sampler):
    """
    Base class for MCMC (HMC/LD) -based samplers for NNs.

    Subclasses provide the actual transition kernel by overriding
    `_construct_transition_step` (and optionally `_adjust_step_size`
    and `_complete_simulation`).

    NOTE: built on the TensorFlow 1.x graph/placeholder/session API.
    """
    def __new__(cls, **kwargs):
        """ Creates a new MCMCSampler object. """
        sampler = super().__new__(cls)
        # additional non-core parameters
        sampler._properties['noise_precision'] = 100. # precision of the Gaussian used to model the noise
        sampler._properties['weights_precision'] = .01 # precision of the Gaussian prior on network parameters
        sampler._properties['resample_noise_precision'] = False
        sampler._properties['resample_weights_precision'] = False
        sampler._properties['seek_step_sizes'] = False
        sampler._properties['anneal_step_sizes'] = False
        sampler._properties['fade_in_velocities'] = False
        return sampler
    def __init__(self, loss_fn=None, initial_position=None, test_model=None, batch_size=None, burn_in=0,
                 step_sizes=.0001, step_probabilities=1., **kwargs):
        """
        Creates a new MCMC_sampler object.
        :param loss_fn: Target loss function without regularisation terms
        :param initial_position: Initial network weights as a 2-d array of shape [number of chains, number of weights]
        :param test_model: The model used on the test data. Default=None
        :param batch_size: Batch size used for stochastic sampling methods. Default=None
        :param burn_in: Number of burn-in samples. Default=0
        :param step_sizes: Step size or a list of step sizes. Default=.0001
        :param step_probabilities: Probabilities to choose a step from step_sizes, must sum to 1. Default=1
        """
        super().__init__(**kwargs)
        self.loss_fn = loss_fn
        self.test_model = test_model
        self.initial_position = np.asarray(initial_position, dtype=np.float32)
        self.position_shape = self.initial_position.shape
        self.position_size = self.initial_position.shape[1] # total number of parameters of one network
        # data and parameter shapes
        # NOTE(review): train_size / input_dim / output_dim appear to be set by
        # the Sampler base class from **kwargs -- confirm against sampler.py.
        self.chains_num = self.initial_position.shape[0] # number of chains to run in parallel
        self.batch_size = batch_size if batch_size is not None else self.train_size
        self.batch_x_shape = (self.batch_size, self.input_dim)
        self.batch_y_shape = (self.batch_size, self.output_dim)
        # common parameters
        self.step_sizes = np.atleast_1d(np.asarray(step_sizes, dtype=np.float32))
        self.step_probabilities = np.atleast_1d(np.asarray(step_probabilities, dtype=np.float32))
        self.burn_in = burn_in
        # per-chain multiplier adjusted during burn-in step-size seeking
        self.step_multiplier = np.ones(shape=(self.chains_num,), dtype=np.float32)
        # monitor acceptance rate for reporting
        self.avg_acceptance_rate = np.ones(shape=(self.chains_num,), dtype=np.float32)
        self.avg_acceptance_rate_lambda = 0.99
        self._has_burned_in = False
    def __repr__(self):
        """ Human-readable summary of the sampler configuration. """
        s = super().__repr__()
        s += f'Chains num: {self.chains_num}\n'
        s += f'Batch size: {self.batch_size}\n'
        s += f'Position size: {self.position_size}\n'
        s += f'Precisions: noise = {self.noise_precision}, weights = {self.weights_precision}\n'
        s += f'Resample precision: noise = {self.resample_noise_precision}, '
        s += f'weights = {self.resample_weights_precision}\n'
        s += f'Burn in: {self.burn_in}\n'
        s += f'Seek step sizes: {self.seek_step_sizes}\n'
        s += f'Anneal step sizes: {self.anneal_step_sizes}\n'
        s += f'Fade in velocities: {self.fade_in_velocities}\n'
        s += 'Step sizes: {}\n'.format(np.array_str(self.step_sizes).replace('\n', ''))
        s += 'Step probabilities: {}\n'.format(np.array_str(self.step_probabilities).replace('\n', ''))
        return s
    def _construct(self, **kwargs):
        """ Constructs computational graph for the model. """
        # feeds
        self._feed_dict = {} # all values fed to TF
        self._create_feeds()
        # fetches
        self._fetch_dict = {} # everything to be fetched from TF session
        self._debug = None
        # updated position + acceptance result, will be overridden by transition step
        self._updated_position_value = np.array(self.initial_position, dtype=np.float32)
        self._updated_position = self._position
        self._accepted_value = np.ones(shape=(self.chains_num,), dtype=np.float32)
        self._accepted = tf.ones(shape=(self.chains_num,), dtype=np.float32)
        self._construct_transition_step()
        self._construct_fetches()
        self._fetch_dict['_updated_position_value'] = self._updated_position
        self._fetch_dict['_accepted_value'] = self._accepted
        self._debug_value = None
        if self._debug is not None:
            self._fetch_dict['_debug_value'] = self._debug
    def _create_feeds(self):
        """ Creates TF placeholders for positions, training sets and common parameters. """
        # "*_value" fields contain the corresponding local values updated at every sample draw and fed to placeholders
        # position
        self._position_value = np.array(self.initial_position, dtype=np.float32)
        self._position = tf.placeholder(tf.float32, shape=self.position_shape, name='position')
        self._feed_dict[self._position] = lambda: self._position_value
        # current training batch
        self._batch_train_x_value = None
        self._batch_train_x = tf.placeholder(tf.float32, shape=self.batch_x_shape, name='train_x')
        self._feed_dict[self._batch_train_x] = lambda: self._batch_train_x_value
        self._batch_train_y_value = None
        self._batch_train_y = tf.placeholder(tf.float32, shape=self.batch_y_shape, name='train_y')
        self._feed_dict[self._batch_train_y] = lambda: self._batch_train_y_value
        # step sizes (one per chain)
        self._current_step_size_value = None
        self._current_step_size = tf.placeholder(tf.float32, shape=(self.chains_num,), name='step_size')
        self._feed_dict[self._current_step_size] = lambda: self._current_step_size_value
        # precisions
        self._noise_precision_value = self.noise_precision
        self._noise_precision = tf.placeholder(tf.float32, shape=(), name='noise_precision')
        self._feed_dict[self._noise_precision] = lambda: self._noise_precision_value
        self._weights_precision_value = self.weights_precision
        self._weights_precision = tf.placeholder(tf.float32, shape=(), name='weights_precision')
        self._feed_dict[self._weights_precision] = lambda: self._weights_precision_value
        # other
        self._burn_in_ratio = tf.placeholder(tf.float32, shape=(), name='burn_in_ratio')
        self._feed_dict[self._burn_in_ratio] = lambda: self._get_burn_in_ratio(skip=.0, cut=.9)
    def _construct_fetches(self):
        """ Constructs fetches for target loss and average model weights. """
        # target loss and EMA
        self._target_loss_value = np.zeros(shape=(self.chains_num,), dtype=np.float32)
        self._target_loss = self.loss_fn(self._updated_position, self._batch_train_x, self._batch_train_y)
        self._fetch_dict['_target_loss_value'] = self._target_loss
        self._target_loss_ema = np.zeros(shape=(self.chains_num,), dtype=np.float32)
        # weight norm and EMA
        self._weight_norm_value = np.zeros(shape=(self.chains_num,), dtype=np.float32)
        self._weight_norm = self._weight_norm_fn(self._updated_position)
        self._fetch_dict['_weight_norm_value'] = self._weight_norm
        self._weight_norm_ema = np.zeros(shape=(self.chains_num,), dtype=np.float32)
    def _log_likelihood(self, position):
        """ Log-likelihood component. """
        # scale the minibatch loss up to the full training set
        batch_adjustment = (self.train_size / self.batch_size)
        return self._noise_precision * batch_adjustment * self.loss_fn(position, self._batch_train_x,
                                                                       self._batch_train_y)
    def _d_log_likelihood(self, position):
        """ Gradient of the log-likelihood component. """
        dL = tf.gradients(tf.reduce_sum(self._log_likelihood(position)), position)[0]
        # clip to keep exploding gradients from destabilising the chain
        return tf.clip_by_value(dL, -GRADIENT_CLIP_VALUE, GRADIENT_CLIP_VALUE)
    def _weight_norm_fn(self, position):
        """ Squared L2 norm of the weights, per chain (used by the Gaussian log-prior). """
        return tf.reduce_sum(tf.square(position), reduction_indices=[1])
    def _log_prior(self, position):
        """ Log-prior component. """
        return self._weights_precision * self._weight_norm_fn(position)
    def _d_log_prior(self, position):
        """ Gradient of the log-prior component. """
        dW = tf.gradients(tf.reduce_sum(self._log_prior(position)), position)[0]
        return tf.clip_by_value(dW, -GRADIENT_CLIP_VALUE, GRADIENT_CLIP_VALUE) ## TODO: no need to?
    def _energy_fn(self, position):
        """ Energy function (E = logP(data|params) + logP(params)). """
        return self._log_likelihood(position) + self._log_prior(position)
    def _d_energy_fn(self, position):
        """ Gradient of the energy function. """
        return self._d_log_likelihood(position) + self._d_log_prior(position)
    # Override in all subclasses
    def _construct_transition_step(self):
        """ Constructs computational graph for MCMC transition step. """
        pass
    def _simulate(self, session):
        """ Simulates MCMC to draw a sample. Returns None when the sample is discarded. """
        # sample batch train data
        self._sample_batch()
        self._sample_step_size()
        # construct feed dictionary
        feed_dict = {k: v() for k, v in self._feed_dict.items()}
        # run the simulation
        update_dict = session.run(self._fetch_dict, feed_dict=feed_dict)
        # update with fetched values
        # TODO: should apply only position for now
        for k, v in update_dict.items():
            setattr(self, k, v)
        if self._debug is not None:
            logging.info(self._debug_value)
        # check whether to discard the sample to prevent possible future instability
        weight_deviation = self._updated_position_value.max() - self._updated_position_value.min()
        # `not (x < 1e9)` also rejects NaN deviations, which `x >= 1e9` would let through
        if not (weight_deviation < 10 ** 9):
            logging.info(f'Sample discarded to prevent instability: {weight_deviation:.2f}')
            return None
        # accept new position
        self._position_value = self._updated_position_value
        self._complete_simulation()
        self.avg_acceptance_rate = self.avg_acceptance_rate_lambda * self.avg_acceptance_rate + \
                                   (1. - self.avg_acceptance_rate_lambda) * self._accepted_value
        # resample precisions
        weight = .9
        self._target_loss_ema = weight * self._target_loss_ema + (1. - weight) * self._target_loss_value
        self._weight_norm_ema = weight * self._weight_norm_ema + (1. - weight) * self._weight_norm_value
        self._resample_prior_params()
        if not self._has_burned_in and self._burned_in():
            self._has_burned_in = True
            logging.info(f'Burned in. Samples = {self.sample_number}, step size = {self._current_step_size_value}.')
        return self._position_value
    def _sample_step_size(self):
        """ Selects step size (1 per chain) for the current simulation. """
        step_size = np.random.choice(self.step_sizes, size=self.chains_num, p=self.step_probabilities)
        step_size = self._adjust_step_size(step_size)
        # apply step size seek during burn in
        if self.seek_step_sizes and not self._burned_in():
            lower, upper = .90, .99
            # NOTE(review): max(min(x, .0001), .01) always evaluates to .01 --
            # the clamp bounds look swapped (min/max reversed); verify intent.
            change = max(min(10. / self.burn_in, .0001), .01)
            change *= (1 - self._get_burn_in_ratio(.35))
            inc, dec = 1. + change, 1. - change
            acr = self.avg_acceptance_rate
            # shrink steps for chains below `lower` acceptance, grow above `upper`
            self.step_multiplier *= (acr < lower).astype(np.float32) * dec + (acr >= lower).astype(np.float32)
            self.step_multiplier *= (acr > upper) * inc + (acr <= upper).astype(np.float32)
            step_size *= self.step_multiplier
        if self._burned_in() and self.anneal_step_sizes:
            # polynomial decay of the step size after burn-in
            t = self.sample_number - self.burn_in
            gamma = .51
            base = .01 * self.burn_in
            multiplier = base ** gamma / ((base + t) ** gamma)
            step_size *= multiplier
        self._current_step_size_value = step_size
    # Override in subclasses to adjust the scale
    def _adjust_step_size(self, step_size):
        """ Adjusts step_size. """
        return step_size
    # Override in subclasses to update them using fetched values
    def _complete_simulation(self):
        """ Updates class values with fetched values. """
        pass
    def _sample_batch(self):
        """ Samples training points for the current batch. """
        indices = np.random.choice(self.train_size, self.batch_size, replace=False)
        self._batch_train_x_value = self.train_x[indices, :]
        self._batch_train_y_value = self.train_y[indices, :]
    def _resample_prior_params(self):
        """ Resamples parameters for the prior distributions. """
        # blend weight ramps up over the second half of burn-in
        weight = .01 * self._get_burn_in_ratio(.5)
        if weight == 0:
            return
        # noise
        if self.resample_noise_precision:
            precision = self._sample_noise_precision()
            self._noise_precision_value = weight * precision + (1 - weight) * self._noise_precision_value
        # weights
        if self.resample_weights_precision:
            precision = self._sample_weights_precision()
            self._weights_precision_value = weight * precision + (1 - weight) * self._weights_precision_value
    def _sample_noise_precision(self):
        """ Draws a new noise precision from a Gamma distribution, clipped for stability. """
        prior_observations = .1 * self.batch_size
        shape = prior_observations + self.batch_size / 2
        rate = prior_observations / self._noise_precision_value + np.mean(self._target_loss_ema) / 2
        scale = 1. / rate
        sample = np.clip(np.random.gamma(shape, scale), 10., 1000.)
        return sample
    def _sample_weights_precision(self):
        """ Draws a new weights precision from a Gamma distribution, clipped for stability. """
        prior_observations = .1 * self.position_size
        shape = prior_observations + self.position_size / 2
        rate = prior_observations / self._weights_precision_value + np.mean(self._weight_norm_ema) / 2
        scale = 1. / rate
        sample = np.clip(np.random.gamma(shape, scale), .1, 10.)
        return sample
    def _burned_in(self):
        """ Whether burn in completed. """
        return self.sample_number >= self.burn_in
    def _get_burn_in_ratio(self, skip=.0, cut=.0):
        """ Burn in phase progress in [0, 1]; `skip` delays the ramp, `cut` ends it early. """
        burn_in = self.burn_in * (1. - cut)
        if self.sample_number >= burn_in:
            return 1.
        skip *= self.burn_in
        sample_number = self.sample_number - skip
        if sample_number <= 0:
            return 0.
        base = burn_in - skip
        ratio = sample_number / base
        ratio = 3. * ratio ** 2 - 2. * ratio ** 3 # smooth both ends (cubic smoothstep)
        return ratio
    def _transpose_mul(self, a, b):
        """ Shortcut for multiplication with a transposed matrix. """
        # NOTE(review): tf.mul was removed in TF 1.0 (renamed tf.multiply);
        # confirm the TensorFlow version this targets.
        return tf.transpose(tf.mul(tf.transpose(a), b))
    def _sample_posterior(self, session=None, return_stats=False, **kwargs):
        """ Returns a new sample obtained via simulation. """
        stats = None
        sample = self._simulate(session)
        if return_stats:
            stats = [self._collect_stats(i) for i in range(self.chains_num)]
        return sample, stats
    def _sample_predictive(self, session=None, return_stats=False, is_discarded=False, **kwargs):
        """ Returns a new predictive sample (test-model outputs per chain) obtained via simulation. """
        posterior_sample = None
        # retry the simulation in case a draw is discarded for instability
        for i in range(self.draw_retries_num):
            posterior_sample, _ = self._sample_posterior(session=session, return_stats=False, **kwargs)
            if posterior_sample is not None:
                break
        if posterior_sample is None:
            return None, None
        if is_discarded:
            return self.test_x, None
        model, parameters = self.test_model
        collected_samples = list()
        collected_stats = list()
        for i in range(posterior_sample.shape[0]):
            model_params = np.reshape(posterior_sample[i], (1, posterior_sample[i].shape[0]))
            sample = session.run(model, feed_dict={parameters: model_params})
            stats = None
            if sample is not None and return_stats:
                stats = self._collect_stats(i)
            collected_samples.append(sample)
            collected_stats.append(stats)
        return collected_samples, collected_stats
    def _collect_stats(self, chain):
        """ Collects per-chain statistics reported with each sample. """
        stats = SampleStats(time=self._running_time(),
                            loss=self._report_loss(chain),
                            norm=self._weight_norm_value[chain] / self.position_size,
                            rate=self.avg_acceptance_rate[chain],
                            step=self._current_step_size_value[chain],
                            noise_var=self._report_noise_variance(),
                            weights_var=self._report_weights_variance())
        return stats
    def _report_loss(self, chain):
        """ Loss value for reporting. """
        target_loss = self._target_loss_value[chain]
        if self.output_dim > 1:
            return target_loss
        # rescale by the training-target std (assumes 1-d targets were standardised -- TODO confirm)
        return (self.train_y_std[0] ** 2) * target_loss / self.batch_size
    def _report_noise_variance(self):
        """ Noise variance for reporting (rescaled for 1-d outputs). """
        var = 1. / self._noise_precision_value
        if self.output_dim > 1:
            return var
        return (self.train_y_std[0] ** 2) * var
    def _report_weights_variance(self):
        """ Weights variance for reporting. """
        if self._weights_precision_value == 0:
            return 1.
        return 1. / self._weights_precision_value
    @classmethod
    def model_from_position(cls, layer_descriptions, position_tensor, input_tensor, use_softmax=False):
        """ Creates TF model from the specified position and description. """
        # walk the flattened parameter vector, slicing out each layer's weights and biases
        offset = 0
        model = input_tensor
        for i in range(1, len(layer_descriptions)):
            previous_layer = layer_descriptions[i - 1]
            current_layer = layer_descriptions[i]
            previous_layer_size = previous_layer[0]
            current_layer_size = current_layer[0]
            weights_size = previous_layer_size * current_layer_size
            biases_size = current_layer_size
            weights = tf.slice(position_tensor, [0, offset], [1, weights_size])
            weights = tf.reshape(weights, shape=[previous_layer_size, current_layer_size])
            offset += weights_size
            biases = tf.slice(position_tensor, [0, offset], [1, biases_size])
            biases = tf.reshape(biases, shape=[1, biases_size])
            offset += biases_size
            model = tf.matmul(model, weights) + biases
            # ReLU on hidden layers; optional softmax on a multi-class output layer
            if i != len(layer_descriptions) - 1:
                model = tf.nn.relu(model)
            elif use_softmax and layer_descriptions[-1][0] > 1:
                model = tf.nn.softmax(model)
        return model
    @classmethod
    def model_chain_from_position(cls, chains_num, layer_descriptions, position_tensor, input_tensor):
        """ Creates multiple-chain model from the specified position and description. """
        # NOTE(review): tf.split(axis, num, value) and tf.pack are pre-TF-1.0 APIs
        # (now tf.split(value, num, axis) and tf.stack); confirm target TF version.
        positions = tf.split(0, chains_num, position_tensor)
        m = []
        for i in range(chains_num):
            m.append(cls.model_from_position(layer_descriptions, positions[i], input_tensor))
        models = tf.pack(m)
        return models
    @classmethod
    def create_random_position(cls, chains_num, layers_description):
        """ Creates randomly initialised position for the specified model. """
        pos_size = cls.get_model_parameters_size(layers_description)
        # position = np.random.randn(chains_num, pos_size).astype(np.float32)
        position = sp.stats.truncnorm.rvs(-1, 1, size=(chains_num, pos_size)).astype(np.float32)
        # NOTE(review): the line below overwrites the truncnorm sample above,
        # making it dead code -- verify which initialisation is intended.
        position = np.random.randn(chains_num, pos_size).astype(np.float32)
        return position
    @classmethod
    def get_mse_loss(cls, chains_num, layers_description):
        """ Returns MSE loss for the given model. """
        def mse_loss(position, tx, ty):
            model = cls.model_chain_from_position(chains_num, layers_description, position, tx)
            loss = tf.reduce_sum((ty - model) ** 2, reduction_indices=[1, 2])
            return loss
        return mse_loss
    @classmethod
    def get_ce_loss(cls, chains_num, layers_description):
        """ Returns cross-entropy loss for the given model. """
        def ce_loss(position, tx, ty):
            """ Returns cross-entropy loss for the given model. """
            model = cls.model_chain_from_position(chains_num, layers_description, position, tx)
            # flatten chains into one batch for the softmax, then fold back per chain
            model = tf.reshape(model, shape=(chains_num * model.get_shape()[1].value, -1))
            ty = tf.tile(ty, [chains_num, 1])
            loss = tf.nn.softmax_cross_entropy_with_logits(model, ty)
            # l = tf.nn.sparse_softmax_cross_entropy_with_logits(m, ty)
            loss = tf.reshape(loss, shape=(chains_num, -1))
            loss = tf.reduce_sum(loss, reduction_indices=[1])
            return loss
        return ce_loss
|
myshkov/bnn-analysis
|
models/mcmc_sampler.py
|
Python
|
mit
| 21,351
|
[
"Gaussian"
] |
437127eac9345cc6e795fcc7f87c45f700cda0d470da5d372b57aeb47c87ab44
|
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2012-2013 Esteban Tovagliari, Jupiter Jazz Limited
# Copyright (c) 2014-2017 Esteban Tovagliari, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from _appleseedpython import *
from logtarget import *
|
gospodnetic/appleseed
|
src/appleseed.python/__init__.py
|
Python
|
mit
| 1,430
|
[
"VisIt"
] |
882aa59abfe6095cae328b39b070ff64a2033ec1f87fad639a97ae86075493f3
|
# Orca
#
# Copyright 2006-2008 Sun Microsystems Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
"""Provides getPhoneticName method that maps each letter of the
alphabet into its localized phonetic equivalent."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2006-2008 Sun Microsystems Inc."
__license__ = "LGPL"
from .orca_i18n import _
# Translators: this is a structure to assist in the generation of
# spoken military-style spelling. For example, 'abc' becomes 'alpha
# bravo charlie'.
#
# It is a simple structure that consists of pairs of
#
# letter : word(s)
#
# where the letter and word(s) are separate by colons and each
# pair is separated by commas. For example, we see:
#
# a : alpha, b : bravo, c : charlie,
#
# And so on. The complete set should consist of all the letters from
# the alphabet for your language paired with the common
# military/phonetic word(s) used to describe that letter.
#
# The Wikipedia entry
# http://en.wikipedia.org/wiki/NATO_phonetic_alphabet has a few
# interesting tidbits about local conventions in the sections
# "Additions in German, Danish and Norwegian" and "Variants".
#
__phonlist = _("a : alpha, b : bravo, c : charlie, "
               "d : delta, e : echo, f : foxtrot, "
               "g : golf, h : hotel, i : india, "
               "j : juliet, k : kilo, l : lima, "
               "m : mike, n : november, o : oscar, "
               "p : papa, q : quebec, r : romeo, "
               "s : sierra, t : tango, u : uniform, "
               "v : victor, w : whiskey, x : xray, "
               "y : yankee, z : zulu")
# Parse the localized "letter : word" pairs into the lookup table
# consulted by getPhoneticName() below.
__phonnames = {}
for __pair in __phonlist.split(','):
    __w = __pair.split(':')
    __phonnames [__w[0].strip()] = __w[1].strip()
def getPhoneticName(character):
    """Return the phonetic name for a character.

    The phonetic name is typically the 'military' term used for the
    character (e.g. 'a' -> 'alpha').

    Arguments:
    - character: the character to get the military name for

    Returns the military name for the character, or the character
    itself when no phonetic name is known for it.
    """
    try:
        return __phonnames[character]
    except KeyError:
        return character
|
ruibarreira/linuxtrail
|
usr/lib/python3/dist-packages/orca/phonnames.py
|
Python
|
gpl-3.0
| 2,822
|
[
"ORCA"
] |
a689e713c4d7500c84b43e2e4f0d0579b6652d4463955a02399b69df16c7304c
|
# -*- coding: iso-8859-1 -*-
############################################################
# Example 1: Simple basin hopping
############################################################
import numpy as np
import pele.potentials.lj as lj
import pele.basinhopping as bh
from pele.takestep import displace
# Simple basin-hopping run on a 12-atom Lennard-Jones cluster.
natoms = 12
# random initial coordinates (3 per atom)
coords = np.random.random(3 * natoms)
potential = lj.LJ()
step = displace.RandomDisplacement(stepsize=0.5)
opt = bh.BasinHopping(coords, potential, takeStep=step)
opt.run(100)
# some visualization -- best effort, pymol may be unavailable or fail to start
try:
    import pele.utils.pymolwrapper as pym
    pym.start()
    pym.draw_spheres(opt.coords, "A", 1)
except Exception:
    # FIX: was a bare `except:` -- narrowed so SystemExit/KeyboardInterrupt
    # are no longer swallowed; print() form is valid in both Python 2 and 3.
    print("Could not draw using pymol, skipping this step")
|
kjs73/pele
|
examples/basinhopping_no_system_class/1_basic.py
|
Python
|
gpl-3.0
| 714
|
[
"PyMOL"
] |
65c7b4298c36fe10774ad800be5dc1a1f1f741539e415293f3672abd71587dba
|
# -*- coding: utf-8 -*-
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Mathieu Blondel <mathieu@mblondel.org>
# Robert Layton <robertlayton@gmail.com>
# Andreas Mueller <amueller@ais.uni-bonn.de>
# Philippe Gervais <philippe.gervais@inria.fr>
# Lars Buitinck <larsmans@gmail.com>
# Joel Nothman <joel.nothman@gmail.com>
# License: BSD 3 clause
import itertools
import numpy as np
from scipy.spatial import distance
from scipy.sparse import csr_matrix
from scipy.sparse import issparse
from ..utils import check_array
from ..utils import gen_even_slices
from ..utils import gen_batches
from ..utils.fixes import partial
from ..utils.extmath import row_norms, safe_sparse_dot
from ..preprocessing import normalize
from ..externals.joblib import Parallel
from ..externals.joblib import delayed
from ..externals.joblib.parallel import cpu_count
from .pairwise_fast import _chi2_kernel_fast, _sparse_manhattan
# Utility Functions
def _return_float_dtype(X, Y):
"""
1. If dtype of X and Y is float32, then dtype float32 is returned.
2. Else dtype float is returned.
"""
if not issparse(X) and not isinstance(X, np.ndarray):
X = np.asarray(X)
if Y is None:
Y_dtype = X.dtype
elif not issparse(Y) and not isinstance(Y, np.ndarray):
Y = np.asarray(Y)
Y_dtype = Y.dtype
else:
Y_dtype = Y.dtype
if X.dtype == Y_dtype == np.float32:
dtype = np.float32
else:
dtype = np.float
return X, Y, dtype
def check_pairwise_arrays(X, Y, precomputed=False):
    """ Set X and Y appropriately and checks inputs
    If Y is None, it is set as a pointer to X (i.e. not a copy).
    If Y is given, this does not happen.
    All distance metrics should use this function first to assert that the
    given parameters are correct and safe to use.
    Specifically, this function first ensures that both X and Y are arrays,
    then checks that they are at least two dimensional while ensuring that
    their elements are floats. Finally, the function checks that the size
    of the second dimension of the two arrays is equal, or the equivalent
    check for a precomputed distance matrix.
    Parameters
    ----------
    X : {array-like, sparse matrix}, shape (n_samples_a, n_features)
    Y : {array-like, sparse matrix}, shape (n_samples_b, n_features)
    precomputed : bool
        True if X is to be treated as precomputed distances to the samples in
        Y.
    Returns
    -------
    safe_X : {array-like, sparse matrix}, shape (n_samples_a, n_features)
        An array equal to X, guaranteed to be a numpy array.
    safe_Y : {array-like, sparse matrix}, shape (n_samples_b, n_features)
        An array equal to Y if Y was not None, guaranteed to be a numpy array.
        If Y was None, safe_Y will be a pointer to X.
    """
    # pick float32 only when both inputs are already float32 (see _return_float_dtype)
    X, Y, dtype = _return_float_dtype(X, Y)
    if Y is X or Y is None:
        # validate once and alias so X is Y stays true for downstream shortcuts
        X = Y = check_array(X, accept_sparse='csr', dtype=dtype)
    else:
        X = check_array(X, accept_sparse='csr', dtype=dtype)
        Y = check_array(Y, accept_sparse='csr', dtype=dtype)
    if precomputed:
        # X is a distance matrix: its columns must match the rows of Y
        if X.shape[1] != Y.shape[0]:
            raise ValueError("Precomputed metric requires shape "
                             "(n_queries, n_indexed). Got (%d, %d) "
                             "for %d indexed." %
                             (X.shape[0], X.shape[1], Y.shape[0]))
    elif X.shape[1] != Y.shape[1]:
        raise ValueError("Incompatible dimension for X and Y matrices: "
                         "X.shape[1] == %d while Y.shape[1] == %d" % (
                             X.shape[1], Y.shape[1]))
    return X, Y
def check_paired_arrays(X, Y):
    """Validate and convert a pair of arrays for paired distance metrics.

    Both inputs are run through :func:`check_pairwise_arrays` (converting
    them to at-least-2-d float arrays) and are then required to have
    exactly the same shape, since paired metrics compare row i of X with
    row i of Y.

    Parameters
    ----------
    X : {array-like, sparse matrix}, shape (n_samples_a, n_features)
    Y : {array-like, sparse matrix}, shape (n_samples_b, n_features)

    Returns
    -------
    safe_X : {array-like, sparse matrix}, shape (n_samples_a, n_features)
        An array equal to X, guaranteed to be a numpy array.
    safe_Y : {array-like, sparse matrix}, shape (n_samples_b, n_features)
        An array equal to Y if Y was not None, guaranteed to be a numpy array.
        If Y was None, safe_Y will be a pointer to X.
    """
    X, Y = check_pairwise_arrays(X, Y)
    if X.shape == Y.shape:
        return X, Y
    raise ValueError("X and Y should be of same shape. They were "
                     "respectively %r and %r long." % (X.shape, Y.shape))
# Pairwise distances
def euclidean_distances(X, Y=None, Y_norm_squared=None, squared=False,
                        X_norm_squared=None):
    """
    Considering the rows of X (and Y=X) as vectors, compute the
    distance matrix between each pair of vectors.
    For efficiency reasons, the euclidean distance between a pair of row
    vector x and y is computed as::
        dist(x, y) = sqrt(dot(x, x) - 2 * dot(x, y) + dot(y, y))
    This formulation has two advantages over other ways of computing distances.
    First, it is computationally efficient when dealing with sparse data.
    Second, if one argument varies but the other remains unchanged, then
    `dot(x, x)` and/or `dot(y, y)` can be pre-computed.
    However, this is not the most precise way of doing this computation, and
    the distance matrix returned by this function may not be exactly
    symmetric as required by, e.g., ``scipy.spatial.distance`` functions.
    Read more in the :ref:`User Guide <metrics>`.
    Parameters
    ----------
    X : {array-like, sparse matrix}, shape (n_samples_1, n_features)
    Y : {array-like, sparse matrix}, shape (n_samples_2, n_features)
    Y_norm_squared : array-like, shape (n_samples_2, ), optional
        Pre-computed dot-products of vectors in Y (e.g.,
        ``(Y**2).sum(axis=1)``)
    squared : boolean, optional
        Return squared Euclidean distances.
    X_norm_squared : array-like, shape = [n_samples_1], optional
        Pre-computed dot-products of vectors in X (e.g.,
        ``(X**2).sum(axis=1)``)
    Returns
    -------
    distances : {array, sparse matrix}, shape (n_samples_1, n_samples_2)
    Examples
    --------
    >>> from sklearn.metrics.pairwise import euclidean_distances
    >>> X = [[0, 1], [1, 1]]
    >>> # distance between rows of X
    >>> euclidean_distances(X, X)
    array([[ 0.,  1.],
           [ 1.,  0.]])
    >>> # get distance to origin
    >>> euclidean_distances(X, [[0, 0]])
    array([[ 1.        ],
           [ 1.41421356]])
    See also
    --------
    paired_distances : distances betweens pairs of elements of X and Y.
    """
    X, Y = check_pairwise_arrays(X, Y)
    # XX: column vector of per-row squared norms of X
    if X_norm_squared is not None:
        XX = check_array(X_norm_squared)
        if XX.shape == (1, X.shape[0]):
            XX = XX.T
        elif XX.shape != (X.shape[0], 1):
            raise ValueError(
                "Incompatible dimensions for X and X_norm_squared")
    else:
        XX = row_norms(X, squared=True)[:, np.newaxis]
    if X is Y:  # shortcut in the common case euclidean_distances(X, X)
        YY = XX.T
    elif Y_norm_squared is not None:
        # YY: row vector of per-row squared norms of Y
        YY = np.atleast_2d(Y_norm_squared)
        if YY.shape != (1, Y.shape[0]):
            raise ValueError(
                "Incompatible dimensions for Y and Y_norm_squared")
    else:
        YY = row_norms(Y, squared=True)[np.newaxis, :]
    # ||x - y||^2 = ||x||^2 - 2 x.y + ||y||^2, assembled via broadcasting
    distances = safe_sparse_dot(X, Y.T, dense_output=True)
    distances *= -2
    distances += XX
    distances += YY
    # rounding errors may produce tiny negative values; clamp before sqrt
    np.maximum(distances, 0, out=distances)
    if X is Y:
        # Ensure that distances between vectors and themselves are set to 0.0.
        # This may not be the case due to floating point rounding errors.
        distances.flat[::distances.shape[0] + 1] = 0.0
    return distances if squared else np.sqrt(distances, out=distances)
def pairwise_distances_argmin_min(X, Y, axis=1, metric="euclidean",
                                  batch_size=500, metric_kwargs=None):
    """Compute minimum distances between one point and a set of points.
    This function computes for each row in X, the index of the row of Y which
    is closest (according to the specified distance). The minimal distances are
    also returned.
    This is mostly equivalent to calling:
        (pairwise_distances(X, Y=Y, metric=metric).argmin(axis=axis),
         pairwise_distances(X, Y=Y, metric=metric).min(axis=axis))
    but uses much less memory, and is faster for large arrays.
    Parameters
    ----------
    X, Y : {array-like, sparse matrix}
        Arrays containing points. Respective shapes (n_samples1, n_features)
        and (n_samples2, n_features)
    batch_size : integer
        To reduce memory consumption over the naive solution, data are
        processed in batches, comprising batch_size rows of X and
        batch_size rows of Y. The default value is quite conservative, but
        can be changed for fine-tuning. The larger the number, the larger the
        memory usage.
    metric : string or callable, default 'euclidean'
        metric to use for distance computation. Any metric from scikit-learn
        or scipy.spatial.distance can be used.
        If metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays as input and return one value indicating the
        distance between them. This works for Scipy's metrics, but is less
        efficient than passing the metric name as a string.
        Distance matrices are not supported.
        Valid values for metric are:
        - from scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2',
          'manhattan']
        - from scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev',
          'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski',
          'mahalanobis', 'matching', 'minkowski', 'rogerstanimoto',
          'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath',
          'sqeuclidean', 'yule']
        See the documentation for scipy.spatial.distance for details on these
        metrics.
    metric_kwargs : dict, optional
        Keyword arguments to pass to specified metric function.
    axis : int, optional, default 1
        Axis along which the argmin and distances are to be computed.
    Returns
    -------
    argmin : numpy.ndarray
        Y[argmin[i], :] is the row in Y that is closest to X[i, :].
    distances : numpy.ndarray
        distances[i] is the distance between the i-th row in X and the
        argmin[i]-th row in Y.
    See also
    --------
    sklearn.metrics.pairwise_distances
    sklearn.metrics.pairwise_distances_argmin
    """
    dist_func = None
    if metric in PAIRWISE_DISTANCE_FUNCTIONS:
        dist_func = PAIRWISE_DISTANCE_FUNCTIONS[metric]
    elif not callable(metric) and not isinstance(metric, str):
        raise ValueError("'metric' must be a string or a callable")
    X, Y = check_pairwise_arrays(X, Y)
    if metric_kwargs is None:
        metric_kwargs = {}
    if axis == 0:
        X, Y = Y, X
    # Allocate output arrays
    indices = np.empty(X.shape[0], dtype=np.intp)
    values = np.empty(X.shape[0])
    # BUGFIX: np.infty is a deprecated alias removed in NumPy 2.0;
    # np.inf is the canonical spelling and behaves identically.
    values.fill(np.inf)
    for chunk_x in gen_batches(X.shape[0], batch_size):
        X_chunk = X[chunk_x, :]
        for chunk_y in gen_batches(Y.shape[0], batch_size):
            Y_chunk = Y[chunk_y, :]
            if dist_func is not None:
                if metric == 'euclidean':  # special case, for speed
                    # Expand ||x - y||^2 = ||x||^2 - 2 x.y + ||y||^2 to reuse
                    # a single matrix product per chunk pair.
                    d_chunk = safe_sparse_dot(X_chunk, Y_chunk.T,
                                              dense_output=True)
                    d_chunk *= -2
                    d_chunk += row_norms(X_chunk, squared=True)[:, np.newaxis]
                    d_chunk += row_norms(Y_chunk, squared=True)[np.newaxis, :]
                    # Rounding errors can make tiny distances negative.
                    np.maximum(d_chunk, 0, d_chunk)
                else:
                    d_chunk = dist_func(X_chunk, Y_chunk, **metric_kwargs)
            else:
                d_chunk = pairwise_distances(X_chunk, Y_chunk,
                                             metric=metric, **metric_kwargs)
            # Update indices and minimum values using chunk
            min_indices = d_chunk.argmin(axis=1)
            min_values = d_chunk[np.arange(chunk_x.stop - chunk_x.start),
                                 min_indices]
            # chunk_x is a slice, so indices[chunk_x] is a writable view.
            flags = values[chunk_x] > min_values
            indices[chunk_x][flags] = min_indices[flags] + chunk_y.start
            values[chunk_x][flags] = min_values[flags]
    if metric == "euclidean" and not metric_kwargs.get("squared", False):
        # The fast path above computed squared distances; undo that here.
        np.sqrt(values, values)
    return indices, values
def pairwise_distances_argmin(X, Y, axis=1, metric="euclidean",
                              batch_size=500, metric_kwargs=None):
    """For each row in X, find the index of the closest row in Y.

    This is mostly equivalent to
    ``pairwise_distances(X, Y=Y, metric=metric).argmin(axis=axis)`` but
    uses much less memory and is faster for large arrays.  Works with
    dense 2D arrays only.

    Parameters
    ----------
    X : array-like, shape (n_samples1, n_features)
        Array containing points.
    Y : array-like, shape (n_samples2, n_features)
        Array containing points.
    axis : int, optional, default 1
        Axis along which the argmin is to be computed.
    metric : string or callable
        Metric to use for distance computation.  Any metric from
        scikit-learn or scipy.spatial.distance can be used; a callable
        should take two arrays as input and return one distance value.
        Distance matrices are not supported.
    batch_size : integer
        Number of rows of X and of Y processed per batch; larger values
        trade memory for speed.
    metric_kwargs : dict
        Keyword arguments to pass to the specified metric function.

    Returns
    -------
    argmin : numpy.ndarray
        Y[argmin[i], :] is the row in Y that is closest to X[i, :].

    See also
    --------
    sklearn.metrics.pairwise_distances
    sklearn.metrics.pairwise_distances_argmin_min
    """
    # Delegate to the argmin/min helper and discard the distances.
    kwargs = {} if metric_kwargs is None else metric_kwargs
    argmin, _ = pairwise_distances_argmin_min(X, Y, axis=axis, metric=metric,
                                              batch_size=batch_size,
                                              metric_kwargs=kwargs)
    return argmin
def manhattan_distances(X, Y=None, sum_over_features=True,
                        size_threshold=5e8):
    """Compute the L1 (cityblock) distances between the vectors in X and Y.

    With ``sum_over_features=False`` the componentwise absolute
    differences are returned instead of the summed distances.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array_like
        An array with shape (n_samples_X, n_features).
    Y : array_like, optional
        An array with shape (n_samples_Y, n_features).
    sum_over_features : bool, default=True
        If True the function returns the pairwise distance matrix,
        else it returns the componentwise L1 pairwise-distances.
        Not supported for sparse matrix inputs.
    size_threshold : int, default=5e8
        Unused parameter, kept for backward compatibility.

    Returns
    -------
    D : array
        If sum_over_features is False, shape is
        (n_samples_X * n_samples_Y, n_features) and D contains the
        componentwise L1 pairwise-distances (i.e. absolute differences);
        otherwise shape is (n_samples_X, n_samples_Y) and D contains the
        pairwise L1 distances.
    """
    X, Y = check_pairwise_arrays(X, Y)

    if issparse(X) or issparse(Y):
        if not sum_over_features:
            raise TypeError("sum_over_features=%r not supported"
                            " for sparse matrices" % sum_over_features)
        # Normalize both operands to CSR and fill the result in Cython.
        X = csr_matrix(X, copy=False)
        Y = csr_matrix(Y, copy=False)
        result = np.zeros((X.shape[0], Y.shape[0]))
        _sparse_manhattan(X.data, X.indices, X.indptr,
                          Y.data, Y.indices, Y.indptr,
                          X.shape[1], result)
        return result

    if sum_over_features:
        # Dense summed distances: delegate to scipy's C implementation.
        return distance.cdist(X, Y, 'cityblock')

    # Componentwise: broadcast to (n_X, n_Y, n_features), then flatten
    # the first two axes.
    componentwise = np.abs(X[:, np.newaxis, :] - Y[np.newaxis, :, :])
    return componentwise.reshape((-1, X.shape[1]))
def cosine_distances(X, Y=None):
    """Compute the cosine distance between samples in X and Y.

    Cosine distance is defined as 1.0 minus the cosine similarity.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array_like, sparse matrix
        with shape (n_samples_X, n_features).
    Y : array_like, sparse matrix (optional)
        with shape (n_samples_Y, n_features).

    Returns
    -------
    distance matrix : array
        An array with shape (n_samples_X, n_samples_Y).

    See also
    --------
    sklearn.metrics.pairwise.cosine_similarity
    scipy.spatial.distance.cosine (dense matrices only)
    """
    # Compute 1.0 - cosine_similarity(X, Y) in place to avoid a copy.
    distances = cosine_similarity(X, Y)
    distances *= -1
    distances += 1
    return distances
# Paired distances
def paired_euclidean_distances(X, Y):
    """Compute the paired euclidean distances between X and Y.

    The i-th entry is the distance between X[i] and Y[i].

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
    Y : array-like, shape (n_samples, n_features)

    Returns
    -------
    distances : ndarray (n_samples, )
    """
    X, Y = check_paired_arrays(X, Y)
    # Row-wise L2 norm of the difference gives the paired distances.
    diff = X - Y
    return row_norms(diff)
def paired_manhattan_distances(X, Y):
    """Compute the paired L1 distances between the vectors in X and Y.

    The i-th entry is the L1 distance between X[i] and Y[i].

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
    Y : array-like, shape (n_samples, n_features)

    Returns
    -------
    distances : ndarray (n_samples, )
    """
    X, Y = check_paired_arrays(X, Y)
    delta = X - Y
    if issparse(delta):
        # Take |.| of the stored entries only, then sum each row.
        delta.data = np.abs(delta.data)
        return np.squeeze(np.array(delta.sum(axis=1)))
    return np.abs(delta).sum(axis=-1)
def paired_cosine_distances(X, Y):
    """Compute the paired cosine distances between X and Y.

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
    Y : array-like, shape (n_samples, n_features)

    Returns
    -------
    distances : ndarray, shape (n_samples, )

    Notes
    -----
    The cosine distance is equivalent to half the squared euclidean
    distance if each sample is normalized to unit norm.
    """
    X, Y = check_paired_arrays(X, Y)
    # After unit normalization, ||x - y||^2 / 2 equals 1 - cos(x, y).
    normalized_diff = normalize(X) - normalize(Y)
    return .5 * row_norms(normalized_diff, squared=True)
# Mapping from metric names accepted by paired_distances() to the paired
# distance implementations above.  'euclidean'/'l2' are aliases, as are
# 'l1'/'manhattan'/'cityblock'.
PAIRED_DISTANCES = {
    'cosine': paired_cosine_distances,
    'euclidean': paired_euclidean_distances,
    'l2': paired_euclidean_distances,
    'l1': paired_manhattan_distances,
    'manhattan': paired_manhattan_distances,
    'cityblock': paired_manhattan_distances}
def paired_distances(X, Y, metric="euclidean", **kwds):
    """Compute the paired distances between X and Y.

    Computes the distances between (X[0], Y[0]), (X[1], Y[1]), etc...

    Read more in the :ref:`User Guide <metrics>`.

    Parameters
    ----------
    X : ndarray (n_samples, n_features)
        Array 1 for distance computation.
    Y : ndarray (n_samples, n_features)
        Array 2 for distance computation.
    metric : string or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string, it must be one of the options
        specified in PAIRED_DISTANCES, including "euclidean",
        "manhattan", or "cosine".
        Alternatively, if metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The
        callable should take two arrays from X as input and return a value
        indicating the distance between them.

    Returns
    -------
    distances : ndarray (n_samples, )

    Examples
    --------
    >>> from sklearn.metrics.pairwise import paired_distances
    >>> X = [[0, 1], [1, 1]]
    >>> Y = [[0, 1], [2, 1]]
    >>> paired_distances(X, Y)
    array([ 0.,  1.])

    See also
    --------
    pairwise_distances : pairwise distances.
    """
    if metric in PAIRED_DISTANCES:
        # Known metric name: dispatch to the specialized implementation.
        return PAIRED_DISTANCES[metric](X, Y)
    if callable(metric):
        # Validate the arrays first (it is usually done by the metric).
        X, Y = check_paired_arrays(X, Y)
        return np.array([metric(x, y) for x, y in zip(X, Y)], dtype=float)
    raise ValueError('Unknown distance %s' % metric)
# Kernels
def linear_kernel(X, Y=None):
    """Compute the linear kernel between X and Y.

    Read more in the :ref:`User Guide <linear_kernel>`.

    Parameters
    ----------
    X : array of shape (n_samples_1, n_features)
    Y : array of shape (n_samples_2, n_features)

    Returns
    -------
    Gram matrix : array of shape (n_samples_1, n_samples_2)
    """
    X, Y = check_pairwise_arrays(X, Y)
    # The linear kernel is just the Gram matrix X.Y^T.
    gram = safe_sparse_dot(X, Y.T, dense_output=True)
    return gram
def polynomial_kernel(X, Y=None, degree=3, gamma=None, coef0=1):
    """Compute the polynomial kernel between X and Y::

        K(X, Y) = (gamma <X, Y> + coef0)^degree

    Read more in the :ref:`User Guide <polynomial_kernel>`.

    Parameters
    ----------
    X : ndarray of shape (n_samples_1, n_features)
    Y : ndarray of shape (n_samples_2, n_features)
    degree : int, default 3
    gamma : float, default None
        If None, defaults to 1.0 / n_features.
    coef0 : int, default 1

    Returns
    -------
    Gram matrix : array of shape (n_samples_1, n_samples_2)
    """
    X, Y = check_pairwise_arrays(X, Y)
    # Default bandwidth: one over the number of features.
    coef = 1.0 / X.shape[1] if gamma is None else gamma
    K = safe_sparse_dot(X, Y.T, dense_output=True)
    # Apply (gamma * <x, y> + coef0) ** degree in place.
    K *= coef
    K += coef0
    K **= degree
    return K
def sigmoid_kernel(X, Y=None, gamma=None, coef0=1):
    """Compute the sigmoid kernel between X and Y::

        K(X, Y) = tanh(gamma <X, Y> + coef0)

    Read more in the :ref:`User Guide <sigmoid_kernel>`.

    Parameters
    ----------
    X : ndarray of shape (n_samples_1, n_features)
    Y : ndarray of shape (n_samples_2, n_features)
    gamma : float, default None
        If None, defaults to 1.0 / n_features.
    coef0 : int, default 1

    Returns
    -------
    Gram matrix: array of shape (n_samples_1, n_samples_2)
    """
    X, Y = check_pairwise_arrays(X, Y)
    scale = 1.0 / X.shape[1] if gamma is None else gamma
    K = safe_sparse_dot(X, Y.T, dense_output=True)
    # tanh(gamma * <x, y> + coef0), computed entirely in place.
    K *= scale
    K += coef0
    np.tanh(K, out=K)
    return K
def rbf_kernel(X, Y=None, gamma=None):
    """Compute the rbf (gaussian) kernel between X and Y::

        K(x, y) = exp(-gamma ||x-y||^2)

    for each pair of rows x in X and y in Y.

    Read more in the :ref:`User Guide <rbf_kernel>`.

    Parameters
    ----------
    X : array of shape (n_samples_X, n_features)
    Y : array of shape (n_samples_Y, n_features)
    gamma : float
        If None, defaults to 1.0 / n_features.

    Returns
    -------
    kernel_matrix : array of shape (n_samples_X, n_samples_Y)
    """
    X, Y = check_pairwise_arrays(X, Y)
    width = 1.0 / X.shape[1] if gamma is None else gamma
    # Start from squared euclidean distances and exponentiate in place.
    K = euclidean_distances(X, Y, squared=True)
    K *= -width
    np.exp(K, out=K)
    return K
def cosine_similarity(X, Y=None, dense_output=True):
    """Compute cosine similarity between samples in X and Y.

    Cosine similarity, or the cosine kernel, computes similarity as the
    normalized dot product of X and Y::

        K(X, Y) = <X, Y> / (||X||*||Y||)

    On L2-normalized data, this function is equivalent to linear_kernel.

    Read more in the :ref:`User Guide <cosine_similarity>`.

    Parameters
    ----------
    X : ndarray or sparse array, shape: (n_samples_X, n_features)
        Input data.
    Y : ndarray or sparse array, shape: (n_samples_Y, n_features)
        Input data. If ``None``, the output will be the pairwise
        similarities between all samples in ``X``.
    dense_output : boolean (optional), default True
        Whether to return dense output even when the input is sparse. If
        ``False``, the output is sparse if both input arrays are sparse.

    Returns
    -------
    kernel matrix : array
        An array with shape (n_samples_X, n_samples_Y).
    """
    X, Y = check_pairwise_arrays(X, Y)
    X_normalized = normalize(X, copy=True)
    # Reuse the normalized X when computing the self-similarity matrix.
    Y_normalized = X_normalized if X is Y else normalize(Y, copy=True)
    return safe_sparse_dot(X_normalized, Y_normalized.T,
                           dense_output=dense_output)
def additive_chi2_kernel(X, Y=None):
    """Compute the additive chi-squared kernel between observations in X and Y.

    The chi-squared kernel is computed between each pair of rows in X and Y.
    X and Y have to be non-negative. This kernel is most commonly applied to
    histograms.

    The chi-squared kernel is given by::

        k(x, y) = -Sum [(x - y)^2 / (x + y)]

    It can be interpreted as a weighted difference per entry.

    Read more in the :ref:`User Guide <chi2_kernel>`.

    Notes
    -----
    As the negative of a distance, this kernel is only conditionally positive
    definite.

    Parameters
    ----------
    X : array-like of shape (n_samples_X, n_features)
    Y : array of shape (n_samples_Y, n_features)

    Returns
    -------
    kernel_matrix : array of shape (n_samples_X, n_samples_Y)

    References
    ----------
    * Zhang, J. and Marszalek, M. and Lazebnik, S. and Schmid, C.
      Local features and kernels for classification of texture and object
      categories: A comprehensive study
      International Journal of Computer Vision 2007

    See also
    --------
    chi2_kernel : The exponentiated version of the kernel, which is usually
        preferable.
    sklearn.kernel_approximation.AdditiveChi2Sampler : A Fourier approximation
        to this kernel.
    """
    # Reject sparse input up front: the Cython helper is dense-only.
    if issparse(X) or issparse(Y):
        raise ValueError("additive_chi2 does not support sparse matrices.")
    X, Y = check_pairwise_arrays(X, Y)
    if (X < 0).any():
        raise ValueError("X contains negative values.")
    if Y is not X and (Y < 0).any():
        raise ValueError("Y contains negative values.")
    # The Cython helper fills this buffer in place.
    kernel = np.zeros((X.shape[0], Y.shape[0]), dtype=X.dtype)
    _chi2_kernel_fast(X, Y, kernel)
    return kernel
def chi2_kernel(X, Y=None, gamma=1.):
    """Compute the exponential chi-squared kernel between X and Y.

    The chi-squared kernel is computed between each pair of rows in X and Y.
    X and Y have to be non-negative. This kernel is most commonly applied to
    histograms.

    The chi-squared kernel is given by::

        k(x, y) = exp(-gamma Sum [(x - y)^2 / (x + y)])

    It can be interpreted as a weighted difference per entry.

    Read more in the :ref:`User Guide <chi2_kernel>`.

    Parameters
    ----------
    X : array-like of shape (n_samples_X, n_features)
    Y : array of shape (n_samples_Y, n_features)
    gamma : float, default=1.
        Scaling parameter of the chi2 kernel.

    Returns
    -------
    kernel_matrix : array of shape (n_samples_X, n_samples_Y)

    References
    ----------
    * Zhang, J. and Marszalek, M. and Lazebnik, S. and Schmid, C.
      Local features and kernels for classification of texture and object
      categories: A comprehensive study
      International Journal of Computer Vision 2007

    See also
    --------
    additive_chi2_kernel : The additive version of this kernel
    sklearn.kernel_approximation.AdditiveChi2Sampler : A Fourier approximation
        to the additive version of this kernel.
    """
    # Scale the (negative) additive kernel and exponentiate in place.
    K = additive_chi2_kernel(X, Y)
    K *= gamma
    np.exp(K, out=K)
    return K
# Helper functions - distance
# Mapping from metric names accepted by pairwise_distances() to the
# scikit-learn distance implementations above (scipy metrics are handled
# separately via cdist/pdist).
PAIRWISE_DISTANCE_FUNCTIONS = {
    # If updating this dictionary, update the doc in both distance_metrics()
    # and also in pairwise_distances()!
    'cityblock': manhattan_distances,
    'cosine': cosine_distances,
    'euclidean': euclidean_distances,
    'l2': euclidean_distances,
    'l1': manhattan_distances,
    'manhattan': manhattan_distances,
    'precomputed': None,  # HACK: precomputed is always allowed, never called
    }
def distance_metrics():
    """Return the mapping of valid metric names for pairwise_distances.

    This function simply returns the valid pairwise distance metrics.
    It exists to allow for a description of the mapping for
    each of the valid strings.

    The valid distance metrics, and the function they map to, are:

    ============     ====================================
    metric           Function
    ============     ====================================
    'cityblock'      metrics.pairwise.manhattan_distances
    'cosine'         metrics.pairwise.cosine_distances
    'euclidean'      metrics.pairwise.euclidean_distances
    'l1'             metrics.pairwise.manhattan_distances
    'l2'             metrics.pairwise.euclidean_distances
    'manhattan'      metrics.pairwise.manhattan_distances
    ============     ====================================

    Read more in the :ref:`User Guide <metrics>`.

    Returns
    -------
    dict
        Mapping from metric name to distance function (the shared
        module-level table, not a copy).
    """
    return PAIRWISE_DISTANCE_FUNCTIONS
def _parallel_pairwise(X, Y, func, n_jobs, **kwds):
    """Break the pairwise matrix into n_jobs even column slices and
    compute them in parallel, stacking the partial results."""
    if n_jobs < 0:
        # Negative n_jobs means "all CPUs but (-n_jobs - 1)".
        n_jobs = max(cpu_count() + 1 + n_jobs, 1)

    if Y is None:
        Y = X

    if n_jobs == 1:
        # Special case to avoid picklability checks in delayed
        return func(X, Y, **kwds)

    # TODO: in some cases, backend='threading' may be appropriate
    delayed_func = delayed(func)
    results = Parallel(n_jobs=n_jobs, verbose=0)(
        delayed_func(X, Y[s], **kwds)
        for s in gen_even_slices(Y.shape[0], n_jobs))
    return np.hstack(results)
def _pairwise_callable(X, Y, metric, **kwds):
    """Handle the callable-metric case for pairwise_{distances,kernels}."""
    X, Y = check_pairwise_arrays(X, Y)

    if X is Y:
        # Only evaluate the metric on the upper triangle, then mirror it.
        result = np.zeros((X.shape[0], Y.shape[0]), dtype='float')
        for row, col in itertools.combinations(range(X.shape[0]), 2):
            result[row, col] = metric(X[row], Y[col], **kwds)
        # NB: result += result.T would produce incorrect results
        result = result + result.T
        # Fill the diagonal explicitly: nonzero diagonals are allowed
        # for both metrics and kernels.
        for row in range(X.shape[0]):
            sample = X[row]
            result[row, row] = metric(sample, sample, **kwds)
    else:
        # General case: evaluate every (row of X, row of Y) pair.
        result = np.empty((X.shape[0], Y.shape[0]), dtype='float')
        for row, col in itertools.product(range(X.shape[0]),
                                          range(Y.shape[0])):
            result[row, col] = metric(X[row], Y[col], **kwds)

    return result
# Metric names accepted by pairwise_distances(): the scikit-learn
# implementations plus everything scipy.spatial.distance understands.
_VALID_METRICS = ['euclidean', 'l2', 'l1', 'manhattan', 'cityblock',
                  'braycurtis', 'canberra', 'chebyshev', 'correlation',
                  'cosine', 'dice', 'hamming', 'jaccard', 'kulsinski',
                  'mahalanobis', 'matching', 'minkowski', 'rogerstanimoto',
                  'russellrao', 'seuclidean', 'sokalmichener',
                  'sokalsneath', 'sqeuclidean', 'yule', "wminkowski"]
def pairwise_distances(X, Y=None, metric="euclidean", n_jobs=1, **kwds):
    """ Compute the distance matrix from a vector array X and optional Y.
    This method takes either a vector array or a distance matrix, and returns
    a distance matrix. If the input is a vector array, the distances are
    computed. If the input is a distances matrix, it is returned instead.
    This method provides a safe way to take a distance matrix as input, while
    preserving compatibility with many other algorithms that take a vector
    array.
    If Y is given (default is None), then the returned matrix is the pairwise
    distance between the arrays from both X and Y.
    Valid values for metric are:
    - From scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2',
      'manhattan']. These metrics support sparse matrix inputs.
    - From scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev',
      'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski', 'mahalanobis',
      'matching', 'minkowski', 'rogerstanimoto', 'russellrao', 'seuclidean',
      'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule']
    See the documentation for scipy.spatial.distance for details on these
    metrics. These metrics do not support sparse matrix inputs.
    Note that in the case of 'cityblock', 'cosine' and 'euclidean' (which are
    valid scipy.spatial.distance metrics), the scikit-learn implementation
    will be used, which is faster and has support for sparse matrices (except
    for 'cityblock'). For a verbose description of the metrics from
    scikit-learn, see the __doc__ of the sklearn.pairwise.distance_metrics
    function.
    Read more in the :ref:`User Guide <metrics>`.
    Parameters
    ----------
    X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \
             [n_samples_a, n_features] otherwise
        Array of pairwise distances between samples, or a feature array.
    Y : array [n_samples_b, n_features], optional
        An optional second feature array. Only allowed if metric != "precomputed".
    metric : string, or callable
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string, it must be one of the options
        allowed by scipy.spatial.distance.pdist for its metric parameter, or
        a metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS.
        If metric is "precomputed", X is assumed to be a distance matrix.
        Alternatively, if metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays from X as input and return a value indicating
        the distance between them.
    n_jobs : int
        The number of jobs to use for the computation. This works by breaking
        down the pairwise matrix into n_jobs even slices and computing them in
        parallel.
        If -1 all CPUs are used. If 1 is given, no parallel computing code is
        used at all, which is useful for debugging. For n_jobs below -1,
        (n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
        are used.
    `**kwds` : optional keyword parameters
        Any further parameters are passed directly to the distance function.
        If using a scipy.spatial.distance metric, the parameters are still
        metric dependent. See the scipy docs for usage examples.
    Returns
    -------
    D : array [n_samples_a, n_samples_a] or [n_samples_a, n_samples_b]
        A distance matrix D such that D_{i, j} is the distance between the
        ith and jth vectors of the given matrix X, if Y is None.
        If Y is not None, then D_{i, j} is the distance between the ith array
        from X and the jth array from Y.
    """
    # Validate the metric name before doing any work.
    if (metric not in _VALID_METRICS and
            not callable(metric) and metric != "precomputed"):
        raise ValueError("Unknown metric %s. "
                         "Valid metrics are %s, or 'precomputed', or a "
                         "callable" % (metric, _VALID_METRICS))

    if metric == "precomputed":
        # X already holds the distances: validate shape and return as-is.
        X, _ = check_pairwise_arrays(X, Y, precomputed=True)
        return X
    elif metric in PAIRWISE_DISTANCE_FUNCTIONS:
        # Fast scikit-learn implementation (supports sparse input).
        func = PAIRWISE_DISTANCE_FUNCTIONS[metric]
    elif callable(metric):
        # User-supplied callable: evaluate pairwise, mirroring the matrix
        # when X is Y.
        func = partial(_pairwise_callable, metric=metric, **kwds)
    else:
        # Fall back to scipy's dense-only implementations.
        if issparse(X) or issparse(Y):
            raise TypeError("scipy distance metrics do not"
                            " support sparse matrices.")
        X, Y = check_pairwise_arrays(X, Y)
        if n_jobs == 1 and X is Y:
            # pdist computes only the condensed upper triangle, which is
            # cheaper than cdist for the symmetric case.
            return distance.squareform(distance.pdist(X, metric=metric,
                                                      **kwds))
        func = partial(distance.cdist, metric=metric, **kwds)

    return _parallel_pairwise(X, Y, func, n_jobs, **kwds)
# Helper functions - kernels
# Mapping from kernel names accepted by pairwise_kernels() to the kernel
# implementations above.
PAIRWISE_KERNEL_FUNCTIONS = {
    # If updating this dictionary, update the doc in both kernel_metrics()
    # and also in pairwise_kernels()!
    'additive_chi2': additive_chi2_kernel,
    'chi2': chi2_kernel,
    'linear': linear_kernel,
    'polynomial': polynomial_kernel,
    'poly': polynomial_kernel,
    'rbf': rbf_kernel,
    'sigmoid': sigmoid_kernel,
    'cosine': cosine_similarity, }
def kernel_metrics():
    """ Return the mapping of valid metric names for pairwise_kernels.
    This function simply returns the valid pairwise kernel metrics.
    It exists, however, to allow for a verbose description of the mapping for
    each of the valid strings.
    The valid kernel metrics, and the function they map to, are:
      ===============   ========================================
      metric            Function
      ===============   ========================================
      'additive_chi2'   sklearn.pairwise.additive_chi2_kernel
      'chi2'            sklearn.pairwise.chi2_kernel
      'linear'          sklearn.pairwise.linear_kernel
      'poly'            sklearn.pairwise.polynomial_kernel
      'polynomial'      sklearn.pairwise.polynomial_kernel
      'rbf'             sklearn.pairwise.rbf_kernel
      'sigmoid'         sklearn.pairwise.sigmoid_kernel
      'cosine'          sklearn.pairwise.cosine_similarity
      ===============   ========================================
    Read more in the :ref:`User Guide <metrics>`.
    """
    return PAIRWISE_KERNEL_FUNCTIONS
# Keyword parameters recognized by each kernel; used by pairwise_kernels()
# to filter **kwds when filter_params=True.
KERNEL_PARAMS = {
    "additive_chi2": {},
    "chi2": {"gamma": 1.},
    "cosine": {},
    "linear": {},
    "poly": {"gamma": None, "degree": 3, "coef0": 1},
    "polynomial": {"gamma": None, "degree": 3, "coef0": 1},
    "rbf": {"gamma": None},
    "sigmoid": {"gamma": None, "coef0": 1},
}
def pairwise_kernels(X, Y=None, metric="linear", filter_params=False,
                     n_jobs=1, **kwds):
    """Compute the kernel between arrays X and optional array Y.
    This method takes either a vector array or a kernel matrix, and returns
    a kernel matrix. If the input is a vector array, the kernels are
    computed. If the input is a kernel matrix, it is returned instead.
    This method provides a safe way to take a kernel matrix as input, while
    preserving compatibility with many other algorithms that take a vector
    array.
    If Y is given (default is None), then the returned matrix is the pairwise
    kernel between the arrays from both X and Y.
    Valid values for metric are::
        ['rbf', 'sigmoid', 'polynomial', 'poly', 'linear', 'cosine']
    Read more in the :ref:`User Guide <metrics>`.
    Parameters
    ----------
    X : array [n_samples_a, n_samples_a] if metric == "precomputed", or, \
             [n_samples_a, n_features] otherwise
        Array of pairwise kernels between samples, or a feature array.
    Y : array [n_samples_b, n_features]
        A second feature array only if X has shape [n_samples_a, n_features].
    metric : string, or callable
        The metric to use when calculating kernel between instances in a
        feature array. If metric is a string, it must be one of the metrics
        in pairwise.PAIRWISE_KERNEL_FUNCTIONS.
        If metric is "precomputed", X is assumed to be a kernel matrix.
        Alternatively, if metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays from X as input and return a value indicating
        the distance between them.
    n_jobs : int
        The number of jobs to use for the computation. This works by breaking
        down the pairwise matrix into n_jobs even slices and computing them in
        parallel.
        If -1 all CPUs are used. If 1 is given, no parallel computing code is
        used at all, which is useful for debugging. For n_jobs below -1,
        (n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
        are used.
    filter_params: boolean
        Whether to filter invalid parameters or not.
    `**kwds` : optional keyword parameters
        Any further parameters are passed directly to the kernel function.
    Returns
    -------
    K : array [n_samples_a, n_samples_a] or [n_samples_a, n_samples_b]
        A kernel matrix K such that K_{i, j} is the kernel between the
        ith and jth vectors of the given matrix X, if Y is None.
        If Y is not None, then K_{i, j} is the kernel between the ith array
        from X and the jth array from Y.
    Notes
    -----
    If metric is 'precomputed', Y is ignored and X is returned.
    """
    if metric == "precomputed":
        # X already holds the kernel matrix: validate shape and return it.
        X, _ = check_pairwise_arrays(X, Y, precomputed=True)
        return X
    elif metric in PAIRWISE_KERNEL_FUNCTIONS:
        if filter_params:
            # Keep only the keyword arguments this kernel understands.
            kwds = dict((k, kwds[k]) for k in kwds
                        if k in KERNEL_PARAMS[metric])
        func = PAIRWISE_KERNEL_FUNCTIONS[metric]
    elif callable(metric):
        # User-supplied callable: evaluate pairwise, mirroring when X is Y.
        func = partial(_pairwise_callable, metric=metric, **kwds)
    else:
        raise ValueError("Unknown kernel %r" % metric)

    return _parallel_pairwise(X, Y, func, n_jobs, **kwds)
|
carrillo/scikit-learn
|
sklearn/metrics/pairwise.py
|
Python
|
bsd-3-clause
| 44,120
|
[
"Gaussian"
] |
2effb95bdcb991fc19165ad9aa5c9c6a732d8c586a13f387bd90cc801fb9ad94
|
'''
Sort entries in a tabular BLAST output file in reverse order.
-----------------------------------------------------------
(c) 2013 Allegra Via and Kristian Rother
   Licensed under the conditions of the Python License
   This code appears in section 8.4.2 of the book
   "Managing Biological Data with Python".
-----------------------------------------------------------
'''
from operator import itemgetter


def read_table(lines):
    """Parse comma-separated BLAST rows, coercing column 2 to float so
    the subsequent sort is numeric rather than lexicographic."""
    table = []
    for line in lines:
        col = line.split(',')
        col[2] = float(col[2])
        table.append(col)
    return table


def sort_table(table):
    """Return the rows sorted by column 2 in descending order."""
    return sorted(table, key=itemgetter(2), reverse=True)


def write_table(table, outfile):
    """Write each row as a tab-separated line."""
    for row in table:
        outfile.write("\t".join(str(x) for x in row) + '\n')


def main():
    # 'with' guarantees both files are closed even if parsing fails
    # (the original left them open on error).
    with open("BlastOut.csv") as input_file, \
         open("BlastOutSorted.csv", "w") as output_file:
        write_table(sort_table(read_table(input_file)), output_file)


if __name__ == "__main__":
    main()
|
raymonwu/Managing_Your_Biological_Data_with_Python_3
|
08-sorting_data/8.4.2_sort_blast_output.py
|
Python
|
mit
| 883
|
[
"BLAST"
] |
20a014fa3864852431e335d6c0590554c61978153377574be9bbb4291fc4d42d
|
from math import sqrt
import pylab as plt
from ase import Atoms
from gpaw import GPAW

# Lattice constant of silicon, in Angstrom.
a = 5.475
# Four-atom tetragonal silicon cell (fractional coordinates); the in-plane
# axes are shortened by 1/sqrt(2) relative to the cubic cell.
atoms = Atoms(symbols='Si4',
              scaled_positions=[(.0, .0, .0),
                                (.5, .5, .5),
                                (.0, .5, .75),
                                (.5, .0, .25)],
              cell=(a / sqrt(2), a / sqrt(2), a),
              pbc=True)
# GPAW calculator: real-space grid spacing h (Angstrom), 6x6x4 k-point mesh,
# 10 bands; log output goes to si.txt.
calc = GPAW(h=.23,
            kpts=(6, 6, 4),
            nbands=10,
            txt='si.txt')
# NOTE(review): set_calculator() is the older ASE spelling (newer ASE uses
# atoms.calc = calc) — confirm against the installed ASE version.
atoms.set_calculator(calc)
# Triggers the self-consistent DFT calculation.
atoms.get_potential_energy()
# Save the converged state for later restarts/post-processing.
calc.write('si_664.gpw')
# Density of states with 0.2 eV Gaussian broadening, then plot it.
energy, dos = calc.get_dos(width=.2)
plt.plot(energy, dos)
plt.axis('tight')
plt.xlabel(r'$\epsilon - \epsilon_F \ \rm{(eV)}$')
plt.ylabel('Density of States (1/eV)')
plt.show()
|
qsnake/gpaw
|
doc/exercises/dos/dos_Si.py
|
Python
|
gpl-3.0
| 768
|
[
"ASE",
"GPAW"
] |
2eef58618c0307fa0fe1c4406d3bc9a2207df93c8dbf8fe0be81b0aafa71b65a
|
"""
JobReport class encapsulates methods for reporting job status, application status and job parameters to the WorkloadManagement JobStateUpdate service.
"""
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Utilities import Time, DEncode
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.RequestManagementSystem.Client.Operation import Operation
__RCSID__ = "$Id$"
class JobReport( object ):
  """
  .. class:: JobReport

  Client-side buffer of job reporting records. Status, application status
  and job parameter updates are cached locally and pushed to the
  WorkloadManagement/JobStateUpdate service either immediately
  ( sendFlag = True ) or in bulk via :meth:`commit`.
  """

  def __init__( self, jobid, source = '' ):
    """ c'tor

    :param jobid: id of the job being reported on ( coerced to int )
    :param str source: label of the reporting component; defaults to
                       'Job_<jobID>' when empty
    """
    # pending ( status, minorStatus, timeStamp ) records
    self.jobStatusInfo = []
    # pending ( applicationStatus, timeStamp ) records
    self.appStatusInfo = []
    # pending parameters: name -> ( value, timeStamp )
    self.jobParameters = {}
    self.jobID = int( jobid )
    self.source = source
    if not source:
      self.source = 'Job_%d' % self.jobID

  def setJob( self, jobID ):
    """ Set the job ID for which to send reports
    """
    self.jobID = jobID

  def setJobStatus( self, status = '', minor = '', application = '', sendFlag = True ):
    """ Send job status information to the JobState service for jobID

    :param str status: major job status
    :param str minor: minor status string
    :param str application: optional application status recorded alongside
    :param bool sendFlag: if True, flush the cached records immediately
    :return: S_OK / S_ERROR structure
    """
    if not self.jobID:
      # jobID of 0/None means local execution: nothing to report upstream
      return S_OK( 'Local execution, jobID is null.' )
    timeStamp = Time.toString()
    # add job status record; single quotes are stripped from the values
    self.jobStatusInfo.append( ( status.replace( "'", '' ), minor.replace( "'", '' ), timeStamp ) )
    if application:
      self.appStatusInfo.append( ( application.replace( "'", '' ), timeStamp ) )
    if sendFlag:
      # and send
      return self.sendStoredStatusInfo()
    return S_OK()

  def setApplicationStatus( self, appStatus, sendFlag = True ):
    """ Send application status information to the JobState service for jobID

    :param appStatus: application status; non-strings are repr()'d first
    :param bool sendFlag: if True, flush the cached records immediately
    """
    if not self.jobID:
      return S_OK( 'Local execution, jobID is null.' )
    timeStamp = Time.toString()
    # add Application status record
    if not isinstance(appStatus, str):
      appStatus = repr(appStatus)
    self.appStatusInfo.append( ( appStatus.replace( "'", '' ), timeStamp ) )
    if sendFlag:
      # and send
      return self.sendStoredStatusInfo()
    return S_OK()

  def setJobParameter( self, par_name, par_value, sendFlag = True ):
    """ Send job parameter for jobID

    :param str par_name: parameter name ( later values overwrite earlier ones )
    :param par_value: parameter value
    :param bool sendFlag: if True, flush the cached parameters immediately
    """
    if not self.jobID:
      return S_OK( 'Local execution, jobID is null.' )
    timeStamp = Time.toString()
    # add job parameter record
    self.jobParameters[par_name] = ( par_value, timeStamp )
    if sendFlag:
      # and send
      return self.sendStoredJobParameters()
    return S_OK()

  def setJobParameters( self, parameters, sendFlag = True ):
    """ Send job parameters for jobID

    :param parameters: iterable of ( name, value ) pairs
    :param bool sendFlag: if True, flush the cached parameters immediately
    """
    if not self.jobID:
      return S_OK( 'Local execution, jobID is null.' )
    timeStamp = Time.toString()
    # add job parameter record
    for pname, pvalue in parameters:
      self.jobParameters[pname] = ( pvalue, timeStamp )
    if sendFlag:
      # and send
      return self.sendStoredJobParameters()
    return S_OK()

  def sendStoredStatusInfo( self ):
    """ Send the job status information stored in the internal cache

    Flushes both job-status and application-status records in one bulk RPC.
    On success the caches are emptied; on failure they are kept for retry.
    """
    # NOTE(review): records are keyed by timestamp, so a job status and an
    # application status sharing the same timestamp overwrite each other —
    # confirm this collision is acceptable.
    statusDict = {}
    for status, minor, dtime in self.jobStatusInfo:
      statusDict[dtime] = { 'Status': status,
                            'MinorStatus': minor,
                            'ApplicationStatus': '',
                            'Source': self.source }
    for appStatus, dtime in self.appStatusInfo:
      statusDict[dtime] = { 'Status': '',
                            'MinorStatus': '',
                            'ApplicationStatus': appStatus,
                            'Source': self.source }
    if statusDict:
      jobMonitor = RPCClient( 'WorkloadManagement/JobStateUpdate', timeout = 60 )
      result = jobMonitor.setJobStatusBulk( self.jobID, statusDict )
      if result['OK']:
        # Empty the internal status containers
        self.jobStatusInfo = []
        self.appStatusInfo = []
      return result
    else:
      return S_OK( 'Empty' )

  def sendStoredJobParameters( self ):
    """ Send the job parameters stored in the internal cache

    On success the parameter cache is emptied; on failure it is kept.
    """
    parameters = []
    for pname, value in self.jobParameters.items():
      pvalue, _timeStamp = value
      parameters.append( ( pname, pvalue ) )
    if parameters:
      jobMonitor = RPCClient( 'WorkloadManagement/JobStateUpdate', timeout = 60 )
      result = jobMonitor.setJobParameters( self.jobID, parameters )
      if not result['OK']:
        return result
      # NOTE(review): this check is redundant — result['OK'] is always
      # true after the early return above
      if result['OK']:
        # Empty the internal parameter container
        self.jobParameters = {}
      return result
    else:
      return S_OK( 'Empty' )

  def commit( self ):
    """ Send all the accumulated information

    Attempts both flushes even if the first fails, then reports overall
    success or failure.
    """
    success = True
    result = self.sendStoredStatusInfo()
    if not result['OK']:
      success = False
    result = self.sendStoredJobParameters()
    if not result['OK']:
      success = False
    if success:
      return S_OK()
    return S_ERROR( 'Information upload to JobStateUpdate service failed' )

  def dump( self ):
    """ Print out the contents of the internal cached information
    """
    # NOTE: Python 2 print statements — this module targets Python 2
    print "Job status info:"
    for status, minor, timeStamp in self.jobStatusInfo:
      print status.ljust( 20 ), minor.ljust( 30 ), timeStamp
    print "Application status info:"
    for status, timeStamp in self.appStatusInfo:
      print status.ljust( 20 ), timeStamp
    print "Job parameters:"
    for pname, value in self.jobParameters.items():
      pvalue, timeStamp = value
      print pname.ljust( 20 ), pvalue.ljust( 30 ), timeStamp

  def generateForwardDISET( self ):
    """ Generate and return failover requests for the operations in the internal cache

    :return: S_OK( Operation ) with a ForwardDISET operation when the send
             failed but an rpcStub is available, S_OK( None ) when the send
             succeeded, S_ERROR otherwise
    """
    forwardDISETOp = None
    result = self.sendStoredStatusInfo()
    if not result['OK']:
      gLogger.error( "Error while sending the job status", result['Message'] )
      if 'rpcStub' in result:
        # package the failed call so it can be replayed later
        rpcStub = result['rpcStub']
        forwardDISETOp = Operation()
        forwardDISETOp.Type = "ForwardDISET"
        forwardDISETOp.Arguments = DEncode.encode( rpcStub )
      else:
        return S_ERROR( 'Could not create ForwardDISET operation' )
    return S_OK( forwardDISETOp )
|
Andrew-McNab-UK/DIRAC
|
WorkloadManagementSystem/Client/JobReport.py
|
Python
|
gpl-3.0
| 5,984
|
[
"DIRAC"
] |
ed7c61b86fbb0094df42e2be1239a87242a0d71b81d00fc5c24ae044acdcfb76
|
import copy
import exchange
import lan
from lan import ast_buildingblock as ast_bb
from processing import collect_array as ca
from processing import collect_device as cd
from processing import collect_gen as cg
from processing import collect_id as ci
from processing import collect_loop as cl
class Stencil(object):
    """AST transformation that adds local ( __local ) memory buffering for
    stencil-style array accesses in a generated OpenCL kernel.

    NOTE(review): original indentation was lost in extraction; the loop
    nesting below is reconstructed from the code's data flow — verify
    against the upstream repository.
    """

    def __init__(self, ast):
        # root of the kernel AST to be transformed in place
        self.ast = ast

    def stencil(self, arr_names, west=0, north=0, east=0, south=0, middle=1):
        """Insert local-memory declarations and load code for ``arr_names``.

        :param arr_names: names of the arrays to buffer in local memory
        :param west/north/east/south/middle: 0/1 flags selecting which
            stencil neighbours are loaded; defaults load only the centre
        """
        # map each direction flag to its (x, y) offset and keep the selected ones
        direction = [west, north, east, south, middle]
        dirname = [(0, -1), (1, 0), (0, 1), (-1, 0), (0, 0)]
        loadings = [elem for i, elem in enumerate(dirname)
                    if direction[i] == 1]
        if not loadings:
            # nothing selected: fall back to loading the centre point only
            loadings = [(0, 0)]
        # finding the correct local memory size
        arr_name = arr_names[0]
        local = cl.get_local(self.ast)
        num_array_dims = ca.get_num_array_dims(self.ast)
        par_dim = cl.get_par_dim(self.ast)
        # start from the work-group size in every array dimension
        local_dims = [int(local['size'][0]) for _ in xrange(num_array_dims[arr_name])]
        if par_dim == 1 and len(local_dims) == 2:
            local_dims[0] = 1
        index_in_subscript = ca.get_indices_in_array_ref(self.ast)
        arr_idx = index_in_subscript[arr_name]
        (lower_limit, _) = cl.get_loop_limits(self.ast)
        local_offset = [int(lower_limit[i]) for i in arr_idx]
        # grow the local buffer to cover the selected neighbour offsets
        for (x, y) in loadings:
            local_dims[0] += abs(x)
            if num_array_dims[arr_name] == 2:
                local_dims[1] += abs(y)
        stats = []
        types = ci.get_types(self.ast)
        # declare one __local array per buffered array
        for arr_name in arr_names:
            local_name = arr_name + '_local'
            array_init = lan.Constant(local_dims[0])
            if len(local_dims) == 2:
                # 2-D buffers are declared flat: rows * cols elements
                array_init = [lan.BinOp(lan.Constant(local_dims[0]), '*', lan.Constant(local_dims[1]))]
            local_array_type_id = lan.ArrayTypeId(['__local'] + [types[arr_name][0]], lan.Id(local_name),
                                                  array_init)
            self.ast.ext.append(lan.Stencil(lan.Id(arr_name), lan.Id(local_name),
                                            [local['size'][0], local['size'][0]]))
            stats.append(local_array_type_id)
        init_comp = lan.GroupCompound(stats)
        stats2 = []
        load_comp = lan.GroupCompound(stats2)
        reverse_idx = cg.get_reverse_idx(self.ast)
        grid_indices = cl.get_grid_indices(self.ast)
        # Insert local id with offset
        for i, offset in enumerate(local_offset):
            idd = reverse_idx[i] if len(local_offset) == 2 else i
            get_local_func_decl = ast_bb.FuncCall('get_local_id', [lan.Constant(idd)])
            if offset != 0:
                rval = lan.BinOp(get_local_func_decl, '+', lan.Constant(offset))
            else:
                rval = lan.Id(get_local_func_decl)
            lval = lan.TypeId(['unsigned'], lan.Id('l' + grid_indices[i]))
            stats.append(lan.Assignment(lval, rval))
        local_swap = ci.get_local_swap(self.ast)
        index_to_local_var = cg.get_local_array_idx(self.ast)
        exchange_indices = exchange.ExchangeIndices(index_to_local_var, local_swap.values())
        loop_arrays = ca.get_loop_arrays(self.ast)
        # Creating the loading of values into the local array.
        for arr_name in arr_names:
            for k, l in enumerate(loadings):
                array_id = lan.Id(arr_name)
                # get first ArrayRef
                aref = loop_arrays[arr_name][k]
                subscript = aref.subscript
                lsub = copy.deepcopy(subscript)
                lval = lan.ArrayRef(lan.Id(local_swap[arr_name]), lsub)
                rsub = copy.deepcopy(subscript)
                rval = lan.ArrayRef(array_id, rsub, extra={'localMemory': True})
                load = lan.Assignment(lval, rval)
                # rewrite the original (global) subscript to local indices
                exchange_id = exchange.ExchangeId(index_to_local_var)
                orisub = subscript
                for m in orisub:
                    exchange_id.visit(m)
                stats2.append(load)
        # Must also create the barrier
        arglist = lan.ArgList([lan.Id('CLK_LOCAL_MEM_FENCE')])
        func = ast_bb.EmptyFuncDecl('barrier', type=[])
        func.arglist = arglist
        stats2.append(func)
        # rewrite indices inside the generated compounds, then prepend them
        # to the kernel body: declarations first, then the load + barrier
        exchange_indices.visit(init_comp)
        exchange_indices.visit(load_comp)
        kernel = cd.get_kernel(self.ast)
        kernel.statements.insert(0, load_comp)
        kernel.statements.insert(0, init_comp)
|
dikujepsen/OpenTran
|
v3.0/framework/transformation/stencil.py
|
Python
|
mit
| 4,516
|
[
"VisIt"
] |
10afea3b7cb596cde37ca912f49ba60c1d15e0ff5912893a218a9e7b5aa34aa7
|
#!/usr/bin/env python
import fnmatch
import os
from argparse import ArgumentParser
import json
import subprocess
def combine_known_symbols(kn1, kn2):
    """Merge symbol counts from ``kn2`` into ``kn1`` in place.

    Counts for symbols present in both dictionaries are summed.
    Returns the (mutated) ``kn1``.
    """
    for symbol, count in kn2.items():
        kn1[symbol] = kn1[symbol] + count if symbol in kn1 else count
    return kn1
def parse_folder(folder):
    """Scan ``folder`` recursively for LaTeX root documents and build a
    combined symbol-count dictionary from them.

    For every ``*.tex`` file containing ``\\documentclass``, the external
    ``build-language-model.py`` script is invoked; its JSON result (read
    back from ``out.txt``) is merged into the accumulated counts.

    :param folder: directory tree to scan
    :return: dict mapping LaTeX symbol -> total occurrence count
    """
    matches = []
    for root, dirnames, filenames in os.walk(folder):
        for filename in fnmatch.filter(filenames, '*.tex'):
            matches.append(os.path.join(root, filename))
    known_symbols = {}
    for filename in matches:
        if is_latex_root(filename):
            print(filename)
            # Bug fix: the original passed a list together with shell=True,
            # which hands only the first element to the shell and silently
            # drops the -f/-o arguments (they become $0, $1 of the shell).
            # Pass an argument list with the default shell=False so every
            # flag and value actually reaches the script.
            proc = subprocess.Popen(
                ["./build-language-model.py", "-f", filename, "-o", "out.txt"],
                stdout=subprocess.PIPE)
            proc.communicate()
            # the script writes its JSON result to out.txt; read it back
            with open("out.txt") as f:
                out = f.read()
            kn2 = json.loads(out)
            known_symbols = combine_known_symbols(known_symbols, kn2)
    return known_symbols
def is_latex_root(filename):
    """Return True if ``filename`` is a LaTeX root document, i.e. it
    contains a ``\\documentclass`` declaration anywhere in its text."""
    with open(filename) as handle:
        return "\\documentclass" in handle.read()
def print_known_symbols(known_symbols):
    """Print each symbol with a positive count, most frequent first."""
    ordered = sorted(known_symbols.items(),
                     key=lambda item: item[1],
                     reverse=True)
    for latex, counter in ordered:
        if counter <= 0:
            continue
        print("%s: %i" % (latex, counter))
if __name__ == '__main__':
    # Command-line entry point: parse the target folder and print the
    # aggregated symbol counts.
    parser = ArgumentParser()
    default_folder = '/home/moose/Downloads/LaTeX-examples/'
    parser.add_argument("-f", "--folder", dest="folder",
                        default=default_folder,
                        help="folder with multiple LaTeX files",
                        metavar="FOLDER")
    args = parser.parse_args()
    print_known_symbols(parse_folder(args.folder))
|
MartinThoma/write-math
|
tools/language_model/language-model-building/parse_folder.py
|
Python
|
mit
| 1,918
|
[
"MOOSE"
] |
18400fca9ccac09d574947e819ca50395ce6217814b81ff77d94c193bcd5e15b
|
# !usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Licensed under a 3-clause BSD license.
#
# @Author: Brian Cherinka
# @Date: 2017-02-12 17:38:51
# @Last modified by: Brian Cherinka
# @Last Modified time: 2017-03-25 16:08:37
from __future__ import print_function, division, absolute_import
from flask_testing import TestCase
from marvin.web import create_app
from marvin import config, marvindb
from marvin.tests import MarvinTest
class MarvinWebTester(MarvinTest, TestCase):
    ''' Base Marvin Web Tester for Flask and API '''

    def create_app(self):
        # Flask-Testing hook: build the application under test with CSRF
        # and the profiler disabled so form posts work without tokens.
        app = create_app(debug=True, local=True, use_profiler=False)
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        app.config['PRESERVE_CONTEXT_ON_EXCEPTION'] = False
        return app

    @classmethod
    def setUpClass(cls):
        super(MarvinWebTester, cls).setUpClass()

    def setUp(self):
        # NOTE(review): this binds a LOCAL name 'marvindb' that shadows the
        # module-level import; test_db_stuff below still uses the
        # module-level object — confirm the shadowing is intentional.
        marvindb = self._marvindb
        self.session = marvindb.session
        self.long_message = True
        # per-request bookkeeping, populated by _load_page/_load_data
        self.response = None
        self.data = None
        self.json = None
        self.set_sasurl('local')
        config.forceDbOn()
        self.urlmap = config.urlmap
        # blueprint name; subclasses set this before calling get_url
        self.blue = None

    def tearDown(self):
        pass

    def _load_page(self, reqtype, page, params=None):
        # Issue a GET or POST to `page` and cache the parsed response.
        # Unknown reqtype values are silently ignored (self.response stays None).
        if reqtype == 'get':
            self.response = self.client.get(page, query_string=params)
        elif reqtype == 'post':
            self.response = self.client.post(page, data=params, content_type='application/x-www-form-urlencoded')
        self._load_data()

    def _load_data(self):
        # Extract JSON body (None if unparseable) and its 'data' field
        # ('' when absent) into instance attributes.
        try:
            self.json = self.response.json
        except ValueError as e:
            self.json = None
        self.data = self.json['data'] if self.json and 'data' in self.json else ''

    def get_url(self, endpoint):
        # Resolve an endpoint name to its URL via the configured url map,
        # using the blueprint name stored in self.blue.
        return self.urlmap[self.blue][endpoint]['url']

    def assert422(self, response, message=None):
        # Convenience assertion for "Unprocessable Entity" responses.
        self.assertStatus(response, 422, message)

    def assertListIn(self, a, b):
        ''' assert all items in list a are in b '''
        for item in a:
            self.assertIn(item, b)

    def _assert_webjson_success(self, data):
        # Assert a 200 response whose JSON 'result' matches `data`,
        # dispatching on the type of the expected value.
        self.assert200(self.response, message='response status should be 200 for ok')
        if isinstance(data, str):
            self.assertIn(data, self.json['result'])
        elif isinstance(data, dict):
            self.assertEqual(1, self.json['result']['status'])
            self.assertDictContainsSubset(data, self.json['result'])
        elif isinstance(data, list):
            self.assertListIn(data, self.json['result'])

    def _route_no_valid_webparams(self, url, noparam, reqtype='get', params=None, errmsg=None):
        # Assert that a request with invalid parameters yields a 422 page
        # carrying a validation-error dict for each parameter in `noparam`.
        self._load_page(reqtype, url, params=params)
        self.assert422(self.response, message='response status should be 422 for invalid params')
        self.assert_template_used('errors/unprocessable_entity.html')
        noparam = [noparam] if not isinstance(noparam, list) else noparam
        invalid = {p: [errmsg] for p in noparam}
        self.assert_context('data', invalid, message='response should contain validation error dictionary')

    def test_db_stuff(self):
        # Sanity check that the module-level marvindb and its sub-databases
        # are available and configured for local access.
        self.assertIsNotNone(marvindb)
        self.assertIsNotNone(marvindb.datadb)
        self.assertIsNotNone(marvindb.sampledb)
        self.assertIsNotNone(marvindb.dapdb)
        self.assertEqual('local', marvindb.dbtype)

    # def assert_template_used(self, name):
    #     ''' overriding the built-in one in Flask-Testing so we can also test against error templates '''
    #     template_list = self.app.jinja_env.list_templates()
    #     if name in template_list:
    #         return True
    #     else:
    #         raise AssertionError("Template {0} not used. Templates were used: {1}".format(name, ', '.join(template_list)))
|
bretthandrews/marvin
|
python/marvin/tests/web/__init__.py
|
Python
|
bsd-3-clause
| 3,851
|
[
"Brian"
] |
dadbc8a9b1da4d0160e5d1c200c8c836475025c6202b9412d294d23f322f4696
|
###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
#
# This code generated (see starthinker/scripts for possible source):
# - Command: "python starthinker_ui/manage.py airflow"
#
###########################################################################
'''
--------------------------------------------------------------
Before running this Airflow module...
Install StarThinker in cloud composer ( recommended ):
From Release: pip install starthinker
From Open Source: pip install git+https://github.com/google/starthinker
Or push local code to the cloud composer plugins directory ( if pushing local code changes ):
source install/deploy.sh
4) Composer Menu
l) Install All
--------------------------------------------------------------
If any recipe task has "auth" set to "user" add user credentials:
1. Ensure an RECIPE['setup']['auth']['user'] = [User Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_user", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/deploy_commandline.md#optional-setup-user-credentials
--------------------------------------------------------------
If any recipe task has "auth" set to "service" add service credentials:
1. Ensure an RECIPE['setup']['auth']['service'] = [Service Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_service", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/cloud_service.md
--------------------------------------------------------------
Airflow Composer Example
Demonstration that uses Airflow/Composer native, Airflow/Composer local, and StarThinker tasks in the same generated DAG.
- Execute this using Airflow or Composer, the Colab and UI recipe is for refence only.
- This is an example DAG that will execute and print dates and text.
- Run it once to ensure everything works, then customize it.
--------------------------------------------------------------
This StarThinker DAG can be extended with any additional tasks from the following sources:
- https://google.github.io/starthinker/
- https://github.com/google/starthinker/tree/master/dags
'''
from starthinker.airflow.factory import DAG_Factory

# User-editable recipe inputs; referenced by 'field' placeholders in RECIPE.
INPUTS = {
  'auth_read':'user',  # Credentials used for reading data.
}

# Recipe definition: a 'setup' schedule plus the ordered list of tasks the
# generated DAG will run.
RECIPE = {
  'setup':{
    # Days of the week on which the DAG is allowed to run.
    'week':[
      'Mon',
      'Tue',
      'Wed',
      'Thu',
      'Fri',
      'Sat',
      'Sun'
    ],
    # Hours of the day (0-23) at which the DAG is allowed to run.
    'hour':[
      0,
      1,
      2,
      3,
      4,
      5,
      6,
      7,
      8,
      9,
      10,
      11,
      12,
      13,
      14,
      15,
      16,
      17,
      18,
      19,
      20,
      21,
      22,
      23
    ]
  },
  'tasks':[
    {
      # Task 1: wraps a native Airflow operator (runs `date` in bash).
      'airflow':{
        '__comment__':'Calls a native Airflow operator.',
        'operators':{
          'bash_operator':{
            'BashOperator':{
              'bash_command':'date'
            }
          }
        }
      }
    },
    {
      # Task 2: wraps a custom StarThinker-provided Airflow operator.
      'starthinker.airflow':{
        '__comment__':'Calls an custom operator, requires import of library.',
        'operators':{
          'hello':{
            'Hello':{
              'say':'Hi, there!'
            }
          }
        }
      }
    },
    {
      # Task 3: a plain StarThinker task; 'auth' is resolved from INPUTS.
      'hello':{
        '__comment__':'Calls a StarThinker task.',
        'auth':{'field':{'name':'auth_read','kind':'authentication','order':1,'default':'user','description':'Credentials used for reading data.'}},
        'say':'Hello World'
      }
    }
  ]
}

# Build the Airflow DAG from the recipe. 'dag' must remain a module-level
# name so the Airflow scheduler can discover it.
dag_maker = DAG_Factory('airflow', RECIPE, INPUTS)
dag = dag_maker.generate()

if __name__ == "__main__":
  # When run directly, print the equivalent command line instead of
  # registering the DAG.
  dag_maker.print_commandline()
|
google/starthinker
|
dags/airflow_dag.py
|
Python
|
apache-2.0
| 4,871
|
[
"VisIt"
] |
90eaca74fb72c9293d190122c67156ede3827f437a1f8cb79efb20e15e397b68
|
hucs = [['01','New England'],
['0101','St. John'],
['010100','St. John'],
['01010001','Upper St. John'],
['01010002','Allagash'],
['01010003','Fish'],
['01010004','Aroostook'],
['01010005','Meduxnekeag'],
['0102','Penobscot'],
['010200','Penobscot'],
['01020001','West Branch Penobscot'],
['01020002','East Branch Penobscot'],
['01020003','Mattawamkeag'],
['01020004','Piscataquis'],
['01020005','Lower Penobscot'],
['0103','Kennebec'],
['010300','Kennebec'],
['01030001','Upper Kennebec'],
['01030002','Dead'],
['01030003','Lower Kennebec'],
['0104','Androscoggin'],
['010400','Androscoggin'],
['01040001','Upper Androscoggin'],
['01040002','Lower Androscoggin'],
['0105','Maine Coastal'],
['010500','Maine Coastal'],
['01050001','St. Croix'],
['01050002','Maine Coastal'],
['01050003','St. George-Sheepscot'],
['0106','Saco'],
['010600','Saco'],
['01060001','Presumpscot'],
['01060002','Saco'],
['01060003','Piscataqua-Salmon Falls'],
['0107','Merrimack'],
['010700','Merrimack'],
['01070001','Pemigewasset'],
['01070002','Merrimack'],
['01070003','Contoocook'],
['01070004','Nashua'],
['01070005','Concord'],
['0108','Connecticut'],
['010801','Upper Connecticut'],
['01080101','Upper Connecticut'],
['01080102','Passumpsic'],
['01080103','Waits'],
['01080104','Upper Connecticut-Mascoma'],
['01080105','White'],
['01080106','Black-Ottauquechee'],
['01080107','West'],
['010802','Lower Connecticut'],
['01080201','Middle Connecticut'],
['01080202','Miller'],
['01080203','Deerfield'],
['01080204','Chicopee'],
['01080205','Lower Connecticut'],
['01080206','Westfield'],
['01080207','Farmington'],
['0109','Massachusetts-Rhode Island Coastal'],
['010900','Massachusetts-Rhode Island Coastal'],
['01090001','Charles'],
['01090002','Cape Cod'],
['01090003','Blackstone'],
['01090004','Narragansett'],
['01090005','Pawcatuck-Wood'],
['0110','Connecticut Coastal'],
['011000','Connecticut Coastal'],
['01100001','Quinebaug'],
['01100002','Shetucket'],
['01100003','Thames'],
['01100004','Quinnipiac'],
['01100005','Housatonic'],
['01100006','Saugatuck'],
['01100007','Long Island Sound'],
['0111','St. Francois'],
['011100','St. Francois'],
['01110000','St. Francois'],
['02','Mid Atlantic'],
['0201','Richelieu'],
['020100','Richelieu'],
['02010001','Lake George'],
['02010002','Otter'],
['02010003','Winooski'],
['02010004','Ausable'],
['02010005','Lamoille'],
['02010006','Great Chazy-Saranac'],
['02010007','Missisquoi'],
['0202','Upper Hudson'],
['020200','Upper Hudson'],
['02020001','Upper Hudson'],
['02020002','Sacandaga'],
['02020003','Hudson-Hoosic'],
['02020004','Mohawk'],
['02020005','Schoharie'],
['02020006','Middle Hudson'],
['02020007','Rondout'],
['02020008','Hudson-Wappinger'],
['0203','Lower Hudson-Long Island'],
['020301','Lower Hudson'],
['02030101','Lower Hudson'],
['02030102','Bronx'],
['02030103','Hackensack-Passaic'],
['02030104','Sandy Hook-Staten Island'],
['02030105','Raritan'],
['020302','Long Island'],
['02030201','Northern Long Island'],
['02030202','Southern Long Island'],
['0204','Delaware'],
['020401','Upper Delaware'],
['02040101','Upper Delaware'],
['02040102','East Branch Delaware'],
['02040103','Lackawaxen'],
['02040104','Middle Delaware-Mongaup-Brodhead'],
['02040105','Middle Delaware-Musconetcong'],
['02040106','Lehigh'],
['020402','Lower Delaware'],
['02040201','Crosswicks-Neshaminy'],
['02040202','Lower Delaware'],
['02040203','Schuylkill'],
['02040204','Delaware Bay'],
['02040205','Brandywine-Christina'],
['02040206','Cohansey-Maurice'],
['02040207','Broadkill-Smyrna'],
['020403','New Jersey Coastal'],
['02040301','Mullica-Toms'],
['02040302','Great Egg Harbor'],
['0205','Susquehanna'],
['020501','Upper Susquehanna'],
['02050101','Upper Susquehanna'],
['02050102','Chenango'],
['02050103','Owego-Wappasening'],
['02050104','Tioga'],
['02050105','Chemung'],
['02050106','Upper Susquehanna-Tunkhannock'],
['02050107','Upper Susquehanna-Lackawanna'],
['020502','West Branch Susquehanna'],
['02050201','Upper West Branch Susquehanna'],
['02050202','Sinnemahoning'],
['02050203','Middle West Branch Susquehanna'],
['02050204','Bald Eagle'],
['02050205','Pine'],
['02050206','Lower West Branch Susquehanna'],
['020503','Lower Susquehanna'],
['02050301','Lower Susquehanna-Penns'],
['02050302','Upper Juniata'],
['02050303','Raystown'],
['02050304','Lower Juniata'],
['02050305','Lower Susquehanna-Swatara'],
['02050306','Lower Susquehanna'],
['0206','Upper Chesapeake'],
['020600','Upper Chesapeake'],
['02060001','Upper Chesapeake Bay'],
['02060002','Chester-Sassafras'],
['02060003','Gunpowder-Patapsco'],
['02060004','Severn'],
['02060005','Choptank'],
['02060006','Patuxent'],
['02060007','Blackwater-Wicomico'],
['02060008','Nanticoke'],
['02060009','Pocomoke'],
['02060010','Chincoteague'],
['0207','Potomac'],
['020700','Potomac'],
['02070001','South Branch Potomac'],
['02070002','North Branch Potomac'],
['02070003','Cacapon-Town'],
['02070004','Conococheague-Opequon'],
['02070005','South Fork Shenandoah'],
['02070006','North Fork Shenandoah'],
['02070007','Shenandoah'],
['02070008','Middle Potomac-Catoctin'],
['02070009','Monocacy'],
['02070010','Middle Potomac-Anacostia-Occoquan'],
['02070011','Lower Potomac'],
['0208','Lower Chesapeake'],
['020801','Lower Chesapeake'],
['02080101','Lower Chesapeake Bay'],
['02080102','Great Wicomico-Piankatank'],
['02080103','Rapidan-Upper Rappahannock'],
['02080104','Lower Rappahannock'],
['02080105','Mattaponi'],
['02080106','Pamunkey'],
['02080107','York'],
['02080108','Lynnhaven-Poquoson'],
['02080109','Western Lower Delmarva'],
['02080110','Eastern Lower Delmarva'],
['020802','James'],
['02080201','Upper James'],
['02080202','Maury'],
['02080203','Middle James-Buffalo'],
['02080204','Rivanna'],
['02080205','Middle James-Willis'],
['02080206','Lower James'],
['02080207','Appomattox'],
['02080208','Hampton Roads'],
['03','South Atlantic-Gulf'],
['0301','Chowan-Roanoke'],
['030101','Roanoke'],
['03010101','Upper Roanoke'],
['03010102','Middle Roanoke'],
['03010103','Upper Dan'],
['03010104','Lower Dan'],
['03010105','Banister'],
['03010106','Roanoke Rapids'],
['03010107','Lower Roanoke'],
['030102','Albemarle-Chowan'],
['03010201','Nottoway'],
['03010202','Blackwater'],
['03010203','Chowan'],
['03010204','Meherrin'],
['03010205','Albemarle'],
['0302','Neuse-Pamlico'],
['030201','Pamlico'],
['03020101','Upper Tar'],
['03020102','Fishing'],
['03020103','Lower Tar'],
['03020104','Pamlico'],
['03020105','Pamlico Sound'],
['03020106','Bogue-Core Sounds'],
['030202','Neuse'],
['03020201','Upper Neuse'],
['03020202','Middle Neuse'],
['03020203','Contentnea'],
['03020204','Lower Neuse'],
['0303','Cape Fear'],
['030300','Cape Fear'],
['03030001','New'],
['03030002','Haw'],
['03030003','Deep'],
['03030004','Upper Cape Fear'],
['03030005','Lower Cape Fear'],
['03030006','Black'],
['03030007','Northeast Cape Fear'],
['0304','Pee Dee'],
['030401','Upper Pee Dee'],
['03040101','Upper Yadkin'],
['03040102','South Yadkin'],
['03040103','Lower Yadkin'],
['03040104','Upper Pee Dee'],
['03040105','Rocky'],
['030402','Lower Pee Dee'],
['03040201','Lower Pee Dee'],
['03040202','Lynches'],
['03040203','Lumber'],
['03040204','Little Pee Dee'],
['03040205','Black'],
['03040206','Waccamaw'],
['03040207','Carolina Coastal-Sampit'],
['0305','Edisto-Santee'],
['030501','Santee'],
['03050101','Upper Catawba'],
['03050102','South Fork Catawba'],
['03050103','Lower Catawba'],
['03050104','Wateree'],
['03050105','Upper Broad'],
['03050106','Lower Broad'],
['03050107','Tyger'],
['03050108','Enoree'],
['03050109','Saluda'],
['03050110','Congaree'],
['03050111','Lake Marion'],
['03050112','Santee'],
['030502','Edisto-South Carolina Coastal'],
['03050201','Cooper'],
['03050202','South Carolina Coastal'],
['03050203','North Fork Edisto'],
['03050204','South Fork Edisto'],
['03050205','Edisto'],
['03050206','Four Hole Swamp'],
['03050207','Salkehatchie'],
['03050208','Broad-St. Helena'],
['0306','Ogeechee-Savannah'],
['030601','Savannah'],
['03060101','Seneca'],
['03060102','Tugaloo'],
['03060103','Upper Savannah'],
['03060104','Broad'],
['03060105','Little'],
['03060106','Middle Savannah'],
['03060107','Stevens'],
['03060108','Brier'],
['03060109','Lower Savannah'],
['030602','Ogeechee'],
['03060201','Upper Ogeechee'],
['03060202','Lower Ogeechee'],
['03060203','Canoochee'],
['03060204','Ogeechee Coastal'],
['0307','Altamaha - St. Marys'],
['030701','Altamaha'],
['03070101','Upper Oconee'],
['03070102','Lower Oconee'],
['03070103','Upper Ocmulgee'],
['03070104','Lower Ocmulgee'],
['03070105','Little Ocmulgee'],
['03070106','Altamaha'],
['03070107','Ohoopee'],
['030702','St. Marys - Satilla'],
['03070201','Satilla'],
['03070202','Little Satilla'],
['03070203','Cumberland-St. Simons'],
['03070204','St. Marys'],
['03070205','Nassau'],
['0308','St. Johns'],
['030801','St. Johns'],
['03080101','Upper St. Johns'],
['03080102','Oklawaha'],
['03080103','Lower St. Johns'],
['030802','East Florida Coastal'],
['03080201','Daytona - St. Augustine'],
['03080202','Cape Canaveral'],
['03080203','Vero Beach'],
['0309','Southern Florida'],
['030901','Kissimmee'],
['03090101','Kissimmee'],
['03090102','Northern Okeechobee Inflow'],
['03090103','Western Okeechobee Inflow'],
['030902','Southern Florida'],
['03090201','Lake Okeechobee'],
['03090202','Everglades'],
['03090203','Florida Bay-Florida Keys'],
['03090204','Big Cypress Swamp'],
['03090205','Caloosahatchee'],
['0310','Peace-Tampa Bay'],
['031001','Peace'],
['03100101','Peace'],
['03100102','Myakka'],
['03100103','Charlotte Harbor'],
['031002','Tampa Bay'],
['03100201','Sarasota Bay'],
['03100202','Manatee'],
['03100203','Little Manatee'],
['03100204','Alafia'],
['03100205','Hillsborough'],
['03100206','Tampa Bay'],
['03100207','Crystal-Pithlachascotee'],
['03100208','Withlacoochee'],
['0311','Suwannee'],
['031101','Aucilla-Waccasassa'],
['03110101','Waccasassa'],
['03110102','Econfina-Steinhatchee'],
['03110103','Aucilla'],
['031102','Suwannee'],
['03110201','Upper Suwannee'],
['03110202','Alapaha'],
['03110203','withlacoochee'],
['03110204','Little'],
['03110205','Lower Suwannee'],
['03110206','Santa Fe'],
['0312','Ochlockonee'],
['031200','Ochlockonee. Georgia'],
['03120001','Apalachee Bay-St. Marks'],
['03120002','Upper Ochlockonee'],
['03120003','Lower Ochlockonee'],
['0313','Apalachicola'],
['031300','Apalachicola'],
['03130001','Upper Chattahoochee'],
['03130002','Middle Chattahoochee-Lake Harding'],
['03130003','Middle Chattahoochee-Walter F. George Reservoir'],
['03130004','Lower Chattahoochee'],
['03130005','Upper Flint'],
['03130006','Middle Flint'],
['03130007','Kinchafoonee-Muckalee'],
['03130008','Lower Flint'],
['03130009','Ichawaynochaway'],
['03130010','Spring'],
['03130011','Apalachicola'],
['03130012','Chipola'],
['03130013','New'],
['03130014','Apalachicola Bay'],
['0314','Choctawhatchee - Escambia'],
['031401','Florida Panhandle Coastal'],
['03140101','St. Andrew-St. Joseph Bays'],
['03140102','Choctawhatchee Bay'],
['03140103','Yellow'],
['03140104','Blackwater'],
['03140105','Pensacola Bay'],
['03140106','Perdido'],
['03140107','Perdido Bay'],
['031402','Choctawhatchee'],
['03140201','Upper Choctawhatchee'],
['03140202','Pea'],
['03140203','Lower Choctawhatchee'],
['031403','Escambia'],
['03140301','Upper Conecuh'],
['03140302','Patsaliga'],
['03140303','Sepulga'],
['03140304','Lower Conecuh'],
['03140305','Escambia'],
['0315','Alabama'],
['031501','Coosa-Tallapoosa'],
['03150101','Conasauga'],
['03150102','Coosawattee'],
['03150103','Oostanaula'],
['03150104','Etowah'],
['03150105','Upper Coosa'],
['03150106','Middle Coosa'],
['03150107','Lower Coosa'],
['03150108','Upper Tallapoosa'],
['03150109','Middle Tallapoosa'],
['03150110','Lower Tallapoosa'],
['031502','Alabama'],
['03150201','Upper Alabama'],
['03150202','Cahaba'],
['03150203','Middle Alabama'],
['03150204','Lower Alabama'],
['0316','Mobile - Tombigbee'],
['031601','Black Warrior - Tombigbee'],
['03160101','Upper Tombigbee'],
['03160102','Town'],
['03160103','Buttahatchee'],
['03160104','Tibbee'],
['03160105','Luxapallila'],
['03160106','Middle Tombigbee-Lubbub'],
['03160107','Sipsey'],
['03160108','Noxubee'],
['03160109','Mulberry'],
['03160110','Sipsey Fork'],
['03160111','Locust'],
['03160112','Upper Black Warrior'],
['03160113','Lower Black Warrior'],
['031602','Mobile Bay- Tombigbee'],
['03160201','Middle Tombigbee-Chickasaw'],
['03160202','Sucarnoochee'],
['03160203','Lower Tambigbee'],
['03160204','Mobile - Tensaw'],
['03160205','Mobile Bay'],
['0317','Pascagoula'],
['031700','Pascagoula. Mississippi'],
['03170001','Chunky-Okatibbee'],
['03170002','Upper Chickasawhay'],
['03170003','Lower Chickasawhay'],
['03170004','Upper Leaf'],
['03170005','Lower Leaf'],
['03170006','Pascagoula'],
['03170007','Black'],
['03170008','Escatawpa'],
['03170009','Mississippi Coastal'],
['0318','Pearl'],
['031800','Pearl'],
['03180001','Upper Pearl'],
['03180002','Middle Pearl-Strong'],
['03180003','Middle Pearl-Silver'],
['03180004','Lower Pearl. Mississippi'],
['03180005','Bogue Chitto'],
['04','Great Lakes'],
['0401','Western Lake Superior'],
['040101','Northwestern Lake Superior'],
['04010101','Baptism-Brule'],
['04010102','Beaver-Lester'],
['040102','St. Louis'],
['04010201','St. Louis'],
['04010202','Cloquet'],
['040103','Southwestern Lake Superior'],
['04010301','Beartrap-Nemadji'],
['04010302','Bad-Montreal'],
['0402','Southern Lake Superior-Lake Superior'],
['040201','Southcentral Lake Superior'],
['04020101','Black-Presque Isle'],
['04020102','Ontonagon'],
['04020103','Keweenaw Peninsula'],
['04020104','Sturgeon'],
['04020105','Dead-Kelsey'],
['040202','Southeastern Lake Superior'],
['04020201','Betsy-Chocolay'],
['04020202','Tahquamenon'],
['04020203','Waiska'],
['040203','Lake Superior'],
['04020300','Lake Superior'],
['0403','Northwestern Lake Michigan'],
['040301','Northwestern Lake Michigan'],
['04030101','Manitowoc-Sheboygan'],
['04030102','Door-Kewaunee'],
['04030103','Duck-Pensaukee'],
['04030104','Oconto'],
['04030105','Peshtigo'],
['04030106','Brule'],
['04030107','Michigamme'],
['04030108','Menominee'],
['04030109','Cedar-Ford'],
['04030110','Escanaba'],
['04030111','Tacoosh-Whitefish'],
['04030112','Fishdam-Sturgeon'],
['040302','Fox'],
['04030201','Upper Fox'],
['04030202','Wolf'],
['04030203','Lake Winnebago'],
['04030204','Lower Fox'],
['0404','Southwestern Lake Michigan'],
['040400','Southwestern Lake Michigan'],
['04040001','Little Calumet-Galien'],
['04040002','Pike-Root'],
['04040003','Milwaukee'],
['0405','Southeastern Lake Michigan'],
['040500','Southeastern Lake Michigan'],
['04050001','St. Joseph'],
['04050002','Black-Macatawa'],
['04050003','Kalamazoo'],
['04050004','Upper Grand'],
['04050005','Maple'],
['04050006','Lower Grand'],
['04050007','Thornapple'],
['0406','Northeastern Lake Michigan-Lake Michigan'],
['040601','Northeastern Lake Michigan'],
['04060101','Pere Marquette-White'],
['04060102','Muskegon'],
['04060103','Manistee'],
['04060104','Betsie-Platte'],
['04060105','Boardman-Charlevoix'],
['04060106','Manistique'],
['04060107','Brevoort-Millecoquins'],
['040602','Lake Michigan'],
['04060200','Lake Michigan'],
['0407','Northwestern Lake Huron'],
['040700','Northwestern Lake Huron'],
['04070001','St. Marys'],
['04070002','Carp-Pine'],
['04070003','Lone Lake-Ocqueoc'],
['04070004','Cheboygan'],
['04070005','Black'],
['04070006','Thunder Bay'],
['04070007','Au Sable'],
['0408','Southwestern Lake Huron-Lake Huron'],
['040801','Southwestern Lake Huron'],
['04080101','Au Gres-Rifle'],
['04080102','Kawkawlin-Pine'],
['04080103','Pigeon-Wiscoggin'],
['04080104','Birch-Willow'],
['040802','Saginaw'],
['04080201','Tittabawassee'],
['04080202','Pine'],
['04080203','Shiawassee'],
['04080204','Flint'],
['04080205','Cass'],
['04080206','Saginaw'],
['040803','Lake Huron'],
['04080300','Lake Huron'],
['0409','St. Clair-Detroit'],
['040900','St. Clair-Detroit'],
['04090001','St. Clair'],
['04090002','Lake St. Clair'],
['04090003','Clinton'],
['04090004','Detroit'],
['04090005','Huron'],
['0410','Western Lake Erie'],
['041000','Western Lake Erie'],
['04100001','Ottawa-Stony'],
['04100002','Raisin'],
['04100003','St. Joseph'],
['04100004','St. Marys'],
['04100005','Upper Maumee'],
['04100006','Tiffin'],
['04100007','Auglaize'],
['04100008','Blanchard'],
['04100009','Lower Maumee'],
['04100010','Cedar-Portage'],
['04100011','Sandusky'],
['04100012','Huron-Vermilion'],
['0411','Southern Lake Erie'],
['041100','Southern Lake Erie'],
['04110001','Black-Rocky'],
['04110002','Cuyahoga'],
['04110003','Ashtabula-Chagrin'],
['04110004','Grand'],
['0412','Eastern Lake Erie-Lake Erie'],
['041201','Eastern Lake Erie'],
['04120101','Chautauqua-Conneaut'],
['04120102','Cattaraugus'],
['04120103','Buffalo-Eighteenmile'],
['04120104','Niagara'],
['041202','Lake Erie'],
['04120200','Lake Erie'],
['0413','Southwestern Lake Ontario'],
['041300','Southwestern Lake Ontario'],
['04130001','Oak Orchard-Twelvemile'],
['04130002','Upper Genesee'],
['04130003','Lower Genesee'],
['0414','Southeastern Lake Ontario'],
['041401','Southeastern Lake Ontario'],
['04140101','Irondequoit-Ninemile'],
['04140102','Salmon-Sandy'],
['041402','Oswego'],
['04140201','Seneca'],
['04140202','Oneida'],
['04140203','Oswego'],
['0415','Northeastern Lake Ontario-Lake Ontario-St. Lawrence'],
['041501','Northeastern Lake Ontario'],
['04150101','Black'],
['04150102','Chaumont-Perch'],
['041502','Lake Ontario'],
['04150200','Lake Ontario'],
['041503','St. Lawrence'],
['04150301','Upper St. Lawrence'],
['04150302','Oswegatchie'],
['04150303','Indian'],
['04150304','Grass'],
['04150305','Raquette'],
['04150306','St. Regis'],
['04150307','English-Salmon'],
['05','Ohio'],
['0501','Allegheny'],
['050100','Allegheny'],
['05010001','Upper Allegheny'],
['05010002','Conewango'],
['05010003','Middle Allegheny-Tionesta'],
['05010004','French'],
['05010005','Clarion'],
['05010006','Middle Allegheny-Redbank'],
['05010007','Conemaugh'],
['05010008','Kiskiminetas'],
['05010009','Lower Allegheny'],
['0502','Monongahela'],
['050200','Monongahela'],
['05020001','Tygart Valley'],
['05020002','West Fork'],
['05020003','Upper Monongahela'],
['05020004','Cheat'],
['05020005','Lower Monongahela'],
['05020006','Youghiogheny'],
['0503','Upper Ohio'],
['050301','Upper Ohio-Beaver'],
['05030101','Upper Ohio'],
['05030102','Shenango'],
['05030103','Mahoning'],
['05030104','Beaver'],
['05030105','Connoquenessing'],
['05030106','Upper Ohio-Wheeling'],
['050302','Upper Ohio-Little Kanawha'],
['05030201','Little Muskingum-Middle Island'],
['05030202','Upper Ohio-Shade'],
['05030203','Little Kanawha'],
['05030204','Hocking'],
['0504','Muskingum'],
['050400','Muskingum'],
['05040001','Tuscarawas'],
['05040002','Mohican'],
['05040003','Walhonding'],
['05040004','Muskingum'],
['05040005','Wills'],
['05040006','Licking'],
['0505','Kanawha'],
['050500','Kanawha'],
['05050001','Upper New'],
['05050002','Middle New'],
['05050003','Greenbrier'],
['05050004','Lower New'],
['05050005','Gauley'],
['05050006','Upper Kanawha'],
['05050007','Elk'],
['05050008','Lower Kanawha'],
['05050009','Coal'],
['0506','Scioto'],
['050600','Scioto'],
['05060001','Upper Scioto'],
['05060002','Lower Scioto'],
['05060003','Paint'],
['0507','Big Sandy-Guyandotte'],
['050701','Guyandotte'],
['05070101','Upper Guyandotte'],
['05070102','Lower Guyandotte'],
['050702','Big Sandy'],
['05070201','Tug'],
['05070202','Upper Levisa'],
['05070203','Lower Levisa'],
['05070204','Big Sandy'],
['0508','Great Miami'],
['050800','Great Miami'],
['05080001','Upper Great Miami'],
['05080002','Lower Great Miami'],
['05080003','Whitewater'],
['0509','Middle Ohio'],
['050901','Middle Ohio-Raccoon'],
['05090101','Raccoon-Symmes'],
['05090102','Twelvepole'],
['05090103','Little Scioto-Tygarts'],
['05090104','Little Sandy'],
['050902','Middle Ohio-Little Miami'],
['05090201','Ohio Brush-Whiteoak'],
['05090202','Little Miami'],
['05090203','Middle Ohio-Laughery'],
['0510','Kentucky-Licking'],
['051001','Licking'],
['05100101','Licking'],
['05100102','South Fork Licking'],
['051002','Kentucky'],
['05100201','North Fork Kentucky'],
['05100202','Middle Fork Kentucky'],
['05100203','South Fork Kentucky'],
['05100204','Upper Kentucky'],
['05100205','Lower Kentucky'],
['0511','Green'],
['051100','Green'],
['05110001','Upper Green'],
['05110002','Barren'],
['05110003','Middle Green'],
['05110004','Rough'],
['05110005','Lower Green'],
['05110006','Pond'],
['0512','Wabash'],
['051201','Wabash'],
['05120101','Upper Wabash'],
['05120102','Salamonie'],
['05120103','Mississinewa'],
['05120104','Eel'],
['05120105','Middle Wabash-Deer'],
['05120106','Tippecanoe'],
['05120107','Wildcat'],
['05120108','Middle Wabash-Little Vermilion'],
['05120109','Vermilion'],
['05120110','Sugar'],
['05120111','Middle Wabash-Busseron'],
['05120112','Embarras'],
['05120113','Lower Wabash'],
['05120114','Little Wabash'],
['05120115','Skillet'],
['051202','Patoka-White'],
['05120201','Upper White'],
['05120202','Lower White'],
['05120203','Eel'],
['05120204','Driftwood'],
['05120205','Flatrock-Haw'],
['05120206','Upper East Fork White'],
['05120207','Muscatatuck'],
['05120208','Lower East Fork White'],
['05120209','Patoka'],
['0513','Cumberland'],
['051301','Upper Cumberland'],
['05130101','Upper Cumberland'],
['05130102','Rockcastle'],
['05130103','Upper Cumberland-Lake Cumberland'],
['05130104','South Fork Cumberland'],
['05130105','Obey'],
['05130106','Upper Cumberland-Cordell Hull'],
['05130107','Collins'],
['05130108','Caney'],
['051302','Lower Cumberland'],
['05130201','Lower Cumberland-Old Hickory Lake'],
['05130202','Lower Cumberland-Sycamore'],
['05130203','Stones'],
['05130204','Harpeth'],
['05130205','Lower Cumberland'],
['05130206','Red'],
['0514','Lower Ohio'],
['051401','Lower Ohio-Salt'],
['05140101','Silver-Little Kentucky'],
['05140102','Salt'],
['05140103','Rolling Fork'],
['05140104','Blue-Sinking'],
['051402','Lower Ohio'],
['05140201','Lower Ohio-Little Pigeon'],
['05140202','Highland-Pigeon'],
['05140203','Lower Ohio-Bay'],
['05140204','Saline'],
['05140205','Tradewater'],
['05140206','Lower Ohio'],
['06','Tennessee'],
['0601','Upper Tennessee'],
['060101','French Broad-Holston'],
['06010101','North Fork Holston'],
['06010102','South Fork Holston'],
['06010103','Watauga'],
['06010104','Holston'],
['06010105','Upper French Broad'],
['06010106','Pigeon'],
['06010107','Lower French Broad'],
['06010108','Nolichucky'],
['060102','Upper Tennessee'],
['06010201','Watts Bar Lake'],
['06010202','Upper Little Tennessee'],
['06010203','Tuckasegee'],
['06010204','Lower Little Tennessee'],
['06010205','Upper Clinch'],
['06010206','Powell'],
['06010207','Lower Clinch'],
['06010208','Emory'],
['0602','Middle Tennessee-Hiwassee'],
['060200','Middle Tennessee-Hiwassee'],
['06020001','Middle Tennessee-Chickamauga'],
['06020002','Hiwassee'],
['06020003','Ocoee'],
['06020004','Sequatchie'],
['0603','Middle Tennessee-Elk'],
['060300','Middle Tennessee-Elk'],
['06030001','Guntersville Lake'],
['06030002','Wheeler Lake'],
['06030003','Upper Elk'],
['06030004','Lower Elk'],
['06030005','Pickwick Lake'],
['06030006','Bear'],
['0604','Lower Tennessee'],
['060400','Lower Tennessee'],
['06040001','Lower Tennessee-Beech'],
['06040002','Upper Duck'],
['06040003','Lower Duck'],
['06040004','Buffalo'],
['06040005','Kentucky Lake'],
['06040006','Lower Tennessee'],
['07','Upper Mississippi'],
['0701','Mississippi Headwaters'],
['070101','Mississippi Headwaters'],
['07010101','Mississippi Headwaters'],
['07010102','Leech Lake'],
['07010103','Prairie-Willow'],
['07010104','Elk-Nokasippi'],
['07010105','Pine'],
['07010106','Crow Wing'],
['07010107','Redeye'],
['07010108','Long Prairie'],
['070102','Upper Mississippi-Crow-Rum'],
['07010201','Platte-Spunk'],
['07010202','Sauk'],
['07010203','Clearwater-Elk'],
['07010204','Crow'],
['07010205','South Fork Crow'],
['07010206','Twin Cities'],
['07010207','Rum'],
['0702','Minnesota'],
['070200','Minnesota'],
['07020001','Upper Minnesota'],
['07020002','Pomme De Terre'],
['07020003','Lac Qui Parle'],
['07020004','Hawk-Yellow Medicine'],
['07020005','Chippewa'],
['07020006','Redwood'],
['07020007','Middle Minnesota'],
['07020008','Cottonwood'],
['07020009','Blue Earth'],
['07020010','Watonwan'],
['07020011','Le Sueur'],
['07020012','Lower Minnesota'],
['0703','St. Croix'],
['070300','St. Croix'],
['07030001','Upper St. Croix'],
['07030002','Namekagon'],
['07030003','Kettle'],
['07030004','Snake'],
['07030005','Lower St. Croix'],
['0704','Upper Mississippi-Black-Root'],
['070400','Upper Mississippi-Black-Root'],
['07040001','Rush-Vermillion'],
['07040002','Cannon'],
['07040003','Buffalo-Whitewater'],
['07040004','Zumbro'],
['07040005','Trempealeau'],
['07040006','La Crosse-Pine'],
['07040007','Black'],
['07040008','Root'],
['0705','Chippewa'],
['070500','Chippewa'],
['07050001','Upper Chippewa'],
['07050002','Flambeau'],
['07050003','South Fork Flambeau'],
['07050004','Jump'],
['07050005','Lower Chippewa'],
['07050006','Eau Claire'],
['07050007','Red Cedar'],
['0706','Upper Mississippi-Maquoketa-Plum'],
['070600','Upper Mississippi-Maquoketa-Plum'],
['07060001','Coon-Yellow'],
['07060002','Upper Iowa'],
['07060003','Grant-Little Maquoketa'],
['07060004','Turkey'],
['07060005','Apple-Plum'],
['07060006','Maquoketa'],
['0707','Wisconsin'],
['070700','Wisconsin'],
['07070001','Upper Wisconsin'],
['07070002','Lake Dubay'],
['07070003','Castle Rock'],
['07070004','Baraboo'],
['07070005','Lower Wisconsin'],
['07070006','Kickapoo'],
['0708','Upper Mississippi-Iowa-Skunk-Wapsipinicon'],
['070801','Upper Mississippi-Skunk-Wapsipinicon'],
['07080101','Copperas-Duck'],
['07080102','Upper Wapsipinicon'],
['07080103','Lower Wapsipinicon'],
['07080104','Flint-Henderson'],
['07080105','South Skunk'],
['07080106','North Skunk'],
['07080107','Skunk'],
['070802','Iowa'],
['07080201','Upper Cedar'],
['07080202','Shell Rock'],
['07080203','Winnebago'],
['07080204','West Fork Cedar'],
['07080205','Middle Cedar'],
['07080206','Lower Cedar'],
['07080207','Upper Iowa'],
['07080208','Middle Iowa'],
['07080209','Lower Iowa'],
['0709','Rock'],
['070900','Rock'],
['07090001','Upper Rock'],
['07090002','Crawfish'],
['07090003','Pecatonica'],
['07090004','Sugar'],
['07090005','Lower Rock'],
['07090006','Kishwaukee'],
['07090007','Green'],
['0710','Des Moines'],
['071000','Des Moines'],
['07100001','Des Moines Headwaters'],
['07100002','Upper Des Moines'],
['07100003','East Fork Des Moines'],
['07100004','Middle Des Moines'],
['07100005','Boone'],
['07100006','North Raccoon'],
['07100007','South Raccoon'],
['07100008','Lake Red Rock'],
['07100009','Lower Des Moines'],
['0711','Upper Mississippi-Salt'],
['071100','Upper Mississippi-Salt'],
['07110001','Bear-Wyaconda'],
['07110002','North Fabius'],
['07110003','South Fabius'],
['07110004','The Sny'],
['07110005','North Fork Salt'],
['07110006','South Fork Salt'],
['07110007','Salt'],
['07110008','Cuivre'],
['07110009','Peruque-Piasa'],
['0712','Upper Illinois'],
['071200','Upper Illinois'],
['07120001','Kankakee'],
['07120002','Iroquois'],
['07120003','Chicago'],
['07120004','Des Plaines'],
['07120005','Upper Illinois'],
['07120006','Upper Fox'],
['07120007','Lower Fox'],
['0713','Lower Illinois'],
['071300','Lower Illinois'],
['07130001','Lower Illinois-Senachwine Lake'],
['07130002','Vermilion'],
['07130003','Lower Illinois-Lake Chautauqua'],
['07130004','Mackinaw'],
['07130005','Spoon'],
['07130006','Upper Sangamon'],
['07130007','South Fork Sangamon'],
['07130008','Lower Sangamon'],
['07130009','Salt'],
['07130010','La Moine'],
['07130011','Lower Illinois'],
['07130012','Macoupin'],
['0714','Upper Mississippi-Kaskaskia-Meramec'],
['071401','Upper Mississippi-Meramec'],
['07140101','Cahokia-Joachim'],
['07140102','Meramec'],
['07140103','Bourbeuse'],
['07140104','Big'],
['07140105','Upper Mississippi-Cape Girardeau'],
['07140106','Big Muddy'],
['07140107','Whitewater'],
['07140108','Cache'],
['071402','Kaskaskia'],
['07140201','Upper Kaskaskia'],
['07140202','Middle Kaskaskia'],
['07140203','Shoal'],
['07140204','Lower Kaskaskia'],
['08','Lower Mississippi'],
['0801','Lower Mississippi-Hatchie'],
['080101','Lower Mississippi-Memphis'],
['08010100','Lower Mississippi-Memphis'],
['080102','Hatchie-Obion'],
['08010201','Bayou De Chien-Mayfield'],
['08010202','Obion'],
['08010203','South Fork Obion'],
['08010204','North Fork Forked Deer'],
['08010205','South Fork Forked Deer'],
['08010206','Forked Deer'],
['08010207','Upper Hatchie'],
['08010208','Lower Hatchie'],
['08010209','Loosahatchie'],
['08010210','Wolf'],
['08010211','Horn Lake-Nonconnah'],
['0802','Lower Mississippi - St. Francis'],
['080201','Lower Mississippi-Helena'],
['08020100','Lower Mississippi-Helena'],
['080202','St. Francis'],
['08020201','New Madrid-St. Johns'],
['08020202','Upper St. Francis'],
['08020203','Lower St. Francis'],
['08020204','Little River Ditches'],
['08020205',"L'anguille"],
['080203','Lower White'],
['08020301','Lower White-Bayou Des Arc'],
['08020302','Cache'],
['08020303','Lower White'],
['08020304','Big'],
['080204','Lower Arkansas'],
['08020401','Lower Arkansas'],
['08020402','Bayou Meto'],
['0803','Lower Mississippi - Yazoo'],
['080301','Lower Mississippi-Greenville'],
['08030100','Lower Mississippi-Greenville'],
['080302','Yazoo'],
['08030201','Little Tallahatchie'],
['08030202','Tallahatchie'],
['08030203','Yocona'],
['08030204','Coldwater'],
['08030205','Yalobusha'],
['08030206','Upper Yazoo'],
['08030207','Big Sunflower'],
['08030208','Lower Yazoo'],
['08030209','Deer-Steele'],
['0804','Lower Red - Ouachita'],
['080401','Upper Ouachita'],
['08040101','Ouachita Headwaters'],
['08040102','Upper Ouachita'],
['08040103','Little Missouri'],
['080402','Lower Ouachita'],
['08040201','Lower Ouachita-Smackover'],
['08040202','Lower Ouachita-Bayou De Loutre'],
['08040203','Upper Saline'],
['08040204','Lower Saline'],
['08040205','Bayou Bartholomew'],
['08040206',"Bayou D'arbonne"],
['08040207','Lower Ouachita'],
['080403','Lower Red'],
['08040301','Lower Red'],
['08040302','Castor'],
['08040303','Dugdemona'],
['08040304','Little'],
['08040305','Black'],
['08040306','Bayou Cocodrie'],
['0805','Boeuf-Tensas'],
['080500','Boeuf-Tensas'],
['08050001','Boeuf'],
['08050002','Bayou Macon'],
['08050003','Tensas'],
['0806','Lower Mississippi - Big Black'],
['080601','Lower Mississippi-Natchez'],
['08060100','Lower Mississippi-Natchez'],
['080602','Big Black - Homochitto'],
['08060201','Upper Big Black'],
['08060202','Lower Big Black'],
['08060203','Bayou Pierre'],
['08060204','Coles Creek'],
['08060205','Homochitto'],
['08060206','Buffalo'],
['0807','Lower Mississippi-Lake Maurepas'],
['080701','Lower Mississippi-Baton Rouge'],
['08070100','Lower Mississippi-Baton Rouge'],
['080702','Lake Maurepas'],
['08070201','Bayou Sara-Thompson'],
['08070202','Amite'],
['08070203','Tickfaw'],
['08070204','Lake Maurepas'],
['08070205','Tangipahoa'],
['080703','Lower Grand'],
['08070300','Lower Grand'],
['0808','Louisiana Coastal'],
['080801','Atchafalaya - Vermilion'],
['08080101','Atchafalaya'],
['08080102','Bayou Teche'],
['08080103','Vermilion'],
['080802','Calcasieu - Mermentau'],
['08080201','Mermentau Headwaters'],
['08080202','Mermentau'],
['08080203','Upper Calcasieu'],
['08080204','Whisky Chitto'],
['08080205','West Fork Calcasieu'],
['08080206','Lower Calcasieu'],
['0809','Lower Mississippi'],
['080901','Lower Mississippi-New Orleans'],
['08090100','Lower Mississippi-New Orleans'],
['080902','Lake Pontchartrain'],
['08090201','Liberty Bayou-Tchefuncta'],
['08090202','Lake Pontchartrain'],
['08090203','Eastern Louisiana Coastal'],
['080903','Central Louisiana Coastal'],
['08090301','East Central Louisiana Coastal'],
['08090302','West Central Louisiana Coastal'],
['09','Souris-Red-Rainy'],
['0901','Souris'],
['090100','Souris'],
['09010001','Upper Souris'],
['09010002','Des Lacs'],
['09010003','Lower Souris'],
['09010004','Willow'],
['09010005','Deep'],
['0902','Red'],
['090201','Upper Red'],
['09020101','Bois De Sioux'],
['09020102','Mustinka'],
['09020103','Otter Tail'],
['09020104','Upper Red'],
['09020105','Western Wild Rice'],
['09020106','Buffalo'],
['09020107','Elm-Marsh'],
['09020108','Eastern Wild Rice'],
['09020109','Goose'],
['090202','Devils Lake-Sheyenne'],
['09020201','Devils Lake'],
['09020202','Upper Sheyenne'],
['09020203','Middle Sheyenne'],
['09020204','Lower Sheyenne'],
['09020205','Maple'],
['090203','Lower Red'],
['09020301','Sandhill-Wilson'],
['09020302','Red Lakes'],
['09020303','Red Lake'],
['09020304','Thief'],
['09020305','Clearwater'],
['09020306','Grand Marais-Red'],
['09020307','Turtle'],
['09020308','Forest'],
['09020309','Snake'],
['09020310','Park'],
['09020311','Lower Red'],
['09020312','Two Rivers'],
['09020313','Pembina'],
['09020314','Roseau'],
['0903','Rainy'],
['090300','Rainy'],
['09030001','Rainy Headwaters'],
['09030002','Vermilion'],
['09030003','Rainy Lake'],
['09030004','Upper Rainy'],
['09030005','Little Fork'],
['09030006','Big Fork'],
['09030007','Rapid'],
['09030008','Lower Rainy'],
['09030009','Lake of the Woods'],
['10','Missouri'],
['1001','Saskatchewan'],
['100100','Saskatchewan'],
['10010001','Belly'],
['10010002','St. Mary'],
['1002','Missouri Headwaters'],
['100200','Missouri Headwaters'],
['10020001','Red Rock'],
['10020002','Beaverhead'],
['10020003','Ruby'],
['10020004','Big Hole'],
['10020005','Jefferson'],
['10020006','Boulder'],
['10020007','Madison'],
['10020008','Gallatin'],
['1003','Missouri-Marias'],
['100301','Upper Missouri'],
['10030101','Upper Missouri'],
['10030102','Upper Missouri-Dearborn'],
['10030103','Smith'],
['10030104','Sun'],
['10030105','Belt'],
['100302','Marias'],
['10030201','Two Medicine'],
['10030202','Cut Bank'],
['10030203','Marias'],
['10030204','Willow'],
['10030205','Teton'],
['1004','Missouri-Musselshell'],
['100401','Fort Peck Lake'],
['10040101','Bullwhacker-Dog'],
['10040102','Arrow'],
['10040103','Judith'],
['10040104','Fort Peck Reservoir'],
['10040105','Big Dry'],
['10040106','Little Dry'],
['100402','Musselshell'],
['10040201','Upper Musselshell'],
['10040202','Middle Musselshell'],
['10040203','Flatwillow'],
['10040204','Box Elder'],
['10040205','Lower Musselshell'],
['1005','Milk'],
['100500','Milk'],
['10050001','Milk Headwaters'],
['10050002','Upper Milk'],
['10050003','Wild Horse Lake'],
['10050004','Middle Milk'],
['10050005','Big Sandy'],
['10050006','Sage'],
['10050007','Lodge'],
['10050008','Battle'],
['10050009','Peoples'],
['10050010','Cottonwood'],
['10050011','Whitewater'],
['10050012','Lower Milk'],
['10050013','Frenchman'],
['10050014','Beaver'],
['10050015','Rock'],
['10050016','Porcupine'],
['1006','Missouri-Poplar'],
['100600','Missouri-Poplar'],
['10060001','Prarie Elk-Wolf'],
['10060002','Redwater'],
['10060003','Poplar'],
['10060004','West Fork Poplar'],
['10060005','Charlie-Little Muddy'],
['10060006','Big Muddy'],
['10060007','Brush Lake closed basin'],
['1007','Upper Yellowstone'],
['100700','Upper Yellowstone'],
['10070001','Yellowstone Headwaters'],
['10070002','Upper Yellowstone'],
['10070003','Shields'],
['10070004','Upper Yellowstone-Lake Basin'],
['10070005','Stillwater'],
['10070006','Clarks Fork Yellowstone'],
['10070007','Upper Yellowstone-Pompeys Pillar'],
['10070008','Pryor'],
['1008','Big Horn'],
['100800','Big Horn'],
['10080001','Upper Wind'],
['10080002','Little Wind'],
['10080003','Popo Agie'],
['10080004','Muskrat'],
['10080005','Lower Wind'],
['10080006','Badwater'],
['10080007','Upper Bighorn'],
['10080008','Nowood'],
['10080009','Greybull'],
['10080010','Big Horn Lake'],
['10080011','Dry'],
['10080012','North Fork Shoshone'],
['10080013','South Fork Shoshone'],
['10080014','Shoshone'],
['10080015','Lower Bighorn'],
['10080016','Little Bighorn'],
['1009','Powder-Tongue'],
['100901','Tongue'],
['10090101','Upper Tongue'],
['10090102','Lower Tongue'],
['100902','Powder'],
['10090201','Middle Fork Powder'],
['10090202','Upper Powder'],
['10090203','South Fork Powder'],
['10090204','Salt'],
['10090205','Crazy Woman'],
['10090206','Clear'],
['10090207','Middle Powder'],
['10090208','Little Powder'],
['10090209','Lower Powder'],
['10090210','Mizpah'],
['1010','Lower Yellowstone'],
['101000','Lower Yellowstone'],
['10100001','Lower Yellowstone-Sunday'],
['10100002','Big Porcupine'],
['10100003','Rosebud'],
['10100004','Lower Yellowstone'],
['10100005',"O'fallon"],
['1011','Missouri-Little Missouri'],
['101101','Lake Sakakawea'],
['10110101','Lake Sakakawea'],
['10110102','Little Muddy'],
['101102','Little Missouri'],
['10110201','Upper Little Missouri'],
['10110202','Boxelder'],
['10110203','Middle Little Missouri'],
['10110204','Beaver'],
['10110205','Lower Little Missouri'],
['1012','Cheyenne'],
['101201','Cheyenne'],
['10120101','Antelope'],
['10120102','Dry Fork Cheyenne'],
['10120103','Upper Cheyenne'],
['10120104','Lance'],
['10120105','Lightning'],
['10120106','Angostura Reservoir'],
['10120107','Beaver'],
['10120108','Hat'],
['10120109','Middle Cheyenne-Spring'],
['10120110','Rapid'],
['10120111','Middle Cheyenne-Elk'],
['10120112','Lower Cheyenne'],
['10120113','Cherry'],
['101202','Belle Fourche'],
['10120201','Upper Belle Fourche'],
['10120202','Lower Belle Fourche'],
['10120203','Redwater'],
['1013','Missouri-Oahe'],
['101301','Lake Oahe'],
['10130101','Painted Woods-Square Butte'],
['10130102','Upper Lake Oahe'],
['10130103','Apple'],
['10130104','Beaver'],
['10130105','Lower Lake Oahe'],
['10130106','West Missouri Coteau'],
['101302','Cannonball-Heart-Knife'],
['10130201','Knife'],
['10130202','Upper Heart'],
['10130203','Lower Heart'],
['10130204','Upper Cannonball'],
['10130205','Cedar'],
['10130206','Lower Cannonball'],
['101303','Grand-Moreau'],
['10130301','North Fork Grand'],
['10130302','South Fork Grand'],
['10130303','Grand'],
['10130304','South Fork Moreau'],
['10130305','Upper Moreau'],
['10130306','Lower Moreau'],
['1014','Missouri-White'],
['101401','Fort Randall Reservoir'],
['10140101','Fort Randall Reservoir'],
['10140102','Bad'],
['10140103','Medicine Knoll'],
['10140104','Medicine'],
['10140105','Crow'],
['101402','White'],
['10140201','Upper White'],
['10140202','Middle White'],
['10140203','Little White'],
['10140204','Lower White'],
['1015','Niobrara'],
['101500','Niobrara'],
['10150001','Ponca'],
['10150002','Niobrara Headwaters'],
['10150003','Upper Niobrara'],
['10150004','Middle Niobrara'],
['10150005','Snake'],
['10150006','Keya Paha'],
['10150007','Lower Niobrara'],
['1016','James'],
['101600','James'],
['10160001','James Headwaters'],
['10160002','Pipestem'],
['10160003','Upper James'],
['10160004','Elm'],
['10160005','Mud'],
['10160006','Middle James'],
['10160007','East Missouri Coteau'],
['10160008','Snake'],
['10160009','Turtle'],
['10160010','North Big Sioux Coteau'],
['10160011','Lower James'],
['1017','Missouri-Big Sioux'],
['101701','Lewis and Clark Lake'],
['10170101','Lewis and Clark Lake'],
['10170102','Vermillion'],
['10170103','South Big Sioux Coteau'],
['101702','Big Sioux'],
['10170201','Middle Big Sioux Coteau'],
['10170202','Upper Big Sioux'],
['10170203','Lower Big Sioux'],
['10170204','Rock'],
['1018','North Platte'],
['101800','North Platte'],
['10180001','North Platte Headwaters'],
['10180002','Upper North Platte'],
['10180003','Pathfinder-Seminoe Reservoirs'],
['10180004','Medicine Bow'],
['10180005','Little Medicine Bow'],
['10180006','Sweetwater'],
['10180007','Middle North Platte-Casper'],
['10180008','Glendo Reservoir'],
['10180009','Middle North Platte-Scotts Bluff'],
['10180010','Upper Laramie'],
['10180011','Lower Laramie'],
['10180012','Horse'],
['10180013','Pumpkin'],
['10180014','Lower North Platte'],
['1019','South Platte'],
['101900','South Platte'],
['10190001','South Platte Headwaters'],
['10190002','Upper South Platte'],
['10190003','Middle South Platte-Cherry Creek'],
['10190004','Clear'],
['10190005','St. Vrain'],
['10190006','Big Thompson'],
['10190007','Cache La Poudre'],
['10190008','Lone Tree-Owl'],
['10190009','Crow'],
['10190010','Kiowa'],
['10190011','Bijou'],
['10190012','Middle South Platte-Sterling'],
['10190013','Beaver'],
['10190014','Pawnee'],
['10190015','Upper Lodgepole'],
['10190016','Lower Lodgepole'],
['10190017','Sidney Draw'],
['10190018','Lower South Platte'],
['1020','Platte'],
['102001','Middle Platte'],
['10200101','Middle Platte-Buffalo'],
['10200102','Wood'],
['10200103','Middle Platte-Prairie'],
['102002','Lower Platte'],
['10200201','Lower Platte-Shell'],
['10200202','Lower Platte'],
['10200203','Salt'],
['1021','Loup'],
['102100','Loup'],
['10210001','Upper Middle Loup'],
['10210002','Dismal'],
['10210003','Lower Middle Loup'],
['10210004','South Loup'],
['10210005','Mud'],
['10210006','Upper North Loup'],
['10210007','Lower North Loup'],
['10210008','Calamus'],
['10210009','Loup'],
['10210010','Cedar'],
['1022','Elkhorn'],
['102200','Elkhorn'],
['10220001','Upper Elkhorn'],
['10220002','North Fork Elkhorn'],
['10220003','Lower Elkhorn'],
['10220004','Logan'],
['1023','Missouri-Little Sioux'],
['102300','Missouri-Little Sioux'],
['10230001','Blackbird-Soldier'],
['10230002','Floyd'],
['10230003','Little Sioux'],
['10230004','Monona-Harrison Ditch'],
['10230005','Maple'],
['10230006','Big Papillion-Mosquito'],
['10230007','Boyer'],
['1024','Missouri-Nishnabotna'],
['102400','Missouri-Nishnabotna'],
['10240001','Keg-Weeping Water'],
['10240002','West Nishnabotna'],
['10240003','East Nishnabotna'],
['10240004','Nishnabotna'],
['10240005','Tarkio-Wolf'],
['10240006','Little Nemaha'],
['10240007','South Fork Big Nemaha'],
['10240008','Big Nemaha'],
['10240009','West Nodaway'],
['10240010','Nodaway'],
['10240011','Independence-Sugar'],
['10240012','Platte'],
['10240013','One Hundred and Two'],
['1025','Republican'],
['102500','Republican'],
['10250001','Arikaree'],
['10250002','North Fork Republican'],
['10250003','South Fork Republican'],
['10250004','Upper Republican'],
['10250005','Frenchman'],
['10250006','Stinking Water'],
['10250007','Red Willow'],
['10250008','Medicine'],
['10250009','Harlan County Reservoir'],
['10250010','Upper Sappa'],
['10250011','Lower Sappa'],
['10250012','South Fork Beaver'],
['10250013','Little Beaver'],
['10250014','Beaver'],
['10250015','Prairie Dog'],
['10250016','Middle Republican'],
['10250017','Lower Republican'],
['1026','Smoky Hill'],
['102600','Smoky Hill'],
['10260001','Smoky Hill Headwaters'],
['10260002','North Fork Smoky Hill'],
['10260003','Upper Smoky Hill'],
['10260004','Ladder'],
['10260005','Hackberry'],
['10260006','Middle Smoky Hill'],
['10260007','Big'],
['10260008','Lower Smoky Hill'],
['10260009','Upper Saline'],
['10260010','Lower Saline'],
['10260011','Upper North Fork Solomon'],
['10260012','Lower North Fork Solomon'],
['10260013','Upper South Fork Solomon'],
['10260014','Lower South Fork Solomon'],
['10260015','Solomon'],
['1027','Kansas'],
['102701','Kansas'],
['10270101','Upper Kansas'],
['10270102','Middle Kansas'],
['10270103','Delaware'],
['10270104','Lower Kansas'],
['102702','Big Blue'],
['10270201','Upper Big Blue'],
['10270202','Middle Big Blue'],
['10270203','West Fork Big Blue'],
['10270204','Turkey'],
['10270205','Lower Big Blue'],
['10270206','Upper Little Blue'],
['10270207','Lower Little Blue'],
['1028','Chariton-Grand'],
['102801','Grand'],
['10280101','Upper Grand'],
['10280102','Thompson'],
['10280103','Lower Grand'],
['102802','Chariton'],
['10280201','Upper Chariton'],
['10280202','Lower Chariton'],
['10280203','Little Chariton'],
['1029','Gasconade-Osage'],
['102901','Osage'],
['10290101','Upper Marais Des Cygnes'],
['10290102','Lower Marais Des Cygnes'],
['10290103','Little Osage'],
['10290104','Marmaton'],
['10290105','Harry S. Missouri'],
['10290106','Sac'],
['10290107','Pomme De Terre'],
['10290108','South Grand'],
['10290109','Lake of the Ozarks'],
['10290110','Niangua'],
['10290111','Lower Osage'],
['102902','Gasconade'],
['10290201','Upper Gasconade'],
['10290202','Big Piney'],
['10290203','Lower Gasconade'],
['1030','Lower Missouri'],
['103001','Lower Missouri-Blackwater'],
['10300101','Lower Missouri-Crooked'],
['10300102','Lower Missouri-Moreau'],
['10300103','Lamine'],
['10300104','Blackwater'],
['103002','Lower Missouri'],
['10300200','Lower Missouri'],
['11','Arkansas-White-Red'],
['1101','Upper White'],
['110100','Upper White'],
['11010001','Beaver Reservoir'],
['11010002','James'],
['11010003','Bull Shoals Lake'],
['11010004','Middle White'],
['11010005','Buffalo'],
['11010006','North Fork White'],
['11010007','Upper Black'],
['11010008','Current'],
['11010009','Lower Black'],
['11010010','Spring'],
['11010011','Eleven Point'],
['11010012','Strawberry'],
['11010013','Upper White-Village'],
['11010014','Little Red'],
['1102','Upper Arkansas'],
['110200','Upper Arkansas'],
['11020001','Arkansas Headwaters'],
['11020002','Upper Arkansas'],
['11020003','Fountain'],
['11020004','Chico'],
['11020005','Upper Arkansas-Lake Meredith'],
['11020006','Huerfano'],
['11020007','Apishapa'],
['11020008','Horse'],
['11020009','Upper Arkansas-John Martin'],
['11020010','Purgatoire'],
['11020011','Big Sandy'],
['11020012','Rush'],
['11020013','Two Butte'],
['1103','Middle Arkansas'],
['110300','Middle Arkansas'],
['11030001','Middle Arkansas-Lake Mckinney'],
['11030002','Whitewoman'],
['11030003','Arkansas-Dodge City'],
['11030004','Coon-Pickerel'],
['11030005','Pawnee'],
['11030006','Buckner'],
['11030007','Upper Walnut Creek'],
['11030008','Lower Walnut Creek'],
['11030009','Rattlesnake'],
['11030010','Gar-Peace'],
['11030011','Cow'],
['11030012','Little Arkansas'],
['11030013','Middle Arkansas-Slate'],
['11030014','North Fork Ninnescah'],
['11030015','South Fork Ninnescah'],
['11030016','Ninnescah'],
['11030017','Upper Walnut River'],
['11030018','Lower Walnut River'],
['1104','Upper Cimarron'],
['110400','Upper Cimarron'],
['11040001','Cimarron headwaters'],
['11040002','Upper Cimarron'],
['11040003','North Fork Cimarron'],
['11040004','Sand Arroyo'],
['11040005','Bear'],
['11040006','Upper Cimarron-Liberal'],
['11040007','Crooked'],
['11040008','Upper Cimarron-Bluff'],
['1105','Lower Cimarron'],
['110500','Lower Cimarron'],
['11050001','Lower Cimarron-Eagle Chief'],
['11050002','Lower Cimarron-Skeleton'],
['11050003','Lower Cimarron'],
['1106','Arkansas - Keystone'],
['110600','Arkansas - Keystone'],
['11060001','Kaw Lake'],
['11060002','Upper Salt Fork Arkansas'],
['11060003','Medicine Lodge'],
['11060004','Lower Salt Fork Arkansas'],
['11060005','Chikaskia'],
['11060006','Black Bear-Red Rock'],
['1107','Neosho - Verdigris'],
['110701','Verdigris'],
['11070101','Upper Verdigris'],
['11070102','Fall'],
['11070103','Middle Verdigris'],
['11070104','Elk'],
['11070105','Lower Verdigris'],
['11070106','Caney'],
['11070107','Bird'],
['110702','Neosho'],
['11070201','Neosho headwaters'],
['11070202','Upper Cottonwood'],
['11070203','Lower Cottonwood'],
['11070204','Upper Neosho'],
['11070205','Middle Neosho'],
['11070206',"Lake O' the Cherokees"],
['11070207','Spring'],
['11070208','Elk'],
['11070209','Lower Neosho'],
['1108','Upper Canadian'],
['110800','Upper Canadian'],
['11080001','Canadian headwaters'],
['11080002','Cimarron'],
['11080003','Upper Canadian'],
['11080004','Mora'],
['11080005','Conchas'],
['11080006','Upper Canadian-Ute Reservoir'],
['11080007','Ute'],
['11080008','Revuelto'],
['1109','Lower Canadian'],
['110901','Middle Canadian'],
['11090101','Middle Canadian-Trujillo'],
['11090102','Punta De Agua'],
['11090103','Rita Blanca'],
['11090104','Carrizo'],
['11090105','Lake Meredith'],
['11090106','Middle Canadian-Spring'],
['110902','Lower Canadian'],
['11090201','Lower Canadian-Deer'],
['11090202','Lower Canadian-Walnut'],
['11090203','Little'],
['11090204','Lower Canadian'],
['1110','North Canadian'],
['111001','Upper Beaver'],
['11100101','Upper Beaver'],
['11100102','Middle Beaver'],
['11100103','Coldwater'],
['11100104','Palo Duro'],
['111002','Lower Beaver'],
['11100201','Lower Beaver'],
['11100202','Upper Wolf'],
['11100203','Lower Wolf'],
['111003','Lower North Canadian'],
['11100301','Middle North Canadian'],
['11100302','Lower North Canadian'],
['11100303','Deep Fork'],
['1111','Lower Arkansas'],
['111101','Robert S. Kerr Reservoir'],
['11110101','Polecat-Snake'],
['11110102','Dirty-Greenleaf'],
['11110103','Illinois'],
['11110104','Robert S. Kerr Reservoir'],
['11110105','Poteau'],
['111102','Lower Arkansas-Fourche La Fave'],
['11110201','Frog-Mulberry'],
['11110202','Dardanelle Reservoir'],
['11110203','Lake Conway-Point Remove'],
['11110204','Petit Jean'],
['11110205','Cadron'],
['11110206','Fourche La Fave'],
['11110207','Lower Arkansas-Maumelle'],
['1112','Red headwaters'],
['111201','Prairie Dog Town Fork Red'],
['11120101','Tierra Blanca'],
['11120102','Palo Duro'],
['11120103','Upper Prairie Dog Town Fork Red'],
['11120104','Tule'],
['11120105','Lower Prairie Dog Town Fork Red'],
['111202','Salt Fork Red'],
['11120201','Upper Salt Fork Red'],
['11120202','Lower Salt Fork Red'],
['111203','North Fork Red'],
['11120301','Upper North Fork Red'],
['11120302','Middle North Fork Red'],
['11120303','Lower North Fork Red'],
['11120304','Elm Fork Red'],
['1113','Red - Washita'],
['111301','Red-Pease'],
['11130101','Groesbeck-Sandy'],
['11130102','Blue-China'],
['11130103','North Pease'],
['11130104','Middle Pease'],
['11130105','Pease'],
['111302','Red-Lake Texoma'],
['11130201','Farmers-Mud'],
['11130202','Cache'],
['11130203','West Cache'],
['11130204','North Wichita'],
['11130205','South Wichita'],
['11130206','Wichita'],
['11130207','Southern Beaver'],
['11130208','Northern Beaver'],
['11130209','Little Wichita'],
['11130210','Lake Texoma'],
['111303','Washita'],
['11130301','Washita headwaters'],
['11130302','Upper Washita'],
['11130303','Middle Washita'],
['11130304','Lower Washita'],
['1114','Red-Sulphur'],
['111401','Red-Little'],
['11140101',"Bois D'arc-Island"],
['11140102','Blue'],
['11140103','Muddy Boggy'],
['11140104','Clear Boggy'],
['11140105','Kiamichi'],
['11140106','Pecan-Waterhole'],
['11140107','Upper Little'],
['11140108','Mountain Fork'],
['11140109','Lower Little'],
['111402','Red-Saline'],
['11140201','Mckinney-Posten Bayous'],
['11140202','Middle Red-Coushatta'],
['11140203','Loggy Bayou'],
['11140204','Red Chute'],
['11140205','Bodcau Bayou'],
['11140206','Bayou Pierre'],
['11140207','Lower Red-Lake Iatt'],
['11140208','Saline Bayou'],
['11140209','Black Lake Bayou'],
['111403','Big Cypress - Sulphur'],
['11140301','Sulphur headwaters'],
['11140302','Lower Sulphur'],
['11140303','White Oak Bayou'],
['11140304','Cross Bayou'],
['11140305',"Lake O'the Pines"],
['11140306','Caddo Lake'],
['11140307','Little Cypress'],
['12','Texas-Gulf'],
['1201','Sabine'],
['120100','Sabine'],
['12010001','Upper Sabine'],
['12010002','Middle Sabine'],
['12010003','Lake Fork'],
['12010004','Toledo Bend Reservoir'],
['12010005','Lower Sabine'],
['1202','Neches'],
['120200','Neches'],
['12020001','Upper Neches'],
['12020002','Middle Neches'],
['12020003','Lower Neches'],
['12020004','Upper Angelina'],
['12020005','Lower Angelina'],
['12020006','Village'],
['12020007','Pine Island Bayou'],
['1203','Trinity'],
['120301','Upper Trinity'],
['12030101','Upper West Fork Trinity'],
['12030102','Lower West Fork Trinity'],
['12030103','Elm Fork Trinity'],
['12030104','Denton'],
['12030105','Upper Trinity'],
['12030106','East Fork Trinity'],
['12030107','Cedar'],
['12030108','Richland'],
['12030109','Chambers'],
['120302','Lower Trinity'],
['12030201','Lower Trinity-Tehuacana'],
['12030202','Lower Trinity-Kickapoo'],
['12030203','Lower Trinity'],
['1204','Galveston Bay-San Jacinto'],
['120401','San Jacinto'],
['12040101','West Fork San Jacinto'],
['12040102','Spring'],
['12040103','East Fork San Jacinto'],
['12040104','Buffalo-San Jacinto'],
['120402','Galveston Bay-Sabine Lake'],
['12040201','Sabine Lake'],
['12040202','East Galveston Bay'],
['12040203','North Galveston Bay'],
['12040204','West Galveston Bay'],
['12040205','Austin-Oyster'],
['1205','Brazos headwaters'],
['120500','Brazos headwaters'],
['12050001','Yellow House Draw'],
['12050002','Blackwater Draw'],
['12050003','North Fork Double Mountain Fork'],
['12050004','Double Moutain Fork Brazos'],
['12050005','Running Water Draw'],
['12050006','White'],
['12050007','Salt Fork Brazos'],
['1206','Middle Brazos'],
['120601','Middle Brazos-Clear Fork'],
['12060101','Middle Brazos-Millers'],
['12060102','Upper Clear Fork Brazos'],
['12060103','Paint'],
['12060104','Lower Clear Fork Brazos'],
['12060105','Hubbard'],
['120602','Middle Brazos-Bosque'],
['12060201','Middle Brazos-Palo Pinto'],
['12060202','Middle Brazos-Lake Whitney'],
['12060203','Bosque'],
['12060204','North Bosque'],
['1207','Lower Brazos'],
['120701','Lower Brazos'],
['12070101','Lower Brazos-Little Brazos'],
['12070102','Yegua'],
['12070103','Navasota'],
['12070104','Lower Brazos'],
['120702','Little'],
['12070201','Leon'],
['12070202','Cowhouse'],
['12070203','Lampasas'],
['12070204','Little'],
['12070205','San Gabriel'],
['1208','Upper Colorado'],
['120800','Upper Colorado'],
['12080001','Lost Draw'],
['12080002','Colorado headwaters'],
['12080003','Monument-Seminole Draws'],
['12080004','Mustang Draw'],
['12080005','Johnson Draw'],
['12080006','Sulphur Springs Draw'],
['12080007','Beals'],
['12080008','Upper Colorado'],
['1209','Lower Colorado-San Bernard Coastal'],
['120901','Middle Colorado-Concho'],
['12090101','Middle Colorado-Elm'],
['12090102','South Concho'],
['12090103','Middle Concho'],
['12090104','North Concho'],
['12090105','Concho'],
['12090106','Middle Colorado'],
['12090107','Pecan Bayou'],
['12090108','Jim Ned'],
['12090109','San Saba'],
['12090110','Brady'],
['120902','Middle Colorado-Llano'],
['12090201','Buchanan-Lyndon B'],
['12090202','North Llano'],
['12090203','South Llano'],
['12090204','Llano'],
['12090205','Austin-Travis Lakes'],
['12090206','Pedernales'],
['120903','Lower Colorado'],
['12090301','Lower Colorado-Cummins'],
['12090302','Lower Colorado'],
['120904','San Bernard Coastal'],
['12090401','San Bernard'],
['12090402','East Matagorda Bay'],
['1210','Central Texas Coastal'],
['121001','Lavaca'],
['12100101','Lavaca'],
['12100102','Navidad'],
['121002','Guadalupe'],
['12100201','Upper Guadalupe'],
['12100202','Middle Guadalupe'],
['12100203','San Marcos'],
['12100204','Lower Guadalupe'],
['121003','San Antonio'],
['12100301','Upper San Antonio'],
['12100302','Medina'],
['12100303','Lower San Antonio'],
['12100304','Cibolo'],
['121004','Central Texas Coastal'],
['12100401','Central Matagorda Bay'],
['12100402','West Matagorda Bay'],
['12100403','East San Antonio Bay'],
['12100404','West San Antonio Bay'],
['12100405','Aransas Bay'],
['12100406','Mission'],
['12100407','Aransas'],
['1211','Nueces-Southwestern Texas Coastal'],
['121101','Nueces'],
['12110101','Nueces headwaters'],
['12110102','West Nueces'],
['12110103','Upper Nueces'],
['12110104','Turkey'],
['12110105','Middle Nueces'],
['12110106','Upper Frio'],
['12110107','Hondo'],
['12110108','Lower Frio'],
['12110109','San Miguel'],
['12110110','Atascosa'],
['12110111','Lower Nueces'],
['121102','Southwestern Texas Coastal'],
['12110201','North Corpus Christi Bay'],
['12110202','South Corpus Christi Bay'],
['12110203','North Laguna Madre'],
['12110204','San Fernando'],
['12110205','Baffin Bay'],
['12110206','Palo Blanco'],
['12110207','Central Laguna Madre'],
['12110208','South Laguna Madre'],
['13','Rio Grande'],
['1301','Rio Grande headwaters'],
['130100','Rio Grande headwaters'],
['13010001','Rio Grande headwaters'],
['13010002','Alamosa-Trinchera'],
['13010003','San Luis'],
['13010004','Saguache'],
['13010005','Conejos'],
['1302','Rio Grande-Elephant Butte'],
['130201','Upper Rio Grande'],
['13020101','Upper Rio Grande'],
['13020102','Rio Chama'],
['130202','Rio Grande-Elephant Butte'],
['13020201','Rio Grande-Santa Fe'],
['13020202','Jemez'],
['13020203','Rio Grande-Albuquerque'],
['13020204','Rio Puerco'],
['13020205','Arroyo Chico'],
['13020206','North Plains'],
['13020207','Rio San Jose'],
['13020208','Plains of San Agustin'],
['13020209','Rio Salado'],
['13020210','Jornada Del Muerto'],
['13020211','Elephant Butte Reservoir'],
['1303','Rio Grande-Mimbres'],
['130301','Rio Grande-Caballo'],
['13030101','Caballo'],
['13030102','El Paso-Las Cruces'],
['13030103','Jornada Draw'],
['130302','Mimbres'],
['13030201','Playas Lake'],
['13030202','Mimbres'],
['1304','Rio Grande-Amistad'],
['130401','Rio Grande-Fort Quitman'],
['13040100','Rio Grande-Fort Quitman'],
['130402','Rio Grande-Amistad'],
['13040201','Cibolo-Red Light'],
['13040202','Alamito'],
['13040203','Black Hills-Fresno'],
['13040204','Terlingua'],
['13040205','Big Bend'],
['13040206','Maravillas'],
['13040207','Santiago Draw'],
['13040208','Reagan-Sanderson'],
['13040209','San Francisco'],
['13040210','Lozier Canyon'],
['13040211','Big Canyon'],
['13040212','Amistad Reservoir'],
['130403','Devils'],
['13040301','Upper Devils'],
['13040302','Lower Devils'],
['13040303','Dry Devils'],
['1305','Rio Grande closed basins'],
['130500','Rio Grande closed basins'],
['13050001','Western Estancia'],
['13050002','Eastern Estancia'],
['13050003','Tularosa Valley'],
['13050004','Salt Basin'],
['1306','Upper Pecos'],
['130600','Upper Pecos'],
['13060001','Pecos headwaters'],
['13060002','Pintada Arroyo'],
['13060003','Upper Pecos'],
['13060004','Taiban'],
['13060005','Arroyo Del Macho'],
['13060006','Gallo Arroyo'],
['13060007','Upper Pecos-Long Arroyo'],
['13060008','Rio Hondo'],
['13060009','Rio Felix'],
['13060010','Rio Penasco'],
['13060011','Upper Pecos-Black'],
['1307','Lower Pecos'],
['130700','Lower Pecos'],
['13070001','Lower Pecos-Red Bluff Reservoir'],
['13070002','Delaware'],
['13070003','Toyah'],
['13070004','Salt Draw'],
['13070005','Barrilla Draw'],
['13070006','Coyanosa-Hackberry Draws'],
['13070007','Landreth-Monument Draws'],
['13070008','Lower Pecos'],
['13070009','Tunas'],
['13070010','Independence'],
['13070011','Howard Draw'],
['1308','Rio Grande-Falcon'],
['130800','Rio Grande-Falcon'],
['13080001','Elm-Sycamore'],
['13080002','San Ambrosia-Santa Isabel'],
['13080003','International Falcon Reservoir'],
['1309','Lower Rio Grande'],
['130900','Lower Rio Grande'],
['13090001','Los Olmos'],
['13090002','Lower Rio Grande'],
['14','Upper Colorado'],
['1401','Colorado headwaters'],
['140100','Colorado headwaters'],
['14010001','Colorado headwaters'],
['14010002','Blue'],
['14010003','Eagle'],
['14010004','Roaring Fork'],
['14010005','Colorado headwaters-Plateau'],
['14010006','Parachute-Roan'],
['1402','Gunnison'],
['140200','Gunnison'],
['14020001','East-Taylor'],
['14020002','Upper Gunnison'],
['14020003','Tomichi'],
['14020004','North Fork Gunnison'],
['14020005','Lower Gunnison'],
['14020006','Uncompahange'],
['1403','Upper Colorado-Dolores'],
['140300','Upper Colorado-Dolores'],
['14030001','Westwater Canyon'],
['14030002','Upper Dolores'],
['14030003','San Miguel'],
['14030004','Lower Dolores'],
['14030005','Upper Colorado-Kane Springs'],
['1404','Great Divide - Upper Green'],
['140401','Upper Green'],
['14040101','Upper Green'],
['14040102','New Fork'],
['14040103','Upper Green-Slate'],
['14040104','Big Sandy'],
['14040105','Bitter'],
['14040106','Upper Green-Flaming Gorge Reservoir'],
['14040107','Blacks Fork'],
['14040108','Muddy'],
['14040109','Vermilion'],
['140402','Great Divide closed basin'],
['14040200','Great Divide closed basin'],
['1405','White-Yampa'],
['140500','White - Yampa'],
['14050001','Upper Yampa'],
['14050002','Lower Yampa'],
['14050003','Little Snake'],
['14050004','Muddy'],
['14050005','Upper White'],
['14050006','Piceance-Yellow'],
['14050007','Lower White'],
['1406','Lower Green'],
['140600','Lower Green'],
['14060001','Lower Green-Diamond'],
['14060002','Ashley-Brush'],
['14060003','Duchesne'],
['14060004','Strawberry'],
['14060005','Lower Green-Desolation Canyon'],
['14060006','Willow'],
['14060007','Price'],
['14060008','Lower Green'],
['14060009','San Rafael'],
['1407','Upper Colorado-Dirty Devil'],
['140700','Upper Colorado-Dirty Devil'],
['14070001','Upper Lake Powell'],
['14070002','Muddy'],
['14070003','Fremont'],
['14070004','Dirty Devil'],
['14070005','Escalante'],
['14070006','Lower Lake Powell'],
['14070007','Paria'],
['1408','San Juan'],
['140801','Upper San Juan'],
['14080101','Upper San Juan'],
['14080102','Piedra'],
['14080103','Blanco Canyon'],
['14080104','Animas'],
['14080105','Middle San Juan'],
['14080106','Chaco'],
['14080107','Mancos'],
['140802','Lower San Juan'],
['14080201','Lower San Juan-Four Corners'],
['14080202','Mcelmo'],
['14080203','Montezuma'],
['14080204','Chinle'],
['14080205','Lower San Juan'],
['15','Lower Colorado'],
['1501','Lower Colorado-Lake Mead'],
['150100','Lower Colorado-Lake Mead'],
['15010001','Lower Colorado-Marble Canyon'],
['15010002','Grand Canyon'],
['15010003','Kanab'],
['15010004','Havasu Canyon'],
['15010005','Lake Mead'],
['15010006','Grand Wash'],
['15010007','Hualapai Wash'],
['15010008','Upper Virgin'],
['15010009','Fort Pierce Wash'],
['15010010','Lower Virgin'],
['15010011','White'],
['15010012','Muddy'],
['15010013','Meadow Valley Wash'],
['15010014','Detrital Wash'],
['15010015','Las Vegas Wash'],
['1502','Little Colorado'],
['150200','Little Colorado'],
['15020001','Little Colorado headwaters'],
['15020002','Upper Little Colorado'],
['15020003','Carrizo Wash'],
['15020004','Zuni'],
['15020005','Silver'],
['15020006','Upper Puerco'],
['15020007','Lower Puerco'],
['15020008','Middle Little Colorado'],
['15020009','Leroux Wash'],
['15020010','Chevelon Canyon'],
['15020011','Cottonwood Wash'],
['15020012','Corn-Oraibi'],
['15020013','Polacca Wash'],
['15020014','Jadito Wash'],
['15020015','Canyon Diablo'],
['15020016','Lower Little Colorado'],
['15020017','Dinnebito Wash'],
['15020018','Moenkopi Wash'],
['1503','Lower Colorado'],
['150301','Lower Colorado'],
['15030101','Havasu-Mohave Lakes'],
['15030102','Piute Wash'],
['15030103','Sacramento Wash'],
['15030104','Imperial Reservoir'],
['15030105','Bouse Wash'],
['15030106','Tyson Wash'],
['15030107','Lower Colorado'],
['15030108','Yuma Desert'],
['150302','Bill Williams'],
['15030201','Big Sandy'],
['15030202','Burro'],
['15030203','Santa Maria'],
['15030204','Bill Williams'],
['1504','Upper Gila'],
['150400','Upper Gila'],
['15040001','Upper Gila'],
['15040002','Upper Gila-Mangas'],
['15040003','Animas Valley'],
['15040004','San Francisco'],
['15040005','Upper Gila-San Carlos Reservoir'],
['15040006','San Simon'],
['15040007','San Carlos'],
['1505','Middle Gila'],
['150501','Middle Gila'],
['15050100','Middle Gila'],
['150502','San Pedro-Willcox'],
['15050201','Willcox Playa'],
['15050202','Upper San Pedro'],
['15050203','Lower San Pedro'],
['150503','Santa Cruz'],
['15050301','Upper Santa Cruz'],
['15050302','Rillito'],
['15050303','Lower Santa Cruz'],
['15050304','Brawley Wash'],
['15050305','Aguirre Valley'],
['15050306','Santa Rosa Wash'],
['1506','Salt'],
['150601','Salt'],
['15060101','Black'],
['15060102','White'],
['15060103','Upper Salt'],
['15060104','Carrizo'],
['15060105','Tonto'],
['15060106','Lower Salt'],
['150602','Verde'],
['15060201','Big Chino-Williamson Valley'],
['15060202','Upper Verde'],
['15060203','Lower Verde'],
['1507','Lower Gila'],
['150701','Lower Gila-Agua Fria'],
['15070101','Lower Gila-Painted Rock Reservoir'],
['15070102','Agua Fria'],
['15070103','Hassayampa'],
['15070104','Centennial Wash'],
['150702','Lower Gila'],
['15070201','Lower Gila'],
['15070202','Tenmile Wash'],
['15070203','San Cristobal Wash'],
['1508','Sonora'],
['150801','Rio Sonoyta'],
['15080101','San Simon Wash'],
['15080102','Rio Sonoyta'],
['15080103','Tule Desert'],
['150802','Rio De La Concepcion'],
['15080200','Rio De La Concepcion'],
['150803','Rio De Bavispe'],
['15080301','Whitewater Draw'],
['15080302','San Bernardino Valley'],
['15080303','Cloverdale'],
['16','Great Basin'],
['1601','Bear'],
['160101','Upper Bear'],
['16010101','Upper Bear'],
['16010102','Central Bear'],
['160102','Lower Bear'],
['16010201','Bear Lake'],
['16010202','Middle Bear'],
['16010203','Little Bear-Logan'],
['16010204','Lower Bear-Malad'],
['1602','Great Salt Lake'],
['160201','Weber'],
['16020101','Upper Weber'],
['16020102','Lower Weber'],
['160202','Jordan'],
['16020201','Utah Lake'],
['16020202','Spanish Fork'],
['16020203','Provo'],
['16020204','Jordan'],
['160203','Great Salt Lake'],
['16020301','Hamlin-Snake Valleys'],
['16020302','Pine Valley'],
['16020303','Tule Valley'],
['16020304','Rush-Tooele Valleys'],
['16020305','Skull Valley'],
['16020306','Southern Great Salt Lake Desert'],
['16020307','Pilot-Thousand Springs'],
['16020308','Northern Great Salt Lake Desert'],
['16020309','Curlew Valley'],
['16020310','Great Salt Lake'],
['1603','Escalante Desert-Sevier Lake'],
['160300','Escalante Desert-Sevier Lake'],
['16030001','Upper Sevier'],
['16030002','East Fork Sevier'],
['16030003','Middle Sevier'],
['16030004','San Pitch'],
['16030005','Lower Sevier'],
['16030006','Escalante Desert'],
['16030007','Beaver Bottoms-Upper Beaver'],
['16030008','Lower Beaver'],
['16030009','Sevier Lake'],
['1604','Black Rock Desert-Humboldt'],
['160401','Humboldt'],
['16040101','Upper Humboldt'],
['16040102','North Fork Humboldt'],
['16040103','South Fork Humboldt'],
['16040104','Pine'],
['16040105','Middle Humboldt'],
['16040106','Rock'],
['16040107','Reese'],
['16040108','Lower Humboldt'],
['16040109','Little Humboldt'],
['160402','Black Rock Desert'],
['16040201','Upper Quinn'],
['16040202','Lower Quinn'],
['16040203','Smoke Creek Desert'],
['16040204','Massacre Lake'],
['16040205','Thousand-Virgin'],
['1605','Central Lahontan'],
['160501','Truckee'],
['16050101','Lake Tahoe'],
['16050102','Truckee'],
['16050103','Pyramid-Winnemucca Lakes'],
['16050104','Granite Springs Valley'],
['160502','Carson'],
['16050201','Upper Carson'],
['16050202','Middle Carson'],
['16050203','Carson Desert'],
['160503','Walker'],
['16050301','East Walker'],
['16050302','West Walker'],
['16050303','Walker'],
['16050304','Walker Lake'],
['1606','Central Nevada Desert Basins'],
['160600','Central Nevada Desert Basins'],
['16060001','Dixie Valley'],
['16060002','Gabbs Valley'],
['16060003','Southern Big Smoky Valley'],
['16060004','Northern Big Smoky Valley'],
['16060005','Diamond-Monitor Valleys'],
['16060006','Little Smoky-Newark Valleys'],
['16060007','Long-Ruby Valleys'],
['16060008','Spring-Steptoe Valleys'],
['16060009','Dry Lake Valley'],
['16060010','Fish Lake-Soda Spring Valleys'],
['16060011','Ralston-Stone Cabin Valleys'],
['16060012','Hot Creek-Railroad Valleys'],
['16060013','Cactus-Sarcobatus Flats'],
['16060014','Sand Spring-Tikaboo Valleys'],
['16060015','Ivanpah-Pahrump Valleys'],
['17','Pacific Northwest'],
['1701','Kootenai-Pend Oreille-Spokane'],
['170101','Kootenai'],
['17010101','Upper Kootenai'],
['17010102','Fisher'],
['17010103','Yaak'],
['17010104','Lower Kootenai'],
['17010105','Moyie'],
['170102','Pend Oreille'],
['17010201','Upper Clark Fork'],
['17010202','Flint-Rock'],
['17010203','Blackfoot'],
['17010204','Middle Clark Fork'],
['17010205','Bitterroot'],
['17010206','North Fork Flathead'],
['17010207','Middle Fork Flathead'],
['17010208','Flathead Lake'],
['17010209','South Fork Flathead'],
['17010210','Stillwater'],
['17010211','Swan'],
['17010212','Lower Flathead'],
['17010213','Lower Clark Fork'],
['17010214','Pend Oreille Lake'],
['17010215','Priest'],
['17010216','Pend Oreille'],
['170103','Spokane'],
['17010301',"Upper Coeur D'alene"],
['17010302',"South Fork Coeur D'alene"],
['17010303',"Coeur D'alene Lake"],
['17010304','St. Joe'],
['17010305','Upper Spokane'],
['17010306','Hangman'],
['17010307','Lower Spokane'],
['17010308','Little Spokane'],
['1702','Upper Columbia'],
['170200','Upper Columbia'],
['17020001','Franklin D. Roosevelt Lake'],
['17020002','Kettle'],
['17020003','Colville'],
['17020004','Sanpoil'],
['17020005','Chief Joseph'],
['17020006','Okanogan'],
['17020007','Similkameen'],
['17020008','Methow'],
['17020009','Lake Chelan'],
['17020010','Upper Columbia-Entiat'],
['17020011','Wenatchee'],
['17020012','Moses Coulee'],
['17020013','Upper Crab'],
['17020014','Banks Lake'],
['17020015','Lower Crab'],
['17020016','Upper Columbia-Priest Rapids'],
['1703','Yakima'],
['170300','Yakima'],
['17030001','Upper Yakima'],
['17030002','Naches'],
['17030003','Lower Yakima, Washington'],
['1704','Upper Snake'],
['170401','Snake headwaters'],
['17040101','Snake headwaters'],
['17040102','Gros Ventre'],
['17040103','Greys-Hobock'],
['17040104','Palisades'],
['17040105','Salt'],
['170402','Upper Snake'],
['17040201','Idaho Falls'],
['17040202','Upper Henrys'],
['17040203','Lower Henrys'],
['17040204','Teton'],
['17040205','Willow'],
['17040206','American Falls'],
['17040207','Blackfoot'],
['17040208','Portneuf'],
['17040209','Lake Walcott'],
['17040210','Raft'],
['17040211','Goose'],
['17040212','Upper Snake-Rock'],
['17040213','Salmon Falls'],
['17040214','Beaver-Camas'],
['17040215','Medicine Lodge'],
['17040216','Birch'],
['17040217','Little Lost'],
['17040218','Big Lost'],
['17040219','Big Wood'],
['17040220','Camas'],
['17040221','Little Wood'],
['1705','Middle Snake'],
['170501','Middle Snake-Boise'],
['17050101','C. J. Idaho'],
['17050102','Bruneau'],
['17050103','Middle Snake-Succor'],
['17050104','Upper Owyhee'],
['17050105','South Fork Owyhee'],
['17050106','East Little Owyhee. Nevada,'],
['17050107','Middle Owyhee'],
['17050108','Jordan'],
['17050109','Crooked-Rattlesnake'],
['17050110','Lower Owyhee'],
['17050111','North and Middle Forks Boise'],
['17050112','Boise-Mores'],
['17050113','South Fork Boise'],
['17050114','Lower Boise'],
['17050115','Middle Snake-Payette'],
['17050116','Upper Malheur'],
['17050117','Lower Malheur'],
['17050118','Bully'],
['17050119','Willow'],
['17050120','South Fork Payette'],
['17050121','Middle Fork Payette'],
['17050122','Payette'],
['17050123','North Fork Payette'],
['17050124','Weiser'],
['170502','Middle Snake-Powder'],
['17050201','Brownlee Reservoir'],
['17050202','Burnt'],
['17050203','Powder'],
['1706','Lower Snake'],
['170601','Lower Snake'],
['17060101','Hells Canyon'],
['17060102','Imnaha'],
['17060103','Lower Snake-Asotin'],
['17060104','Upper Grande Ronde'],
['17060105','Wallowa'],
['17060106','Lower Grande Ronde'],
['17060107','Lower Snake-Tucannon'],
['17060108','Palouse'],
['17060109','Rock'],
['17060110','Lower Snake'],
['170602','Salmon'],
['17060201','Upper Salmon'],
['17060202','Pahsimeroi'],
['17060203','Middle Salmon-Panther'],
['17060204','Lemhi'],
['17060205','Upper Middle Fork Salmon'],
['17060206','Lower Middle Fork Salmon'],
['17060207','Middle Salmon-Chamberlain'],
['17060208','South Fork Salmon'],
['17060209','Lower Salmon'],
['17060210','Little Salmon'],
['170603','Clearwater'],
['17060301','Upper Selway'],
['17060302','Lower Selway'],
['17060303','Lochsa'],
['17060304','Middle Fork Clearwater'],
['17060305','South Fork Clearwater'],
['17060306','Clearwater'],
['17060307','Upper North Fork Clearwater'],
['17060308','Lower North Fork Clearwater'],
['1707','Middle Columbia'],
['170701','Middle Columbia'],
['17070101','Middle Columbia-Lake Wallula'],
['17070102','Walla Walla'],
['17070103','Umatilla'],
['17070104','Willow'],
['17070105','Middle Columbia-Hood'],
['17070106','Klickitat'],
['170702','John Day'],
['17070201','Upper John Day'],
['17070202','North Fork John Day'],
['17070203','Middle Fork John Day'],
['17070204','Lower John Day'],
['170703','Deschutes'],
['17070301','Upper Deschutes'],
['17070302','Little Deschutes'],
['17070303','Beaver-South Fork'],
['17070304','Upper Crooked'],
['17070305','Lower Crooked'],
['17070306','Lower Deschutes'],
['17070307','Trout'],
['1708','Lower Columbia'],
['170800','Lower Columbia'],
['17080001','Lower Columbia-Sandy'],
['17080002','Lewis'],
['17080003','Lower Columbia-Clatskanie'],
['17080004','Upper Cowlitz'],
['17080005','Lower Cowlitz'],
['17080006','Lower Columbia'],
['1709','Willamette'],
['170900','Willamette'],
['17090001','Middle Fork Willamette'],
['17090002','Coast Fork Willamette'],
['17090003','Upper Willamette'],
['17090004','Mckenzie'],
['17090005','North Santiam'],
['17090006','South Santiam'],
['17090007','Middle Willamette'],
['17090008','Yamhill'],
['17090009','Molalla-Pudding'],
['17090010','Tualatin'],
['17090011','Clackamas'],
['17090012','Lower Willamette'],
['1710','Oregon-Washington Coastal'],
['171001','Washington Coastal'],
['17100101','Hoh-Quillayute'],
['17100102','Queets-Quinault'],
['17100103','Upper Chehalis'],
['17100104','Lower Chehalis'],
['17100105','Grays Harbor'],
['17100106','Willapa Bay'],
['171002','Northern Oregon Coastal'],
['17100201','Necanicum'],
['17100202','Nehalem'],
['17100203','Wilson-Trask-Nestucca'],
['17100204','Siletz-Yaquina'],
['17100205','Alsea'],
['17100206','Siuslaw'],
['17100207','Siltcoos'],
['171003','Southern Oregon Coastal'],
['17100301','North Umpqua'],
['17100302','South Umpqua'],
['17100303','Umpqua'],
['17100304','Coos'],
['17100305','Coquille'],
['17100306','Sixes'],
['17100307','Upper Rogue'],
['17100308','Middle Rogue'],
['17100309','Applegate'],
['17100310','Lower Rogue'],
['17100311','Illinois'],
['17100312','Chetco'],
['1711','Puget Sound'],
['171100','Puget Sound'],
['17110001','Fraser'],
['17110002','Strait of Georgia'],
['17110003','San Juan Islands'],
['17110004','Nooksack'],
['17110005','Upper Skagit'],
['17110006','Sauk'],
['17110007','Lower Skagit'],
['17110008','Stillaguamish'],
['17110009','Skykomish'],
['17110010','Snoqualmie'],
['17110011','Snohomish'],
['17110012','Lake Washington'],
['17110013','Duwamish'],
['17110014','Puyallup'],
['17110015','Nisqually'],
['17110016','Deschutes'],
['17110017','Skokomish'],
['17110018','Hood Canal'],
['17110019','Puget Sound'],
['17110020','Dungeness-Elwha'],
['17110021','Crescent-Hoko'],
['1712','Oregon closed basins'],
['171200','Oregon closed basins'],
['17120001','Harney-Malheur Lakes'],
['17120002','Silvies'],
['17120003','Donner Und Blitzen'],
['17120004','Silver'],
['17120005','Summer Lake'],
['17120006','Lake Abert'],
['17120007','Warner Lakes'],
['17120008','Guano'],
['17120009','Alvord Lake'],
['18','California'],
['1801','Klamath-Northern California Coastal'],
['180101','Northern California Coastal'],
['18010101','Smith'],
['18010102','Mad-Redwood'],
['18010103','Upper Eel'],
['18010104','Middle Fork Eel'],
['18010105','Lower Eel'],
['18010106','South Fork Eel'],
['18010107','Mattole'],
['18010108','Big-Navarro-Garcia'],
['18010109','Gualala-Salmon'],
['18010110','Russian'],
['18010111','Bodega Bay'],
['180102','Klamath'],
['18010201','Williamson'],
['18010202','Sprague'],
['18010203','Upper Klamath Lake'],
['18010204','Lost'],
['18010205','Butte'],
['18010206','Upper Klamath'],
['18010207','Shasta'],
['18010208','Scott'],
['18010209','Lower Klamath'],
['18010210','Salmon'],
['18010211','Trinity'],
['18010212','South Fork Trinity'],
['1802','Sacramento'],
['180200','Upper Sacramento'],
['18020001','Goose Lake'],
['18020002','Upper Pit'],
['18020003','Lower Pit'],
['18020004','Mccloud'],
['18020005','Sacramento headwaters'],
['180201','Lower Sacramento'],
['18020101','Sacramento-Lower Cow-Lower Clear'],
['18020102','Lower Cottonwood'],
['18020103','Sacramento-Lower Thomes'],
['18020104','Sacramento-Stone Corral'],
['18020105','Lower Butte'],
['18020106','Lower Feather'],
['18020107','Lower Yuba'],
['18020108','Lower Bear'],
['18020109','Lower Sacramento'],
['18020110','Lower Cache'],
['18020111','Lower American'],
['18020112','Sacramento-Upper Clear'],
['18020113','Cottonwood headwaters'],
['18020114','Upper Elder-Upper Thomes'],
['18020115','Upper Stony'],
['18020116','Upper Cache'],
['18020117','Upper Putah'],
['18020118','Upper Cow-Battle'],
['18020119','Mill-Big Chico'],
['18020120','Upper Butte'],
['18020121','North Fork Feather'],
['18020122','East Branch North Fork Feather'],
['18020123','Middle Fork Feather'],
['18020124','Honcut headwaters'],
['18020125','Upper Yuba'],
['18020126','Upper Bear'],
['18020127','Upper Coon-Upper Auburn'],
['18020128','North Fork American'],
['18020129','South Fork American'],
['1803','Tulare-Buena Vista Lakes'],
['180300','Tulare-Buena Vista Lakes'],
['18030001','Upper Kern'],
['18030002','South Fork Kern'],
['18030003','Middle Kern-Upper Tehachapi-'],
['18030004','Upper Poso'],
['18030005','Upper Deer-Upper White'],
['18030006','Upper Tule'],
['18030007','Upper Kaweah'],
['18030008','Mill'],
['18030009','Upper Dry'],
['18030010','Upper King'],
['18030011','Upper Los Gatos-Avenal'],
['18030012','Tulare-Buena Vista Lakes'],
['1804','San Joaquin'],
['180400','San Joaquin'],
['18040001','Middle San Joaquin-Lower'],
['18040002','Middle San Joaquin-Lower'],
['18040003','San Joaquin Delta'],
['18040004','Lower Calaveras-Mormon Slough'],
['18040005','Lower Cosumnes-Lower Mokelumne'],
['18040006','Upper San Joaquin'],
['18040007','Upper Chowchilla-Upper Fresno'],
['18040008','Upper Merced'],
['18040009','Upper Tuolumne'],
['18040010','Upper Stanislaus'],
['18040011','Upper Calaveras'],
['18040012','Upper Mokelumne'],
['18040013','Upper Cosumnes'],
['18040014','Panoche-San Luis Reservoir'],
['1805','San Francisco Bay'],
['180500','San Francisco Bay'],
['18050001','Suisun Bay'],
['18050002','San Pablo Bay'],
['18050003','Coyote'],
['18050004','San Francisco Bay'],
['18050005','Tomales-Drake Bays'],
['18050006','San Francisco Coastal South'],
['1806','Central California Coastal'],
['180600','Central California Coastal'],
['18060001','San Lorenzo-Soquel'],
['18060002','Pajaro'],
['18060003','Carrizo Plain'],
['18060004','Estrella'],
['18060005','Salinas'],
['18060006','Central Coastal'],
['18060007','Cuyama'],
['18060008','Santa Maria'],
['18060009','San Antonio'],
['18060010','Santa Ynez'],
['18060011','Alisal-Elkhorn Sloughs'],
['18060012','Carmel'],
['18060013','Santa Barbara Coastal'],
['18060014','Santa Barbara Channel Islands'],
['1807','Southern California Coastal'],
['180701','Ventura-San Gabriel Coastal'],
['18070101','Ventura'],
['18070102','Santa Clara'],
['18070103','Calleguas'],
['18070104','Santa Monica Bay'],
['18070105','Los Angeles'],
['18070106','San Gabriel'],
['18070107','San Pedro Channel Islands'],
['180702','Santa Ana'],
['18070201','Seal Beach'],
['18070202','San Jacinto'],
['18070203','Santa Ana'],
['18070204','Newport Bay'],
['180703','Laguna-San Diego Coastal'],
['18070301','Aliso-San Onofre'],
['18070302','Santa Margarita'],
['18070303','San Luis Rey-Escondido'],
['18070304','San Diego'],
['18070305','Cottonwood-Tijuana'],
['1808','North Lahontan'],
['180800','North Lahontan'],
['18080001','Surprise Valley'],
['18080002','Madeline Plains'],
['18080003','Honey-Eagle Lakes'],
['1809','Northern Mojave-Mono Lake'],
['180901','Mono-Owens Lakes'],
['18090101','Mono Lake'],
['18090102','Crowley Lake'],
['18090103','Owens Lake'],
['180902','Northern Mojave'],
['18090201','Eureka-Saline Valleys'],
['18090202','Upper Amargosa'],
['18090203','Death Valley-Lower Amargosa'],
['18090204','Panamint Valley'],
['18090205','Indian Wells-Searles Valleys'],
['18090206','Antelope-Fremont Valleys'],
['18090207','Coyote-Cuddeback Lakes'],
['18090208','Mojave'],
['1810','Southern Mojave-Salton Sea'],
['181001','Southern Mojave'],
['18100100','Southern Mojave'],
['181002','Salton Sea'],
['18100200','Salton Sea'],
['19','Alaska'],
['1901','Arctic Slope'],
['190100','Arctic Slope'],
['19010001','East Arctic Slope'],
['19010002','Colville'],
['19010003','West Arctic Slope'],
['1902','Northwest Alaska'],
['190200','Northwest Alaska'],
['19020001','Kotzebue Sound'],
['19020002','Norton Sound-St. Lawrence Island'],
['1903','Yukon'],
['190300','Yukon'],
['19030001','Fortymile-White'],
['19030002','Upper Yukon'],
['19030003','Middle Yukon'],
['19030004','Tanana'],
['19030005','Koyukuk'],
['19030006','Lower Yukon'],
['1904','Southwest Alaska'],
['190400','Southwest Alaska'],
['19040001','Kuskokwim Bay-Nunivak Island-St. Matthew Island'],
['19040002','Bristol Bay'],
['19040003','Aleutian-Pribilof Islands'],
['1905','South Central Alaska'],
['190500','South Central Alaska'],
['19050001','Kodiak-Shelikof'],
['19050002','Cook Inlet'],
['19050003','Gulf of Alaska'],
['1906','Southeast Alaska'],
['190600','Southeast Alaska'],
['19060000','Southeast Alaska'],
['20','Hawaii'],
['2001','Hawaii'],
['200100','Hawaii'],
['20010000','Hawaii'],
['2002','Maui'],
['200200','Maui'],
['20020000','Maui'],
['2003','Kahoolawe'],
['200300','Kahoolawe'],
['20030000','Kahoolawe'],
['2004','Lanai'],
['200400','Lanai'],
['20040000','Lanai'],
['2005','Molokai'],
['200500','Molokai'],
['20050000','Molokai'],
['2006','Oahu'],
['200600','Oahu'],
['20060000','Oahu'],
['2007','Kauai'],
['200700','Kauai'],
['20070000','Kauai'],
['2008','Niihau'],
['200800','Niihau'],
['20080000','Niihau'],
['2009','Northwestern Hawaiian Islands'],
['200900','Northwestern Hawaiian Islands'],
['20090000','Northwestern Hawaiian Islands'],
['21','Caribbean'],
['2101','Puerto Rico'],
['210100','Puerto Rico'],
['21010001','Interior Puerto Rico'],
['21010002','Cibuco-Guajataca'],
['21010003','Culebrinas-Guanajibo'],
['21010004','Southern Puerto Rico'],
['21010005','Eastern Puerto Rico'],
['21010006','Puerto Rican Islands'],
['2102','Virgin Islands'],
['210200','Virgin Islands'],
['21020001','St. John-St. Thomas'],
['21020002','St. Croix'],
['2103','Caribbean Outlying Areas'],
['210300','Caribbean Outlying Areas'],
['21030001','Canal Zone'],
['21030002','Navassa'],
['21030003','Roncador-Serrana']]
def HUC_L2():
    """Return the level-2 HUC entries: those whose code is two digits long."""
    return [entry for entry in hucs if len(entry[0]) == 2]
if __name__ == '__main__':
    # Use the function-call form of print: the original Python 2 print
    # statement is a SyntaxError under Python 3, while print(...) works
    # identically on both interpreters here.
    print(HUC_L2())
|
IMDProjects/IM_Climate
|
IM_Climate_py/hucs.py
|
Python
|
mit
| 103,491
|
[
"CRYSTAL",
"Elk"
] |
2a93baf26cd79ac641612cf615194d2d8d647d3a165e10052185f26527cfc409
|
#!/usr/bin/env python
#
# Read Output of a CP2K BSSE Calculation and Calculate the Interaction Energy.
# Only for 2 Component System A-B with 5 Subcalculations in one Output.
import os, shutil
from scm.plams import *
def main(name, assign):
    """Analyze a finished CP2K BSSE calculation and print interaction energies.

    Arguments:
        name: path to the directory of the CP2K BSSE calculation.
        assign: dict mapping the labels "AB", "A", "B", "AB_ghost" and
            "A_ghostB" to their index in the list of sub-calculation
            energies parsed from the output.
    """
    # Initialise PLAMS exactly once: reuse an already-running job manager,
    # otherwise wipe any stale workspace and start fresh. The original code
    # expressed this with a bare `raise` (no active exception -> RuntimeError)
    # caught by a bare `except:`; the explicit flag below has the same effect
    # without hiding unrelated failures behind a silent catch-all.
    try:
        plams_ready = isinstance(config['default_jobmanager'], JobManager)
    except Exception:  # config missing or not yet populated
        plams_ready = False
    if plams_ready:
        print('PLAMS already loaded')
    else:
        if os.path.isdir('tmp.plams'):
            shutil.rmtree('tmp.plams')
        init(folder='tmp.plams')

    job = Cp2kJob.load_external(name)
    assert job.results.check_scf()
    struct = toASE(Cp2kSettings2Mol(job.settings))

    # Slice apart the "MOLECULE KIND INFORMATION" table. The offsets
    # ([9::11] and [8::11]) assume a fixed CP2K output layout of 11 lines
    # per fragment block -- TODO confirm against the CP2K version in use.
    molInfo = job.results.grep_output(pattern="MOLECULE KIND INFORMATION", options="-A 9")
    nAtoms = [int(s.split()[-1]) for s in molInfo[9::11]]
    nKinds = [int(s.split()[-1]) for s in molInfo[8::11]]
    # first entry is the generic non-bsse input
    del nKinds[0], nAtoms[0]

    electronInfo = job.results.grep_output(pattern="Number of electrons:", options="")
    nElectrons = [int(s.split()[-1]) for s in electronInfo]
    # pair up the two counts per calculation (spin A and B)
    nElectrons = [(nElectrons[i], nElectrons[i + 1]) for i in range(0, len(nElectrons), 2)]

    # One converged SCF energy per sub-calculation; the [15::17] stride again
    # assumes CP2K's fixed output layout -- TODO confirm.
    energies = job.results.grep_output(pattern="SCF run converged in", options="-A 15")[15::17]
    energies = [float(s.split()[-1]) for s in energies]

    print("Total Number of Atoms: {}".format(len(struct)))
    print("Electrons: {}".format(nElectrons))
    print("Kinds of Atoms: {}".format(nKinds))
    print("Number of Atoms: {}".format(nAtoms))
    print("Energies: {}".format(energies))
    print(assign)

    # Uncorrected interaction energy: E(AB) - E(A) - E(B)
    interaction = energies[assign["AB"]] - energies[assign["A"]] - energies[assign["B"]]
    print("Interaction Energy NOT Corrected: {}".format(interaction))
    # Counterpoise-corrected interaction energy using the ghost-basis runs.
    interaction_corr = energies[assign["AB"]] - energies[assign["AB_ghost"]] - energies[assign["A_ghostB"]]
    print("Interaction Energy Corrected: {}".format(interaction_corr))
    # Basis-set superposition error estimate.
    bsse = energies[assign["AB_ghost"]] - energies[assign["A"]] + energies[assign["A_ghostB"]] - energies[assign["B"]]
    print("BSSE: {}".format(bsse))
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='Analyze CP2K BSSE Calculation')
    parser.add_argument('input', type=str, help='Directory of the CP2K BSSE Calculation')
    parser.add_argument('--assign', type=int, nargs=5, help='Order of Energies. Default: 4 0 1 2 3', default=[4,0,1,2,3])
    args = parser.parse_args()
    # Map each energy label onto its position in the parsed energy list.
    assign = dict(zip(["AB", "A", "B", "AB_ghost", "A_ghostB"], args.assign))
    main(args.input, assign)
|
patrickmelix/Python4ChemistryTools
|
cp2k-bsse.py
|
Python
|
mit
| 2,654
|
[
"CP2K"
] |
730eb1a780adc71b89dc00dcbe730ea9e265c88cb2fd19dfe7d2474e31ef1a7e
|
"""ll.py - Implementation of an LL(1) (Left to right Leftmost) parser generator
"""
# Copyright 2012 Erich Blume <blume.erich@gmail.com>
# ===========================
#
# This file is part of pcc
#
# pcc is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pcc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pcc, in a file called COPYING. If not, see
# <http://www.gnu.org/licenses/>.
from pcc.parser import Parser, GrammarError, ParsingError
from pcc.symbols import Symbol, Token, EOF, EPSILON, SymbolString, Lexeme
import itertools
import re
class LLParser(Parser):
    """An LL(1) predictive parser generated from a grammar.

    Productions are registered with ``addproduction``; the first call to
    ``parse`` (or ``first``/``follow``) triggers ``finalize``, which builds
    the FIRST and FOLLOW sets and the predictive parsing table.
    """

    def __init__(self, lexer):
        self.lexer = lexer
        self.finalized = False
        # nonterminal Symbol -> list of (SymbolString rule, action)
        self.productions = {}
        # Set by addproduction(start_production=True):
        # (start symbol, rule + EOF, action)
        self.start = None
        self.terminals = {t for t in lexer.tokens.values()} | {EOF}

    def addproduction(self, symbol, rule, action, start_production=False):
        """Register the production ``symbol -> rule`` with semantic ``action``.

        ``rule`` is a whitespace-separated string of token names, symbol
        names and quoted single-character literals; an empty rule registers
        an epsilon-production. Exactly one production must be added with
        ``start_production=True``.
        """
        if self.finalized:
            raise ValueError("Can't add a production after finalizing the "
                "parser (maybe you called parse() too soon?)")
        symbol = Symbol(symbol)
        if symbol.name in self.lexer.tokens:
            raise GrammarError('Symbol conflicts with Token name: {}'.format(
                symbol.name))
        # Only one start production is allowed.
        if self.start is not None and start_production:
            raise GrammarError('A Start production has already been specified.')
        # Symbolize the rule
        rule_symbols = SymbolString([_make_symbol(self.lexer, x)
                                     for x in rule.split()])
        # Handle epsilon-productions:
        if len(rule_symbols) == 0:
            rule_symbols = SymbolString((EPSILON,))
        # The start production is terminated with EOF.
        if start_production:
            self.start = (symbol, rule_symbols + EOF, action)
        # Add the final production to the production table
        if symbol not in self.productions:
            self.productions[symbol] = []
        self.productions[symbol].append((rule_symbols, action))
        # Add any new implicit symbols to the production table
        for implicit in rule_symbols:
            if (not implicit.terminal() and
                    implicit not in self.productions):
                self.productions[implicit] = []
        # Add any new string literal tokens to the terminals set
        self.terminals |= {s for s in rule_symbols if s.terminal()}

    def finalize(self):
        """This function actually performs the 'parser generation' that gives
        ``pcc`` its name (compiler compiling). Callable only once, it constructs
        the FIRST and FOLLOW sets, the parsing table, the action table, etc.
        In general terms, it prepares the parser to be able to call 'parse'.

        Raises GrammarError if no start production was registered, if a
        nonterminal has no productions, or if the grammar is not LL(1).
        """
        # TODO - after this is called, the object should also be serializible.
        #        In the future, a finalized parser should support being written
        #        to (and read from) disk.
        if self.finalized:
            raise ValueError('Attempt to finalize an already finalized parser.')
        self.finalized = True
        # BUG FIX: this previously tested ``self.first is None`` -- but
        # ``first`` is a bound method and can never be None, so a missing
        # start production slipped through and surfaced later as a cryptic
        # TypeError on ``self.start[0]`` below.
        if self.start is None:
            raise GrammarError('At least one production must be marked as the '
                'start production.')
        # Initialize the (empty) FIRST and FOLLOW caches
        self.FIRST = {}
        self.FOLLOW = {symbol: set() for symbol in self.productions}
        # Initialize the parsing table
        self.ptable = {symbol: {terminal: [] for terminal in self.terminals}
            for symbol in self.productions}
        # Calculate the FOLLOW sets using the dynamic definition of FIRST sets
        # (Note that this MUST happen before we call self.follow, so it MUST
        # happen before the grammar error detection routine)
        #
        # This uses Aho et.al.'s definition, in which empty sets are created,
        # the start symbol's FOLLOW is seeded with EOF, and then one loops over
        # the entire grammar indefinitely making corrections until the answer
        # (provably always) converges.
        self.FOLLOW[self.start[0]] |= {EOF}
        added_something_flag = True
        while added_something_flag:
            added_something_flag = False
            for symbol, rules in self.productions.items():
                for rule, action in rules:
                    for index in range(len(rule)):
                        rule_symbol = rule[index]
                        if rule_symbol.terminal():
                            continue
                        follow_set = self.FOLLOW[rule_symbol]
                        if index < len(rule) - 1:
                            # More symbols follow in this rule: FOLLOW gains
                            # FIRST of the remainder.
                            first_set = self.first(rule[index + 1:])
                            added_something_flag |= _update_follow(
                                follow_set, first_set)
                            if EPSILON in first_set:
                                # The remainder can vanish, so also inherit
                                # the producing symbol's FOLLOW.
                                symbol_follow = self.FOLLOW[symbol]
                                added_something_flag |= _update_follow(
                                    follow_set, symbol_follow)
                        elif index == len(rule) - 1:
                            # The last symbol in the rule inherits the
                            # producing symbol's FOLLOW.
                            symbol_follow = self.FOLLOW[symbol]
                            added_something_flag |= _update_follow(
                                follow_set, symbol_follow)
        # Grammar error detection
        for symbol, rules in self.productions.items():
            # Detect the case that there are nonterminals without productions
            if len(rules) == 0:
                raise GrammarError('Symbol {} has no productions'.format(
                    symbol.name))
            # LL(1) grammar rule detection
            elif len(rules) > 1:
                for (r1, _), (r2, _) in itertools.combinations(rules, 2):
                    if (
                        not self.first(r1).isdisjoint(self.first(r2)) or
                        (EPSILON in self.first(r1) and not
                            self.first(r2).isdisjoint(self.follow(symbol))) or
                        (EPSILON in self.first(r2) and not
                            self.first(r1).isdisjoint(self.follow(symbol)))
                    ):
                        raise GrammarError("Grammar is not LL(1) - ambiguous "
                            "derivation for symbol {}".format(
                            symbol.name))
        # construct the parsing table
        for symbol, rules in self.productions.items():
            for rule, action in rules:
                for term in self.first(rule):  # either rule or symbol (GULP)
                    self.ptable[symbol][term].append((rule, action))
                if EPSILON in self.first(rule):
                    for term in self.follow(symbol):
                        self.ptable[symbol][term].append((rule, action))

    def first(self, symbols):
        """Return the set of terminal ``Symbol``s which belong to this string's
        FIRST set. See Aho, Ullman et.al.'s 2nd edition "Compilers...",
        section 4.4.2.

        `symbols` must be a ``pcc.symbols.SymbolString`` object.
        The returned value will be a set of terminal ``Symbol`` objects.
        """
        # First, finalize (end rule-adding phase)
        if not self.finalized:
            self.finalize()
        # Dynamic return to cut down execution time
        if symbols in self.FIRST:
            return self.FIRST[symbols]
        # Actual definition:
        if len(symbols) == 1:
            symbol = symbols[0]
            if symbol.terminal():
                # Terminal singletons are their own FIRST set
                result = {symbol}
                self.FIRST[symbols] = result
                return result
            else:
                # Non-terminal singletons use the FIRST of every rule they
                # produce.
                rules = self.productions[symbol]
                result = set()
                for rule, action in rules:
                    result |= self._first_string(rule)
                # NOTE: EPSILON from an epsilon-production is already
                # contributed by _first_string, so no extra check is needed.
                # (The original ``SymbolString((EPSILON,)) in rules`` test
                # compared a SymbolString against (rule, action) tuples and
                # could never be true -- dead code, removed.)
                self.FIRST[symbols] = result
                return result
        else:
            # String of symbols (non-singleton)
            result = self._first_string(symbols)
            self.FIRST[symbols] = result
            return result

    def _first_string(self, symbols):
        """Helper func of ``first()`` on a SymbolString.

        Unions the FIRST sets of the symbols left to right, stopping at the
        first symbol that cannot derive EPSILON; includes EPSILON only if
        every symbol can vanish.
        """
        result = set()
        flag_epsilon = True
        for symbol in symbols:
            new_set = self.first(SymbolString((symbol,)))
            result |= (new_set - {EPSILON})
            if EPSILON not in new_set:
                flag_epsilon = False
                break
        if flag_epsilon:
            result |= {EPSILON}
        return result

    def follow(self, symbol):
        """Compute the FOLLOW set of a nonterminal symbol.

        Due to the constraint programming approach used to calculate the
        FOLLOW sets in this implementation, this function merely returns the
        pre-computed set - the actual set computation occurs in finalize().
        """
        if not self.finalized:
            self.finalize()
        if symbol.terminal():
            raise ValueError('Attempt to compute FOLLOW of a terminal')
        return self.FOLLOW[symbol]

    def parse(self, input):
        """Use the recursive descent method to parse the input."""
        if not self.finalized:
            self.finalize()
        lexer = _LexemeIterator(self.lexer, input)
        start_symbol, start_rule, start_action = self.start
        return _rd_parse_rule(start_rule, start_action, lexer, self.ptable)
def _make_symbol(lexer,name):
"""Helper function to symbolize the elements of a production's rule"""
# If it looks like a string literal, make a literal-like token
if len(name)==3 and name[0]=="'" and name[2]=="'":
# A quick reminder here that this token is NOT the same as
# the token called "LITERAL" that is generated automatically by the
# lexer when report_literals is True. This is a sort of Token Template,
# and if we get a Lexeme with the lexer's LITERAL token, we check to
# see if the matched lexeme text matche's this character.
return Token("LITERAL",re.escape(name[1]))
# If the name is in the lexer's token set, use that token
if name in lexer.tokens:
return lexer.tokens[name]
# Otherwise, we assume it's a Symbol. If it's not, then the Symbol
# regexp should catch it and barf, which is what we want.
return Symbol(name)
def _rd_parse_rule(rule, action, lexer, parse_table):
    """Recursively parse one production ``rule`` by predictive descent.

    Collects one value per rule symbol (the matched text for terminals,
    ``None`` for epsilon, the nested action result for nonterminals) and
    returns ``action(values)`` if the action is callable, else the action
    itself. Raises ParsingError on unexpected input.
    """
    input_values = []
    for symbol in rule:
        if symbol.terminal():
            if symbol == EPSILON:
                # epsilon-production - treat like input, but consume nothing
                input_values.append(None)
                continue
            # Renamed from ``next`` -- the original shadowed the builtin.
            lexeme = lexer.poll()
            # if the wrong token is lexed:
            if (lexeme.token != symbol or
                    (symbol.name == "LITERAL" and not symbol.match(lexeme.match, 0))
                    ):
                raise ParsingError('Expected {} but found {} on line {} at '
                    'position {}'.format(symbol.name, lexeme.match,
                    lexeme.line, lexeme.position))
            input_values.append(lexeme.match)
            continue
        else:
            # non-terminal symbol:
            # find the right derivation to follow using one-token lookahead
            lookahead = lexer.peek()
            productions = parse_table[symbol][lookahead.token]
            if len(productions) == 0:
                raise ParsingError('Unexpected input "{}" on line {} at '
                    'position {}'.format(lookahead.match, lookahead.line,
                    lookahead.position))
            elif len(productions) > 1:
                # TODO - more than one production means this isn't actually
                #        LL(1). In the future, we should automatically
                #        left-factor and eliminate recursion when possible, in
                #        which case this condition would be a definite error.
                #        However instead, we will simply choose the first
                #        production, but this is a BUG and should be fixed.
                pass
            new_rule, new_action = productions[0]
            # RECURSION
            input_values.append(_rd_parse_rule(new_rule, new_action, lexer,
                parse_table))
            continue
    # Parsing complete, now perform the 'action'
    if hasattr(action, '__call__'):
        return action(input_values)
    else:
        return action
class _LexemeIterator:
"""Helper class to handle reading values from the ``Lexer``."""
def __init__(self,lexer,input):
self.n = lexer.lex(input)
self.next_symbol = None
self.scan()
def scan(self):
"Load the next lexeme"
try:
self.next_symbol = next(self.n)
except StopIteration:
def _false_iter():
while True:
yield Lexeme(EOF,"EOF",-1,-1)
self.n = _false_iter()
self.next_symbol = next(self.n)
def peek(self):
"""Return the next lexeme, but do not remove it from the input."""
return self.next_symbol
def poll(self):
"""Return and remove the next lexeme from the input."""
result = self.next_symbol
self.scan()
return result
def _update_follow(a,b):
"Add new elements EXCEPT EPSILON from b to a. Return True if not no-op."
b = b - {EPSILON}
if b - a:
a |= b
return True
return False
|
eblume/pcc
|
pcc/ll.py
|
Python
|
gpl-3.0
| 14,433
|
[
"GULP"
] |
6aefb19bfc23c451c33d30c4ab6985530ba99c7c0359f15f5127ec29bb9056dd
|
"""
Student Views
"""
import datetime
import logging
import uuid
import json
import warnings
from collections import defaultdict
from pytz import UTC
from requests import HTTPError
from ipware.ip import get_ip
from django.conf import settings
from django.contrib.auth import logout, authenticate, login
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import password_reset_confirm
from django.contrib import messages
from django.core.context_processors import csrf
from django.core import mail
from django.core.urlresolvers import reverse
from django.core.validators import validate_email, ValidationError
from django.db import IntegrityError, transaction
from django.http import (HttpResponse, HttpResponseBadRequest, HttpResponseForbidden,
HttpResponseServerError, Http404)
from django.shortcuts import redirect
from django.utils.translation import ungettext
from django.utils.http import base36_to_int
from django.utils.translation import ugettext as _, get_language
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_POST, require_GET
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.template.response import TemplateResponse
from ratelimitbackend.exceptions import RateLimitException
from social.apps.django_app import utils as social_utils
from social.backends import oauth as social_oauth
from social.exceptions import AuthException, AuthAlreadyAssociated
from edxmako.shortcuts import render_to_response, render_to_string
from course_modes.models import CourseMode
from shoppingcart.api import order_history
from student.models import (
Registration, UserProfile,
PendingEmailChange, CourseEnrollment, CourseEnrollmentAttribute, unique_id_for_user,
CourseEnrollmentAllowed, UserStanding, LoginFailures,
create_comments_service_user, PasswordHistory, UserSignupSource,
DashboardConfiguration, LinkedInAddToProfileConfiguration, ManualEnrollmentAudit, ALLOWEDTOENROLL_TO_ENROLLED)
from student.forms import AccountCreationForm, PasswordResetFormNoActive
from verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=import-error
from certificates.models import CertificateStatuses, certificate_status_for_student
from certificates.api import ( # pylint: disable=import-error
get_certificate_url,
has_html_certificates_enabled,
)
from xmodule.modulestore.django import modulestore
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from xmodule.modulestore import ModuleStoreEnum
from collections import namedtuple
from courseware.courses import get_courses, sort_by_announcement, sort_by_start_date # pylint: disable=import-error
from courseware.access import has_access
from django_comment_common.models import Role
from external_auth.models import ExternalAuthMap
import external_auth.views
from external_auth.login_and_register import (
login as external_auth_login,
register as external_auth_register
)
from bulk_email.models import Optout, CourseAuthorization
from lang_pref import LANGUAGE_KEY
import track.views
import dogstats_wrapper as dog_stats_api
from util.db import commit_on_success_with_read_committed
from util.json_request import JsonResponse
from util.bad_request_rate_limiter import BadRequestRateLimiter
from util.milestones_helpers import (
get_pre_requisite_courses_not_completed,
)
from microsite_configuration import microsite
from util.password_policy_validators import (
validate_password_length, validate_password_complexity,
validate_password_dictionary
)
import third_party_auth
from third_party_auth import pipeline, provider
from student.helpers import (
check_verify_status_by_course,
auth_pipeline_urls, get_next_url_for_login_page
)
from student.cookies import set_logged_in_cookies, delete_logged_in_cookies
from student.models import anonymous_id_for_user, Tag, UserInterestingTag
from shoppingcart.models import DonationConfiguration, CourseRegistrationCode
from embargo import api as embargo_api
import analytics
from eventtracking import tracker
# Note that this lives in LMS, so this dependency should be refactored.
from notification_prefs.views import enable_notifications
# Note that this lives in openedx, so this dependency should be refactored.
from openedx.core.djangoapps.user_api.preferences import api as preferences_api
# Module-level loggers: general student-app events and the security audit trail.
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
# Lightweight record describing one course's reverification state for display.
ReverifyInfo = namedtuple('ReverifyInfo', 'course_id course_name course_number date status display')  # pylint: disable=invalid-name
# Tracking event name emitted when a user initiates an account-setting change.
SETTING_CHANGE_INITIATED = 'edx.user.settings.change_initiated'
def csrf_token(context):
    """Render a hidden CSRF input suitable for embedding in a form.

    Returns the empty string when the context carries Django's
    'NOTPROVIDED' placeholder instead of a real token.
    """
    token = context.get('csrf_token', '')
    if token == 'NOTPROVIDED':
        return ''
    markup = (u'<div style="display:none"><input type="hidden"'
              ' name="csrfmiddlewaretoken" value="%s" /></div>')
    return markup % (token,)
# NOTE: This view is not linked to directly--it is called from
# branding/views.py:index(), which is cached for anonymous users.
# This means that it should always return the same thing for anon
# users. (in particular, no switching based on query params allowed)
def index(request, extra_context=None, user=AnonymousUser()):
    """
    Render the edX main page.

    extra_context is used to allow immediate display of certain modal windows,
    eg signup, as used by external_auth.
    """
    # The course selection work is done in courseware.courses.
    domain = settings.FEATURES.get('FORCE_UNIVERSITY_DOMAIN')  # normally False
    if domain is False:  # explicit check, because domain=None is valid
        domain = request.META.get('HTTP_HOST')
    courses = get_courses(user, domain=domain)

    sort_by_date = microsite.get_value(
        "ENABLE_COURSE_SORTING_BY_START_DATE",
        settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"])
    if sort_by_date:
        courses = sort_by_start_date(courses)
    else:
        courses = sort_by_announcement(courses)

    context = {'courses': courses}
    context.update(extra_context or {})
    return render_to_response('index.html', context)
def process_survey_link(survey_link, user):
    """
    If {UNIQUE_ID} appears in the link, replace it with a unique id for the user.
    Currently, this is sha1(user.username). Otherwise, return survey_link.
    """
    unique_id = unique_id_for_user(user)
    return survey_link.format(UNIQUE_ID=unique_id)
def cert_info(user, course_overview, course_mode):
    """
    Get the certificate info needed to render the dashboard section for the given
    student and course.

    Arguments:
        user (User): A user.
        course_overview (CourseOverview): A course.
        course_mode (str): The enrollment mode (honor, verified, audit, etc.)

    Returns:
        dict: A dictionary with keys:
            'status': one of 'generating', 'ready', 'notpassing', 'processing', 'restricted'
            'show_download_url': bool
            'download_url': url, only present if show_download_url is True
            'show_disabled_download_button': bool -- true if state is 'generating'
            'show_survey_button': bool
            'survey_url': url, only if show_survey_button is True
            'grade': if status is not 'processing'
    """
    # Courses that cannot certify yet produce an empty info dict.
    if not course_overview.may_certify():
        return {}
    cert_status = certificate_status_for_student(user, course_overview.id)
    return _cert_info(user, course_overview, cert_status, course_mode)
def reverification_info(statuses):
    """
    Returns reverification-related information for *all* of user's enrollments whose
    reverification status is in statuses.
    Args:
        statuses (list): a list of reverification statuses we want information for
        example: ["must_reverify", "denied"]
    Returns:
        dictionary of lists: dictionary with one key per status, e.g.
            dict["must_reverify"] = []
            dict["must_reverify"] = [some information]
    """
    reverifications = defaultdict(list)
    # Sort the data by the reverification_end_date
    # NOTE(review): `reverifications` is created empty just above, so
    # `reverifications[status]` is always an empty list here and the sort
    # below never executes; the code that populated these lists appears to
    # have been removed in a refactor. As written, callers receive a
    # defaultdict whose accessed keys map to empty lists -- confirm this is
    # still the intended contract before relying on the sorted contents.
    for status in statuses:
        if reverifications[status]:
            reverifications[status].sort(key=lambda x: x.date)
    return reverifications
def get_course_enrollments(user, org_to_include, orgs_to_exclude):
    """
    Given a user, return a filtered set of his or her course enrollments.

    Arguments:
        user (User): the user in question.
        org_to_include (str): for use in Microsites. If not None, ONLY courses
            of this org will be returned.
        orgs_to_exclude (list[str]): If org_to_include is not None, this
            argument is ignored. Else, courses of this org will be excluded.

    Returns:
        generator[CourseEnrollment]: a sequence of enrollments to be displayed
        on the user's dashboard.
    """
    for enrollment in CourseEnrollment.enrollments_for_user(user):
        # If the course is missing or broken, log an error and skip it.
        course_overview = enrollment.course_overview
        if not course_overview:
            log.error(
                "User %s enrolled in broken or non-existent course %s",
                user.username,
                enrollment.course_id
            )
            continue

        if org_to_include:
            # In a Microsite: keep only enrollments attributed (by ORG) to it.
            # Per the documented contract, orgs_to_exclude is ignored here.
            # (BUG FIX: the previous `elif` chain also applied the exclusion
            # list to courses matching org_to_include, contradicting the
            # docstring.)
            if course_overview.location.org != org_to_include:
                continue
        elif course_overview.location.org in orgs_to_exclude:
            # Not in a Microsite: drop enrollments attributed (by ORG) to
            # Microsites.
            continue

        yield enrollment
def _cert_info(user, course_overview, cert_status, course_mode): # pylint: disable=unused-argument
    """
    Implements the logic for cert_info -- split out for testing.
    Arguments:
        user (User): A user.
        course_overview (CourseOverview): A course.
        cert_status (dict): output of certificate_status_for_student, or None
            when no certificate record exists for this user/course.
        course_mode (str): The enrollment mode (honor, verified, audit, etc.)
    Returns a display dict for the dashboard certificate section, or None
    when the course hides early certificate info for a hidden status.
    """
    # simplify the status for the template using this lookup table
    template_state = {
        CertificateStatuses.generating: 'generating',
        CertificateStatuses.regenerating: 'generating',
        CertificateStatuses.downloadable: 'ready',
        CertificateStatuses.notpassing: 'notpassing',
        CertificateStatuses.restricted: 'restricted',
    }
    # Any status not in the table above is reported as 'processing'.
    default_status = 'processing'
    default_info = {'status': default_status,
                    'show_disabled_download_button': False,
                    'show_download_url': False,
                    'show_survey_button': False,
                    }
    # No certificate record at all: report the default 'processing' state.
    if cert_status is None:
        return default_info
    is_hidden_status = cert_status['status'] in ('unavailable', 'processing', 'generating', 'notpassing')
    # 'early_no_info' courses suppress the entire section while hidden.
    if course_overview.certificates_display_behavior == 'early_no_info' and is_hidden_status:
        return None
    status = template_state.get(cert_status['status'], default_status)
    status_dict = {
        'status': status,
        'show_download_url': status == 'ready',
        'show_disabled_download_button': status == 'generating',
        'mode': cert_status.get('mode', None),
        'linked_in_url': None
    }
    # Offer the end-of-course survey for any settled (non-processing) status.
    if (status in ('generating', 'ready', 'notpassing', 'restricted') and
            course_overview.end_of_course_survey_url is not None):
        status_dict.update({
            'show_survey_button': True,
            'survey_url': process_survey_link(course_overview.end_of_course_survey_url, user)})
    else:
        status_dict['show_survey_button'] = False
    if status == 'ready':
        # showing the certificate web view button if certificate is ready state and feature flags are enabled.
        if has_html_certificates_enabled(course_overview.id, course_overview):
            if course_overview.has_any_active_web_certificate:
                certificate_url = get_certificate_url(
                    user_id=user.id,
                    course_id=unicode(course_overview.id),
                )
                status_dict.update({
                    'show_cert_web_view': True,
                    'cert_web_view_url': u'{url}'.format(url=certificate_url)
                })
            else:
                # don't show download certificate button if we don't have an active certificate for course
                status_dict['show_download_url'] = False
        elif 'download_url' not in cert_status:
            # Data inconsistency: downloadable status without a URL.
            log.warning(
                u"User %s has a downloadable cert for %s, but no download url",
                user.username,
                course_overview.id
            )
            return default_info
        else:
            status_dict['download_url'] = cert_status['download_url']
    # If enabled, show the LinkedIn "add to profile" button
    # Clicking this button sends the user to LinkedIn where they
    # can add the certificate information to their profile.
    linkedin_config = LinkedInAddToProfileConfiguration.current()
    if linkedin_config.enabled:
        status_dict['linked_in_url'] = linkedin_config.add_to_profile_url(
            course_overview.id,
            course_overview.display_name,
            cert_status.get('mode'),
            cert_status['download_url']
        )
    if status in ('generating', 'ready', 'notpassing', 'restricted'):
        if 'grade' not in cert_status:
            # Note: as of 11/20/2012, we know there are students in this state-- cs169.1x,
            # who need to be regraded (we weren't tracking 'notpassing' at first).
            # We can add a log.warning here once we think it shouldn't happen.
            return default_info
        else:
            status_dict['grade'] = cert_status['grade']
    return status_dict
@ensure_csrf_cookie
def signin_user(request):
    """Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
    external_auth_response = external_auth_login(request)
    if external_auth_response is not None:
        return external_auth_response
    # Determine the URL to redirect to following login:
    redirect_to = get_next_url_for_login_page(request)
    if request.user.is_authenticated():
        return redirect(redirect_to)

    third_party_auth_error = None
    for msg in messages.get_messages(request):
        # BUG FIX: guard against messages with empty extra_tags --
        # ''.split() is [] and indexing it raised IndexError before.
        if msg.extra_tags and msg.extra_tags.split()[0] == "social-auth":
            # msg may or may not be translated. Try translating [again] in case we are able to:
            third_party_auth_error = _(unicode(msg))  # pylint: disable=translation-of-non-string
            break

    context = {
        'login_redirect_url': redirect_to,  # This gets added to the query string of the "Sign In" button in the header
        # Bool injected into JS to submit form if we're inside a running third-
        # party auth pipeline; distinct from the actual instance of the running
        # pipeline, if any.
        'pipeline_running': 'true' if pipeline.running(request) else 'false',
        'pipeline_url': auth_pipeline_urls(pipeline.AUTH_ENTRY_LOGIN, redirect_url=redirect_to),
        'platform_name': microsite.get_value(
            'platform_name',
            settings.PLATFORM_NAME
        ),
        'third_party_auth_error': third_party_auth_error
    }
    return render_to_response('login.html', context)
@ensure_csrf_cookie
def register_user(request, extra_context=None):
    """Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
    # Determine the URL to redirect to following login:
    redirect_to = get_next_url_for_login_page(request)
    # Already-authenticated users have nothing to register; send them on.
    if request.user.is_authenticated():
        return redirect(redirect_to)
    # External auth (e.g. Shibboleth/CAS) may take over the registration flow.
    external_auth_response = external_auth_register(request)
    if external_auth_response is not None:
        return external_auth_response
    context = {
        'login_redirect_url': redirect_to,  # This gets added to the query string of the "Sign In" button in the header
        'email': '',
        'name': '',
        'running_pipeline': None,
        'pipeline_urls': auth_pipeline_urls(pipeline.AUTH_ENTRY_REGISTER, redirect_url=redirect_to),
        'platform_name': microsite.get_value(
            'platform_name',
            settings.PLATFORM_NAME
        ),
        'selected_provider': '',
        'username': '',
    }
    # Caller-supplied context (e.g. from external auth views) overrides the
    # defaults above, and may contribute the 'extauth_domain' checked next.
    if extra_context is not None:
        context.update(extra_context)
    # Shibboleth-based registrations render their own dedicated template.
    if context.get("extauth_domain", '').startswith(external_auth.views.SHIBBOLETH_DOMAIN_PREFIX):
        return render_to_response('register-shib.html', context)
    # If third-party auth is enabled, prepopulate the form with data from the
    # selected provider.
    if third_party_auth.is_enabled() and pipeline.running(request):
        running_pipeline = pipeline.get(request)
        current_provider = provider.Registry.get_from_pipeline(running_pipeline)
        overrides = current_provider.get_register_form_data(running_pipeline.get('kwargs'))
        overrides['running_pipeline'] = running_pipeline
        overrides['selected_provider'] = current_provider.name
        context.update(overrides)
    return render_to_response('register.html', context)
def complete_course_mode_info(course_id, enrollment, modes=None):
    """
    Compute upsell-related information from the given course modes and the
    user's current enrollment.

    Returns a dict with:
        - show_upsell (bool): whether to show the course upsell information
        - days_for_upsell (int or None): days until upselling is no longer possible
    """
    if modes is None:
        modes = CourseMode.modes_for_course_dict(course_id)

    mode_info = {'show_upsell': False, 'days_for_upsell': None}
    # Upselling only makes sense when a verified track exists and the user
    # is not already enrolled in it.
    if 'verified' in modes and enrollment.mode != 'verified':
        mode_info['show_upsell'] = True
        expiration = modes['verified'].expiration_datetime
        # When the verified mode has an expiration date, report how many
        # days remain from today.
        if expiration:
            today = datetime.datetime.now(UTC).date()
            mode_info['days_for_upsell'] = (expiration.date() - today).days
    return mode_info
def is_course_blocked(request, redeemed_registration_codes, course_key):
    """
    Return True when the user's access to the course is blocked because a
    redeemed registration code was paid for via an invoice that is no
    longer valid.
    """
    for redemption in redeemed_registration_codes:
        # registration codes may be generated via Bulk Purchase Scenario
        # we have to check only for the invoice generated registration codes
        # that their invoice is valid or not
        if not redemption.invoice_item:
            continue
        if getattr(redemption.invoice_item.invoice, 'is_valid'):
            continue
        # The backing invoice is invalid: block the course and opt the user
        # out of email notifications for this unpaid registration.
        Optout.objects.get_or_create(user=request.user, course_id=course_key)
        log.info(
            u"User %s (%s) opted out of receiving emails from course %s",
            request.user.username,
            request.user.email,
            course_key
        )
        track.views.server_track(
            request,
            "change-email1-settings",
            {"receive_emails": "no", "course": course_key.to_deprecated_string()},
            page='dashboard'
        )
        return True
    return False
@login_required
@ensure_csrf_cookie
def dashboard(request):
    """
    Render the student dashboard for the logged-in user.

    Aggregates everything the dashboard template needs: the user's
    enrollments (microsite-filtered and sorted by enrollment date),
    course modes, certificate/verification/credit statuses, email
    settings, refund and blocked-course flags, order history, and
    unmet prerequisites.
    """
    user = request.user
    platform_name = microsite.get_value("platform_name", settings.PLATFORM_NAME)
    # for microsites, we want to filter and only show enrollments for courses within
    # the microsites 'ORG'
    course_org_filter = microsite.get_value('course_org_filter')
    # Let's filter out any courses in an "org" that has been declared to be
    # in a Microsite
    org_filter_out_set = microsite.get_all_orgs()
    # remove our current Microsite from the "filter out" list, if applicable
    if course_org_filter:
        org_filter_out_set.remove(course_org_filter)
    # Build our (course, enrollment) list for the user, but ignore any courses that no
    # longer exist (because the course IDs have changed). Still, we don't delete those
    # enrollments, because it could have been a data push snafu.
    course_enrollments = list(get_course_enrollments(user, course_org_filter, org_filter_out_set))
    # sort the enrollment pairs by the enrollment date
    course_enrollments.sort(key=lambda x: x.created, reverse=True)
    # Retrieve the course modes for each course
    enrolled_course_ids = [enrollment.course_id for enrollment in course_enrollments]
    __, unexpired_course_modes = CourseMode.all_and_unexpired_modes_for_courses(enrolled_course_ids)
    # Re-key the modes as {course_id: {mode_slug: mode}} for fast lookups below.
    course_modes_by_course = {
        course_id: {
            mode.slug: mode
            for mode in modes
        }
        for course_id, modes in unexpired_course_modes.iteritems()
    }
    # Check to see if the student has recently enrolled in a course.
    # If so, display a notification message confirming the enrollment.
    enrollment_message = _create_recent_enrollment_message(
        course_enrollments, course_modes_by_course
    )
    # Course IDs the user has opted out of receiving email for.
    course_optouts = Optout.objects.filter(user=user).values_list('course_id', flat=True)
    # Inactive users get a banner prompting them to activate their account.
    message = ""
    if not user.is_active:
        message = render_to_string(
            'registration/activate_account_notice.html',
            {'email': user.email, 'platform_name': platform_name}
        )
    # Global staff can see what courses errored on their dashboard
    staff_access = False
    errored_courses = {}
    if has_access(user, 'staff', 'global'):
        # Show any courses that errored on load
        staff_access = True
        errored_courses = modulestore().get_errored_courses()
    # Only link into courseware the user may load and whose prerequisites allow it.
    show_courseware_links_for = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if has_access(request.user, 'load', enrollment.course_overview)
        and has_access(request.user, 'view_courseware_with_prerequisites', enrollment.course_overview)
    )
    # Construct a dictionary of course mode information
    # used to render the course list. We re-use the course modes dict
    # we loaded earlier to avoid hitting the database.
    course_mode_info = {
        enrollment.course_id: complete_course_mode_info(
            enrollment.course_id, enrollment,
            modes=course_modes_by_course[enrollment.course_id]
        )
        for enrollment in course_enrollments
    }
    # Determine the per-course verification status
    # This is a dictionary in which the keys are course locators
    # and the values are one of:
    #
    # VERIFY_STATUS_NEED_TO_VERIFY
    # VERIFY_STATUS_SUBMITTED
    # VERIFY_STATUS_APPROVED
    # VERIFY_STATUS_MISSED_DEADLINE
    #
    # Each of which correspond to a particular message to display
    # next to the course on the dashboard.
    #
    # If a course is not included in this dictionary,
    # there is no verification messaging to display.
    verify_status_by_course = check_verify_status_by_course(user, course_enrollments)
    # Per-course certificate status used to render download / in-progress UI.
    cert_statuses = {
        enrollment.course_id: cert_info(request.user, enrollment.course_overview, enrollment.mode)
        for enrollment in course_enrollments
    }
    # only show email settings for Mongo course and when bulk email is turned on
    show_email_settings_for = frozenset(
        enrollment.course_id for enrollment in course_enrollments if (
            settings.FEATURES['ENABLE_INSTRUCTOR_EMAIL'] and
            modulestore().get_modulestore_type(enrollment.course_id) != ModuleStoreEnum.Type.xml and
            CourseAuthorization.instructor_email_enabled(enrollment.course_id)
        )
    )
    # Verification Attempts
    # Used to generate the "you must reverify for course x" banner
    verification_status, verification_msg = SoftwareSecurePhotoVerification.user_status(user)
    # Gets data for midcourse reverifications, if any are necessary or have failed
    statuses = ["approved", "denied", "pending", "must_reverify"]
    reverifications = reverification_info(statuses)
    # Courses the user could still get a refund for.
    show_refund_option_for = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if enrollment.refundable()
    )
    # Courses blocked because a redeemed registration code's invoice is invalid.
    block_courses = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if is_course_blocked(
            request,
            CourseRegistrationCode.objects.filter(
                course_id=enrollment.course_id,
                registrationcoderedemption__redeemed_by=request.user
            ),
            enrollment.course_id
        )
    )
    enrolled_courses_either_paid = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if enrollment.is_paid_course()
    )
    # If there are *any* denied reverifications that have not been toggled off,
    # we'll display the banner
    denied_banner = any(item.display for item in reverifications["denied"])
    # Populate the Order History for the side-bar.
    order_history_list = order_history(user, course_org_filter=course_org_filter, org_filter_out_set=org_filter_out_set)
    # get list of courses having pre-requisites yet to be completed
    courses_having_prerequisites = frozenset(
        enrollment.course_id for enrollment in course_enrollments
        if enrollment.course_overview.pre_requisite_courses
    )
    courses_requirements_not_met = get_pre_requisite_courses_not_completed(user, courses_having_prerequisites)
    # Explain a redirect away from a course that has not yet started.
    if 'notlive' in request.GET:
        redirect_message = _("The course you are looking for does not start until {date}.").format(
            date=request.GET['notlive']
        )
    else:
        redirect_message = ''
    context = {
        'enrollment_message': enrollment_message,
        'redirect_message': redirect_message,
        'course_enrollments': course_enrollments,
        'course_optouts': course_optouts,
        'message': message,
        'staff_access': staff_access,
        'errored_courses': errored_courses,
        'show_courseware_links_for': show_courseware_links_for,
        'all_course_modes': course_mode_info,
        'cert_statuses': cert_statuses,
        'credit_statuses': _credit_statuses(user, course_enrollments),
        'show_email_settings_for': show_email_settings_for,
        'reverifications': reverifications,
        'verification_status': verification_status,
        'verification_status_by_course': verify_status_by_course,
        'verification_msg': verification_msg,
        'show_refund_option_for': show_refund_option_for,
        'block_courses': block_courses,
        'denied_banner': denied_banner,
        'billing_email': settings.PAYMENT_SUPPORT_EMAIL,
        'user': user,
        'logout_url': reverse(logout_user),
        'platform_name': platform_name,
        'enrolled_courses_either_paid': enrolled_courses_either_paid,
        'provider_states': [],
        'order_history_list': order_history_list,
        'courses_requirements_not_met': courses_requirements_not_met,
        'nav_hidden': True,
    }
    return render_to_response('dashboard.html', context)
def _create_recent_enrollment_message(course_enrollments, course_modes):  # pylint: disable=invalid-name
    """
    Build the "recently enrolled" notification message, if applicable.

    Args:
        course_enrollments (list[CourseEnrollment]): a list of course enrollments.
        course_modes (dict): Mapping of course ID's to course mode dictionaries.

    Returns:
        The rendered HTML message output from the message template, or None
        when the student has no recently enrolled courses.
    """
    recent_enrollments = _get_recently_enrolled_courses(course_enrollments)
    if not recent_enrollments:
        return None

    enrollment_messages = []
    for enrollment in recent_enrollments:
        overview = enrollment.course_overview
        enrollment_messages.append({
            "course_id": overview.id,
            "course_name": overview.display_name,
            "allow_donation": _allow_donation(course_modes, overview.id, enrollment)
        })
    platform_name = microsite.get_value('platform_name', settings.PLATFORM_NAME)
    return render_to_string(
        'enrollment/course_enrollment_message.html',
        {'course_enrollment_messages': enrollment_messages, 'platform_name': platform_name}
    )
def _get_recently_enrolled_courses(course_enrollments):
    """
    Filter the given enrollments down to recent, active ones.

    Args:
        course_enrollments (list[CourseEnrollment]): A list of course enrollments.

    Returns:
        list[CourseEnrollment]: A list of recent course enrollments.
    """
    window_seconds = DashboardConfiguration.current().recent_enrollment_time_delta
    cutoff = datetime.datetime.now(UTC) - datetime.timedelta(seconds=window_seconds)
    recent = []
    for enrollment in course_enrollments:
        # An enrollment with no created date is deliberately excluded from
        # the list of recent enrollments.
        if enrollment.is_active and enrollment.created > cutoff:
            recent.append(enrollment)
    return recent
def _allow_donation(course_modes, course_id, enrollment):
    """Determines if the dashboard will request donations for the given course.

    Check if donations are configured for the platform, and if the current course is accepting donations.

    Args:
        course_modes (dict): Mapping of course ID's to course mode dictionaries.
        course_id (str): The unique identifier for the course.
        enrollment (CourseEnrollment): The enrollment object in which the user is enrolled.

    Returns:
        True if the course is allowing donations.
    """
    if not DonationConfiguration.current().enabled:
        return False
    # Guard against course IDs with no entry in the modes mapping (previously
    # raised KeyError); treat them as not accepting donations.
    modes_for_course = course_modes.get(course_id, {})
    current_mode = modes_for_course.get(enrollment.mode)
    # Donations are only requested for free (min_price == 0) modes.
    return current_mode is not None and current_mode.min_price == 0
def _update_email_opt_in(request, org):
"""Helper function used to hit the profile API if email opt-in is enabled."""
email_opt_in = request.POST.get('email_opt_in')
if email_opt_in is not None:
email_opt_in_boolean = email_opt_in == 'true'
preferences_api.update_email_opt_in(request.user, org, email_opt_in_boolean)
def _credit_statuses(user, course_enrollments):
    """
    Retrieve the status for credit courses.
    A credit course is a course for which a user can purchased
    college credit. The current flow is:
    1. User becomes eligible for credit (submits verifications, passes the course, etc.)
    2. User purchases credit from a particular credit provider.
    3. User requests credit from the provider, usually creating an account on the provider's site.
    4. The credit provider notifies us whether the user's request for credit has been accepted or rejected.
    The dashboard is responsible for communicating the user's state in this flow.
    Arguments:
        user (User): The currently logged-in user.
        course_enrollments (list[CourseEnrollment]): List of enrollments for the
            user.
    Returns: dict
        The returned dictionary has keys that are `CourseKey`s and values that
        are dictionaries with:
            * eligible (bool): True if the user is eligible for credit in this course.
            * deadline (datetime): The deadline for purchasing and requesting credit for this course.
            * purchased (bool): Whether the user has purchased credit for this course.
            * provider_name (string): The display name of the credit provider.
            * provider_status_url (string): A URL the user can visit to check on their credit request status.
            * request_status (string): Either "pending", "approved", or "rejected"
            * error (bool): If true, an unexpected error occurred when retrieving the credit status,
                so the user should contact the support team.
    Example:
        >>> _credit_statuses(user, course_enrollments)
        {
            CourseKey.from_string("edX/DemoX/Demo_Course"): {
                "course_key": "edX/DemoX/Demo_Course",
                "eligible": True,
                "deadline": 2015-11-23 00:00:00 UTC,
                "purchased": True,
                "provider_name": "Hogwarts",
                "provider_status_url": "http://example.com/status",
                "request_status": "pending",
                "error": False
            }
        }
    """
    from openedx.core.djangoapps.credit import api as credit_api
    # Feature flag off
    if not settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY"):
        return {}
    # Map each course key to the status of the user's latest credit request.
    request_status_by_course = {
        request["course_key"]: request["status"]
        for request in credit_api.get_credit_requests_for_user(user.username)
    }
    # Enrollments where the user has already purchased credit ("credit" mode).
    credit_enrollments = {
        enrollment.course_id: enrollment
        for enrollment in course_enrollments
        if enrollment.mode == "credit"
    }
    # When a user purchases credit in a course, the user's enrollment
    # mode is set to "credit" and an enrollment attribute is set
    # with the ID of the credit provider. We retrieve *all* such attributes
    # here to minimize the number of database queries.
    purchased_credit_providers = {
        attribute.enrollment.course_id: attribute.value
        for attribute in CourseEnrollmentAttribute.objects.filter(
            namespace="credit",
            name="provider_id",
            enrollment__in=credit_enrollments.values()
        ).select_related("enrollment")
    }
    # Cache provider display info by ID so statuses can be annotated below
    # without further lookups.
    provider_info_by_id = {
        provider["id"]: provider
        for provider in credit_api.get_credit_providers()
    }
    statuses = {}
    for eligibility in credit_api.get_eligibilities_for_user(user.username):
        course_key = CourseKey.from_string(unicode(eligibility["course_key"]))
        status = {
            "course_key": unicode(course_key),
            "eligible": True,
            "deadline": eligibility["deadline"],
            "purchased": course_key in credit_enrollments,
            "provider_name": None,
            "provider_status_url": None,
            "provider_id": None,
            "request_status": request_status_by_course.get(course_key),
            "error": False,
        }
        # If the user has purchased credit, then include information about the credit
        # provider from which the user purchased credit.
        # We retrieve the provider's ID from the an "enrollment attribute" set on the user's
        # enrollment when the user's order for credit is fulfilled by the E-Commerce service.
        if status["purchased"]:
            provider_id = purchased_credit_providers.get(course_key)
            if provider_id is None:
                status["error"] = True
                log.error(
                    u"Could not find credit provider associated with credit enrollment "
                    u"for user %s in course %s. The user will not be able to see his or her "
                    u"credit request status on the student dashboard. This attribute should "
                    u"have been set when the user purchased credit in the course.",
                    user.id, course_key
                )
            else:
                provider_info = provider_info_by_id.get(provider_id, {})
                status["provider_name"] = provider_info.get("display_name")
                status["provider_status_url"] = provider_info.get("status_url")
                status["provider_id"] = provider_id
        statuses[course_key] = status
    return statuses
@require_POST
@commit_on_success_with_read_committed
def change_enrollment(request, check_access=True):
    """
    Modify the enrollment status for the logged-in user.
    The request parameter must be a POST request (other methods return 405)
    that specifies course_id and enrollment_action parameters. If course_id or
    enrollment_action is not specified, if course_id is not valid, if
    enrollment_action is something other than "enroll" or "unenroll", if
    enrollment_action is "enroll" and enrollment is closed for the course, or
    if enrollment_action is "unenroll" and the user is not enrolled in the
    course, a 400 error will be returned. If the user is not logged in, 403
    will be returned; it is important that only this case return 403 so the
    front end can redirect the user to a registration or login page when this
    happens. This function should only be called from an AJAX request, so
    the error messages in the responses should never actually be user-visible.
    Args:
        request (`Request`): The Django request object
    Keyword Args:
        check_access (boolean): If True, we check that an accessible course actually
            exists for the given course_key before we enroll the student.
            The default is True; pass False only for legacy code or code with
            non-standard flows (ex. beta tester invitations) — any standard
            enrollment flow should leave the check enabled.
    Returns:
        Response
    """
    # Get the user
    user = request.user
    # Ensure the user is authenticated
    if not user.is_authenticated():
        return HttpResponseForbidden()
    # Ensure we received a course_id
    action = request.POST.get("enrollment_action")
    if 'course_id' not in request.POST:
        return HttpResponseBadRequest(_("Course id not specified"))
    try:
        course_id = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get("course_id"))
    except InvalidKeyError:
        log.warning(
            u"User %s tried to %s with invalid course id: %s",
            user.username,
            action,
            request.POST.get("course_id"),
        )
        return HttpResponseBadRequest(_("Invalid course id"))
    if action == "enroll":
        # Make sure the course exists
        # We don't do this check on unenroll, or a bad course id can't be unenrolled from
        if not modulestore().has_course(course_id):
            log.warning(
                u"User %s tried to enroll in non-existent course %s",
                user.username,
                course_id
            )
            return HttpResponseBadRequest(_("Course id is invalid"))
        # Record the user's email opt-in preference
        if settings.FEATURES.get('ENABLE_MKTG_EMAIL_OPT_IN'):
            _update_email_opt_in(request, course_id.org)
        available_modes = CourseMode.modes_for_course_dict(course_id)
        # Check whether the user is blocked from enrolling in this course
        # This can occur if the user's IP is on a global blacklist
        # or if the user is enrolling in a country in which the course
        # is not available.
        redirect_url = embargo_api.redirect_if_blocked(
            course_id, user=user, ip_address=get_ip(request),
            url=request.path
        )
        if redirect_url:
            return HttpResponse(redirect_url)
        # Check that auto enrollment is allowed for this course
        # (= the course is NOT behind a paywall)
        if CourseMode.can_auto_enroll(course_id):
            # Enroll the user using the default mode (honor)
            # We're assuming that users of the course enrollment table
            # will NOT try to look up the course enrollment model
            # by its slug. If they do, it's possible (based on the state of the database)
            # for no such model to exist, even though we've set the enrollment type
            # to "honor".
            try:
                CourseEnrollment.enroll(user, course_id, check_access=check_access)
            except Exception:
                return HttpResponseBadRequest(_("Could not enroll"))
        # If we have more than one course mode or professional ed is enabled,
        # then send the user to the choose your track page.
        # (In the case of no-id-professional/professional ed, this will redirect to a page that
        # funnels users directly into the verification / payment flow)
        if CourseMode.has_verified_mode(available_modes) or CourseMode.has_professional_mode(available_modes):
            return HttpResponse(
                reverse("course_modes_choose", kwargs={'course_id': unicode(course_id)})
            )
        # Otherwise, there is only one mode available (the default)
        return HttpResponse()
    elif action == "unenroll":
        if not CourseEnrollment.is_enrolled(user, course_id):
            return HttpResponseBadRequest(_("You are not enrolled in this course"))
        CourseEnrollment.unenroll(user, course_id)
        return HttpResponse()
    else:
        return HttpResponseBadRequest(_("Enrollment action is invalid"))
@never_cache
@ensure_csrf_cookie
def accounts_login(request):
    """Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
    # External auth (e.g. Shibboleth/CAS) may take over the login flow.
    external_auth_response = external_auth_login(request)
    if external_auth_response is not None:
        return external_auth_response

    # Where to send the user once they sign in successfully.
    redirect_to = get_next_url_for_login_page(request)
    return render_to_response('login.html', {
        'login_redirect_url': redirect_to,
        'pipeline_running': 'false',
        'pipeline_url': auth_pipeline_urls(pipeline.AUTH_ENTRY_LOGIN, redirect_url=redirect_to),
        'platform_name': settings.PLATFORM_NAME,
    })
# Need different levels of logging
@ensure_csrf_cookie
def login_user(request, error=""):  # pylint: disable=too-many-statements,unused-argument
    """
    AJAX request to log in the user.

    First-party auth expects 'email' and 'password' in POST, with optional
    'remember' ('true' keeps the session for a week) and 'course_id' (used
    for analytics only). When a third-party-auth pipeline is running and no
    email/password were posted, the pipeline user is logged in instead.

    Returns a JsonResponse with 'success' and, on failure, a human-readable
    'value' message (or a 'redirect' for Shibboleth-linked accounts).
    """
    backend_name = None
    email = None
    password = None
    redirect_url = None
    response = None
    running_pipeline = None
    third_party_auth_requested = third_party_auth.is_enabled() and pipeline.running(request)
    third_party_auth_successful = False
    # Posting an email or password means the user explicitly chose first-party auth.
    trumped_by_first_party_auth = bool(request.POST.get('email')) or bool(request.POST.get('password'))
    user = None
    if third_party_auth_requested and not trumped_by_first_party_auth:
        # The user has already authenticated via third-party auth and has not
        # asked to do first party auth by supplying a username or password. We
        # now want to put them through the same logging and cookie calculation
        # logic as with first-party auth.
        running_pipeline = pipeline.get(request)
        username = running_pipeline['kwargs'].get('username')
        backend_name = running_pipeline['backend']
        third_party_uid = running_pipeline['kwargs']['uid']
        requested_provider = provider.Registry.get_from_pipeline(running_pipeline)
        try:
            user = pipeline.get_authenticated_user(requested_provider, username, third_party_uid)
            third_party_auth_successful = True
        except User.DoesNotExist:
            AUDIT_LOG.warning(
                u'Login failed - user with username {username} has no social auth with backend_name {backend_name}'.format(
                    username=username, backend_name=backend_name))
            return HttpResponse(
                _("You've successfully logged into your {provider_name} account, but this account isn't linked with an {platform_name} account yet.").format(
                    platform_name=settings.PLATFORM_NAME, provider_name=requested_provider.name
                )
                + "<br/><br/>" +
                _("Use your {platform_name} username and password to log into {platform_name} below, "
                  "and then link your {platform_name} account with {provider_name} from your dashboard.").format(
                    platform_name=settings.PLATFORM_NAME, provider_name=requested_provider.name
                )
                + "<br/><br/>" +
                _("If you don't have an {platform_name} account yet, "
                  "click <strong>Register</strong> at the top of the page.").format(
                    platform_name=settings.PLATFORM_NAME),
                content_type="text/plain",
                status=403
            )
    else:
        # First-party auth: look the user up by email (authentication happens below).
        if 'email' not in request.POST or 'password' not in request.POST:
            return JsonResponse({
                "success": False,
                "value": _('There was an error receiving your login information. Please email us.'),  # TODO: User error message
            })  # TODO: this should be status code 400  # pylint: disable=fixme
        email = request.POST['email']
        password = request.POST['password']
        try:
            user = User.objects.get(email=email)
        except User.DoesNotExist:
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                AUDIT_LOG.warning(u"Login failed - Unknown user email")
            else:
                AUDIT_LOG.warning(u"Login failed - Unknown user email: {0}".format(email))
    # check if the user has a linked shibboleth account, if so, redirect the user to shib-login
    # This behavior is pretty much like what gmail does for shibboleth. Try entering some @stanford.edu
    # address into the Gmail login.
    if settings.FEATURES.get('AUTH_USE_SHIB') and user:
        try:
            eamap = ExternalAuthMap.objects.get(user=user)
            if eamap.external_domain.startswith(external_auth.views.SHIBBOLETH_DOMAIN_PREFIX):
                return JsonResponse({
                    "success": False,
                    "redirect": reverse('shib-login'),
                })  # TODO: this should be status code 301  # pylint: disable=fixme
        except ExternalAuthMap.DoesNotExist:
            # This is actually the common case, logging in user without external linked login
            AUDIT_LOG.info(u"User %s w/o external auth attempting login", user)
    # see if account has been locked out due to excessive login failures
    user_found_by_email_lookup = user
    if user_found_by_email_lookup and LoginFailures.is_feature_enabled():
        if LoginFailures.is_user_locked_out(user_found_by_email_lookup):
            return JsonResponse({
                "success": False,
                "value": _('This account has been temporarily locked due to excessive login failures. Try again later.'),
            })  # TODO: this should be status code 429  # pylint: disable=fixme
    # see if the user must reset his/her password due to any policy settings
    if user_found_by_email_lookup and PasswordHistory.should_user_reset_password_now(user_found_by_email_lookup):
        return JsonResponse({
            "success": False,
            "value": _('Your password has expired due to password policy on this account. You must '
                       'reset your password before you can log in again. Please click the '
                       '"Forgot Password" link on this page to reset your password before logging in again.'),
        })  # TODO: this should be status code 403  # pylint: disable=fixme
    # if the user doesn't exist, we want to set the username to an invalid
    # username so that authentication is guaranteed to fail and we can take
    # advantage of the ratelimited backend
    username = user.username if user else ""
    if not third_party_auth_successful:
        try:
            user = authenticate(username=username, password=password, request=request)
        # this occurs when there are too many attempts from the same IP address
        except RateLimitException:
            return JsonResponse({
                "success": False,
                "value": _('Too many failed login attempts. Try again later.'),
            })  # TODO: this should be status code 429  # pylint: disable=fixme
    if user is None:
        # Authentication failed (bad password, or no account for this email).
        # tick the failed login counters if the user exists in the database
        if user_found_by_email_lookup and LoginFailures.is_feature_enabled():
            LoginFailures.increment_lockout_counter(user_found_by_email_lookup)
        # if we didn't find this username earlier, the account for this email
        # doesn't exist, and doesn't have a corresponding password
        if username != "":
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                loggable_id = user_found_by_email_lookup.id if user_found_by_email_lookup else "<unknown>"
                AUDIT_LOG.warning(u"Login failed - password for user.id: {0} is invalid".format(loggable_id))
            else:
                AUDIT_LOG.warning(u"Login failed - password for {0} is invalid".format(email))
        return JsonResponse({
            "success": False,
            "value": _('Email or password is incorrect.'),
        })  # TODO: this should be status code 400  # pylint: disable=fixme
    # successful login, clear failed login attempts counters, if applicable
    if LoginFailures.is_feature_enabled():
        LoginFailures.clear_lockout_counter(user)
    # Track the user's sign in
    if settings.FEATURES.get('SEGMENT_IO_LMS') and hasattr(settings, 'SEGMENT_IO_LMS_KEY'):
        tracking_context = tracker.get_tracker().resolve_context()
        analytics.identify(user.id, {
            'email': email,
            'username': username,
        })
        analytics.track(
            user.id,
            "edx.bi.user.account.authenticated",
            {
                'category': "conversion",
                'label': request.POST.get('course_id'),
                'provider': None
            },
            context={
                'Google Analytics': {
                    'clientId': tracking_context.get('client_id')
                }
            }
        )
    if user is not None and user.is_active:
        try:
            # We do not log here, because we have a handler registered
            # to perform logging on successful logins.
            login(request, user)
            if request.POST.get('remember') == 'true':
                # 604800 seconds = one week.
                request.session.set_expiry(604800)
                log.debug("Setting user session to never expire")
            else:
                # Session expires when the user closes the browser.
                request.session.set_expiry(0)
        except Exception as exc:  # pylint: disable=broad-except
            AUDIT_LOG.critical("Login failed - Could not create session. Is memcached running?")
            log.critical("Login failed - Could not create session. Is memcached running?")
            log.exception(exc)
            raise
        redirect_url = None  # The AJAX method calling should know the default destination upon success
        if third_party_auth_successful:
            redirect_url = pipeline.get_complete_url(backend_name)
        response = JsonResponse({
            "success": True,
            "redirect_url": redirect_url,
        })
        # Ensure that the external marketing site can
        # detect that the user is logged in.
        return set_logged_in_cookies(request, response, user)
    # The account exists but has not been activated: resend the activation email.
    if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
        AUDIT_LOG.warning(u"Login failed - Account not active for user.id: {0}, resending activation".format(user.id))
    else:
        AUDIT_LOG.warning(u"Login failed - Account not active for user {0}, resending activation".format(username))
    reactivation_email_for_user(user)
    not_activated_msg = _("This account has not been activated. We have sent another activation message. Please check your email for the activation instructions.")
    return JsonResponse({
        "success": False,
        "value": not_activated_msg,
    })  # TODO: this should be status code 400  # pylint: disable=fixme
@csrf_exempt
@require_POST
@social_utils.strategy("social:complete")
def login_oauth_token(request, backend):
    """
    Authenticate the client using an OAuth access token by using the token to
    retrieve information from a third party and matching that information to an
    existing user.
    """
    warnings.warn("Please use AccessTokenExchangeView instead.", DeprecationWarning)
    backend = request.backend
    # Only OAuth1/OAuth2 backends support token-based login.
    if not isinstance(backend, (social_oauth.BaseOAuth1, social_oauth.BaseOAuth2)):
        raise Http404
    if "access_token" not in request.POST:
        return JsonResponse({"error": "invalid_request"}, status=400)
    # Tell third party auth pipeline that this is an API call
    request.session[pipeline.AUTH_ENTRY_KEY] = pipeline.AUTH_ENTRY_LOGIN_API
    user = None
    try:
        user = backend.do_auth(request.POST["access_token"])
    except (HTTPError, AuthException):
        pass
    # do_auth can return a non-User object if it fails
    if user and isinstance(user, User):
        login(request, user)
        return JsonResponse(status=204)
    # Ensure user does not re-enter the pipeline
    request.social_strategy.clean_partial_pipeline()
    return JsonResponse({"error": "invalid_token"}, status=401)
@ensure_csrf_cookie
def logout_user(request):
    """
    HTTP request to log out the user. Redirects to marketing page.
    Deletes both the CSRF and sessionid cookies so the marketing
    site can determine the logged in state of the user
    """
    # We do not log here, because we have a handler registered
    # to perform logging on successful logouts.
    logout(request)
    # CAS deployments get their dedicated logout endpoint; everyone else
    # lands on the root (marketing) page.
    target = reverse('cas-logout') if settings.FEATURES.get('AUTH_USE_CAS') else '/'
    response = redirect(target)
    delete_logged_in_cookies(response)
    return response
@require_GET
@login_required
@ensure_csrf_cookie
def manage_user_standing(request):
    """
    Renders the view used to manage user standing. Also displays a table
    of user accounts that have been disabled and who disabled them.
    """
    # Staff-only administrative page.
    if not request.user.is_staff:
        raise Http404
    disabled_standings = UserStanding.objects.filter(
        account_status=UserStanding.ACCOUNT_DISABLED
    )
    # One table row per disabled user: username plus who changed the account.
    rows = [
        [standing.user.username, standing.user.standing.all()[0].changed_by]
        for standing in disabled_standings
    ]
    context = {
        'headers': ['username', 'account_changed_by'],
        'rows': rows,
    }
    return render_to_response("manage_user_standing.html", context)
@require_POST
@login_required
@ensure_csrf_cookie
def disable_account_ajax(request):
    """
    Ajax call to change user standing. Endpoint of the form
    in manage_user_standing.html

    Expects POST parameters ``username`` and ``account_action``
    ('disable' or 'reenable'). Staff only; returns a JSON message
    with status 400 on any validation failure.
    """
    if not request.user.is_staff:
        raise Http404
    username = request.POST.get('username')
    context = {}
    if username is None or username.strip() == '':
        context['message'] = _('Please enter a username')
        return JsonResponse(context, status=400)

    account_action = request.POST.get('account_action')
    if account_action is None:
        context['message'] = _('Please choose an option')
        return JsonResponse(context, status=400)

    username = username.strip()
    try:
        user = User.objects.get(username=username)
    except User.DoesNotExist:
        context['message'] = _("User with username {} does not exist").format(username)
        return JsonResponse(context, status=400)
    else:
        # Create the standing record on first use; ``defaults`` only applies
        # when a new row is created. (Named _success, not _, so the gettext
        # alias above is not shadowed.)
        user_account, _success = UserStanding.objects.get_or_create(
            user=user, defaults={'changed_by': request.user},
        )
        if account_action == 'disable':
            user_account.account_status = UserStanding.ACCOUNT_DISABLED
            context['message'] = _("Successfully disabled {}'s account").format(username)
            log.info(u"%s disabled %s's account", request.user, username)
        elif account_action == 'reenable':
            user_account.account_status = UserStanding.ACCOUNT_ENABLED
            context['message'] = _("Successfully reenabled {}'s account").format(username)
            log.info(u"%s reenabled %s's account", request.user, username)
        else:
            context['message'] = _("Unexpected account status")
            return JsonResponse(context, status=400)
        # Record who made this change and when, for the audit table.
        user_account.changed_by = request.user
        user_account.standing_last_changed_at = datetime.datetime.now(UTC)
        user_account.save()

    return JsonResponse(context)
@login_required
@ensure_csrf_cookie
def change_setting(request):
    """JSON call to change a profile setting: Right now, location"""
    # TODO (vshnayder): location is no longer used
    u_prof = UserProfile.objects.get(user=request.user)  # request.user.profile_cache
    # EAFP: only update the field when the client actually sent it.
    try:
        u_prof.location = request.POST['location']
    except KeyError:
        pass
    u_prof.save()
    return JsonResponse({
        "success": True,
        "location": u_prof.location,
    })
class AccountValidationError(Exception):
    """
    Raised when an account cannot be created because a single field
    (e.g. username or email) failed validation.

    Attributes:
        message: human-readable description of the failure.
        field: name of the offending form field, so callers can report
            the error against the right input.
    """
    def __init__(self, message, field):
        super(AccountValidationError, self).__init__(message)
        # Store the message explicitly: BaseException.message is deprecated
        # in Python 2.6+ and absent in Python 3, but callers read exc.message.
        self.message = message
        self.field = field
@receiver(post_save, sender=User)
def user_signup_handler(sender, **kwargs):  # pylint: disable=unused-argument
    """
    handler that saves the user Signup Source
    when the user is created
    """
    # Only act on the initial save of a brand-new User row.
    if not kwargs.get('created'):
        return
    site = microsite.get_value('SITE_NAME')
    if site:
        UserSignupSource(user=kwargs['instance'], site=site).save()
        log.info(u'user {} originated from a white labeled "Microsite"'.format(kwargs['instance'].id))
def _do_create_account(form):
    """
    Given cleaned post variables, create the User and UserProfile objects, as well as the
    registration for this user.

    Returns a tuple (User, UserProfile, Registration).

    Raises:
        ValidationError: if the form itself is invalid.
        AccountValidationError: if the username or email is already taken.

    Note: this function is also used for creating test users.
    """
    if not form.is_valid():
        raise ValidationError(form.errors)

    user = User(
        username=form.cleaned_data["username"],
        email=form.cleaned_data["email"],
        is_active=False
    )
    user.set_password(form.cleaned_data["password"])
    registration = Registration()

    # TODO: Rearrange so that if part of the process fails, the whole process fails.
    # Right now, we can have e.g. no registration e-mail sent out and a zombie account
    try:
        user.save()
    except IntegrityError:
        # Figure out the cause of the integrity error. Use exists() so the
        # database only checks for presence instead of fetching every row.
        if User.objects.filter(username=user.username).exists():
            raise AccountValidationError(
                _("An account with the Public Username '{username}' already exists.").format(username=user.username),
                field="username"
            )
        elif User.objects.filter(email=user.email).exists():
            raise AccountValidationError(
                _("An account with the Email '{email}' already exists.").format(email=user.email),
                field="email"
            )
        else:
            raise

    # add this account creation to password history
    # NOTE, this will be a NOP unless the feature has been turned on in configuration
    password_history_entry = PasswordHistory()
    password_history_entry.create(user)

    registration.register(user)

    profile_fields = [
        "name", "level_of_education", "gender", "mailing_address", "city", "country", "goals",
        "year_of_birth"
    ]
    profile = UserProfile(
        user=user,
        **{key: form.cleaned_data.get(key) for key in profile_fields}
    )
    # Extra microsite-defined fields are serialized into the profile's meta blob.
    extended_profile = form.cleaned_extended_profile
    if extended_profile:
        profile.meta = json.dumps(extended_profile)
    try:
        profile.save()
    except Exception:  # pylint: disable=broad-except
        log.exception("UserProfile creation failed for user {id}.".format(id=user.id))
        raise

    return (user, profile, registration)
def create_account_with_params(request, params):
    """
    Given a request and a dict of parameters (which may or may not have come
    from the request), create an account for the requesting user, including
    creating a comments service user object and sending an activation email.
    This also takes external/third-party auth into account, updates that as
    necessary, and authenticates the user for the request's session.

    Returns the newly created (and now logged-in) user.

    Raises AccountValidationError if an account with the username or email
    specified by params already exists, or ValidationError if any of the given
    parameters is invalid for any other reason.

    Issues with this code:
    * It is not transactional. If there is a failure part-way, an incomplete
      account will be created and left in the database.
    * Third-party auth passwords are not verified. There is a comment that
      they are unused, but it would be helpful to have a sanity check that
      they are sane.
    * It is over 300 lines long (!) and includes disparate functionality, from
      registration e-mails to all sorts of other things. It should be broken
      up into semantically meaningful functions.
    * The user-facing text is rather unfriendly (e.g. "Username must be a
      minimum of two characters long" rather than "Please use a username of
      at least two characters").
    """
    # Copy params so we can modify it; we can't just do dict(params) because if
    # params is request.POST, that results in a dict containing lists of values
    params = dict(params.items())

    # allow for microsites to define their own set of required/optional/hidden fields
    extra_fields = microsite.get_value(
        'REGISTRATION_EXTRA_FIELDS',
        getattr(settings, 'REGISTRATION_EXTRA_FIELDS', {})
    )

    # Boolean of whether a 3rd party auth provider and credentials were provided in
    # the API so the newly created account can link with the 3rd party account.
    #
    # Note: this is orthogonal to the 3rd party authentication pipeline that occurs
    # when the account is created via the browser and redirect URLs.
    should_link_with_social_auth = third_party_auth.is_enabled() and 'provider' in params

    if should_link_with_social_auth or (third_party_auth.is_enabled() and pipeline.running(request)):
        params["password"] = pipeline.make_random_password()

    # if doing signup for an external authorization, then get email, password, name from the eamap
    # don't use the ones from the form, since the user could have hacked those
    # unless originally we didn't get a valid email or name from the external auth
    # TODO: We do not check whether these values meet all necessary criteria, such as email length
    do_external_auth = 'ExternalAuthMap' in request.session
    if do_external_auth:
        eamap = request.session['ExternalAuthMap']
        try:
            validate_email(eamap.external_email)
            params["email"] = eamap.external_email
        except ValidationError:
            pass
        if eamap.external_name.strip() != '':
            params["name"] = eamap.external_name
        params["password"] = eamap.internal_password
        log.debug(u'In create_account with external_auth: user = %s, email=%s', params["name"], params["email"])

    extended_profile_fields = microsite.get_value('extended_profile_fields', [])
    enforce_password_policy = (
        settings.FEATURES.get("ENFORCE_PASSWORD_POLICY", False) and
        not do_external_auth
    )
    # Can't have terms of service for certain SHIB users, like at Stanford
    tos_required = (
        not settings.FEATURES.get("AUTH_USE_SHIB") or
        not settings.FEATURES.get("SHIB_DISABLE_TOS") or
        not do_external_auth or
        not eamap.external_domain.startswith(
            external_auth.views.SHIBBOLETH_DOMAIN_PREFIX
        )
    )

    form = AccountCreationForm(
        data=params,
        extra_fields=extra_fields,
        extended_profile_fields=extended_profile_fields,
        enforce_username_neq_password=True,
        enforce_password_policy=enforce_password_policy,
        tos_required=tos_required,
    )

    # Perform operations within a transaction that are critical to account creation
    with transaction.commit_on_success():
        # first, create the account
        (user, profile, registration) = _do_create_account(form)

        # next, link the account with social auth, if provided via the API.
        # (If the user is using the normal register page, the social auth pipeline does the linking, not this code)
        if should_link_with_social_auth:
            backend_name = params['provider']
            request.social_strategy = social_utils.load_strategy(request)
            redirect_uri = reverse('social:complete', args=(backend_name, ))
            request.backend = social_utils.load_backend(request.social_strategy, backend_name, redirect_uri)
            social_access_token = params.get('access_token')
            if not social_access_token:
                raise ValidationError({
                    'access_token': [
                        _("An access_token is required when passing value ({}) for provider.").format(
                            params['provider']
                        )
                    ]
                })
            request.session[pipeline.AUTH_ENTRY_KEY] = pipeline.AUTH_ENTRY_REGISTER_API
            pipeline_user = None
            error_message = ""
            try:
                pipeline_user = request.backend.do_auth(social_access_token, user=user)
            except AuthAlreadyAssociated:
                error_message = _("The provided access_token is already associated with another user.")
            except (HTTPError, AuthException):
                error_message = _("The provided access_token is not valid.")
            if not pipeline_user or not isinstance(pipeline_user, User):
                # Ensure user does not re-enter the pipeline
                request.social_strategy.clean_partial_pipeline()
                raise ValidationError({'access_token': [error_message]})

    # Perform operations that are non-critical parts of account creation
    preferences_api.set_user_preference(user, LANGUAGE_KEY, get_language())

    if settings.FEATURES.get('ENABLE_DISCUSSION_EMAIL_DIGEST'):
        try:
            enable_notifications(user)
        except Exception:
            log.exception("Enable discussion notifications failed for user {id}.".format(id=user.id))

    dog_stats_api.increment("common.student.account_created")

    # If the user is registering via 3rd party auth, track which provider they use
    third_party_provider = None
    running_pipeline = None
    if third_party_auth.is_enabled() and pipeline.running(request):
        running_pipeline = pipeline.get(request)
        third_party_provider = provider.Registry.get_from_pipeline(running_pipeline)

    # Track the user's registration
    if settings.FEATURES.get('SEGMENT_IO_LMS') and hasattr(settings, 'SEGMENT_IO_LMS_KEY'):
        tracking_context = tracker.get_tracker().resolve_context()
        analytics.identify(user.id, {
            'email': user.email,
            'username': user.username,
        })

        analytics.track(
            user.id,
            "edx.bi.user.account.registered",
            {
                'category': 'conversion',
                'label': params.get('course_id'),
                'provider': third_party_provider.name if third_party_provider else None
            },
            context={
                'Google Analytics': {
                    'clientId': tracking_context.get('client_id')
                }
            }
        )

    create_comments_service_user(user)

    # Don't send email if we are:
    #
    # 1. Doing load testing.
    # 2. Random user generation for other forms of testing.
    # 3. External auth bypassing activation.
    # 4. Have the platform configured to not require e-mail activation.
    # 5. Registering a new user using a trusted third party provider (with skip_email_verification=True)
    #
    # Note that this feature is only tested as a flag set one way or
    # the other for *new* systems. we need to be careful about
    # changing settings on a running system to make sure no users are
    # left in an inconsistent state (or doing a migration if they are).
    send_email = (
        not settings.FEATURES.get('SKIP_EMAIL_VALIDATION', None) and
        not settings.FEATURES.get('AUTOMATIC_AUTH_FOR_TESTING') and
        not (do_external_auth and settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH')) and
        not (
            third_party_provider and third_party_provider.skip_email_verification and
            user.email == running_pipeline['kwargs'].get('details', {}).get('email')
        )
    )
    if send_email:
        context = {
            'name': profile.name,
            'key': registration.activation_key,
        }
        # composes activation email
        subject = render_to_string('emails/activation_email_subject.txt', context)
        # Email subject *must not* contain newlines
        subject = ''.join(subject.splitlines())
        message = render_to_string('emails/activation_email.txt', context)
        from_address = microsite.get_value(
            'email_from_address',
            settings.DEFAULT_FROM_EMAIL
        )
        try:
            if settings.FEATURES.get('REROUTE_ACTIVATION_EMAIL'):
                dest_addr = settings.FEATURES['REROUTE_ACTIVATION_EMAIL']
                message = ("Activation for %s (%s): %s\n" % (user, user.email, profile.name) +
                           '-' * 80 + '\n\n' + message)
                mail.send_mail(subject, message, from_address, [dest_addr], fail_silently=False)
            else:
                user.email_user(subject, message, from_address)
        except Exception:  # pylint: disable=broad-except
            log.error(u'Unable to send activation email to user from "%s"', from_address, exc_info=True)
    else:
        registration.activate()

    # Immediately after a user creates an account, we log them in. They are only
    # logged in until they close the browser. They can't log in again until they click
    # the activation link from the email.
    new_user = authenticate(username=user.username, password=params['password'])
    login(request, new_user)
    request.session.set_expiry(0)

    all_tags = Tag.objects.all()
    for tag in all_tags:
        # Use .get() so a tag whose checkbox was not submitted at all does not
        # raise KeyError; a missing key simply means the tag was not selected.
        if params.get(tag.name.lower()) == 'true':
            UserInterestingTag.objects.create(
                user_id=new_user.id,
                tag_id=tag.id
            )

    # TODO: there is no error checking here to see that the user actually logged in successfully,
    # and is not yet an active user.
    if new_user is not None:
        AUDIT_LOG.info(u"Login success on new account creation - {0}".format(new_user.username))

    if do_external_auth:
        eamap.user = new_user
        eamap.dtsignup = datetime.datetime.now(UTC)
        eamap.save()
        AUDIT_LOG.info(u"User registered with external_auth %s", new_user.username)
        AUDIT_LOG.info(u'Updated ExternalAuthMap for %s to be %s', new_user.username, eamap)

        if settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'):
            log.info('bypassing activation email')
            new_user.is_active = True
            new_user.save()
            AUDIT_LOG.info(u"Login activated on extauth account - {0} ({1})".format(new_user.username, new_user.email))

    return new_user
@csrf_exempt
def create_account(request, post_override=None):
    """
    JSON call to create new edX account.
    Used by form in signup_modal.html, which is included into navigation.html

    Deprecated: use RegistrationView instead. Returns a JSON body with
    ``success`` plus either a field-level error (status 400) or a
    ``redirect_url`` on success.
    """
    warnings.warn("Please use RegistrationView instead.", DeprecationWarning)
    try:
        user = create_account_with_params(request, post_override or request.POST)
    except AccountValidationError as exc:
        # exc.message / exc.field identify which input was rejected.
        return JsonResponse({'success': False, 'value': exc.message, 'field': exc.field}, status=400)
    except ValidationError as exc:
        # Only the first (field, errors) pair is reported back to the client.
        # NOTE: iteritems() is Python 2 only.
        field, error_list = next(exc.message_dict.iteritems())
        return JsonResponse(
            {
                "success": False,
                "field": field,
                "value": error_list[0],
            },
            status=400
        )

    redirect_url = None  # The AJAX method calling should know the default destination upon success

    # Resume the third-party-auth pipeline if necessary.
    if third_party_auth.is_enabled() and pipeline.running(request):
        running_pipeline = pipeline.get(request)
        redirect_url = pipeline.get_complete_url(running_pipeline['backend'])

    response = JsonResponse({
        'success': True,
        'redirect_url': redirect_url,
    })
    set_logged_in_cookies(request, response, user)
    return response
def auto_auth(request):
    """
    Create or configure a user account, then log in as that user.

    Enabled only when
    settings.FEATURES['AUTOMATIC_AUTH_FOR_TESTING'] is true.

    Accepts the following querystring parameters:
    * `username`, `email`, and `password` for the user account
    * `full_name` for the user profile (the user's full name; defaults to the username)
    * `staff`: Set to "true" to make the user global staff.
    * `course_id`: Enroll the student in the course with `course_id`
    * `roles`: Comma-separated list of roles to grant the student in the course with `course_id`
    * `no_login`: Define this to create the user but not login

    If username, email, or password are not provided, use
    randomly generated credentials.
    """
    # Generate a unique name to use if none provided
    unique_name = uuid.uuid4().hex[0:30]

    # Use the params from the request, otherwise use these defaults
    username = request.GET.get('username', unique_name)
    password = request.GET.get('password', unique_name)
    email = request.GET.get('email', unique_name + "@example.com")
    full_name = request.GET.get('full_name', username)
    is_staff = request.GET.get('staff', None)
    course_id = request.GET.get('course_id', None)
    course_key = None
    if course_id:
        course_key = CourseLocator.from_string(course_id)
    role_names = [v.strip() for v in request.GET.get('roles', '').split(',') if v.strip()]
    login_when_done = 'no_login' not in request.GET

    form = AccountCreationForm(
        data={
            'username': username,
            'email': email,
            'password': password,
            'name': full_name,
        },
        tos_required=False
    )

    # Attempt to create the account.
    # If successful, this will return a tuple containing
    # the new user object.
    try:
        user, profile, reg = _do_create_account(form)
    except AccountValidationError:
        # Attempt to retrieve the existing user.
        # The account already exists; reconfigure it with the requested
        # credentials instead of failing.
        user = User.objects.get(username=username)
        user.email = email
        user.set_password(password)
        user.save()
        profile = UserProfile.objects.get(user=user)
        reg = Registration.objects.get(user=user)

    # Set the user's global staff bit
    if is_staff is not None:
        user.is_staff = (is_staff == "true")
        user.save()

    # Activate the user
    reg.activate()
    reg.save()

    # ensure parental consent threshold is met
    year = datetime.date.today().year
    age_limit = settings.PARENTAL_CONSENT_AGE_LIMIT
    profile.year_of_birth = (year - age_limit) - 1
    profile.save()

    # Enroll the user in a course
    if course_key is not None:
        CourseEnrollment.enroll(user, course_key)

    # Apply the roles
    for role_name in role_names:
        role = Role.objects.get(name=role_name, course_id=course_key)
        user.roles.add(role)

    # Log in as the user
    if login_when_done:
        # Re-authenticate so the session is bound to the (possibly updated)
        # password before logging in.
        user = authenticate(username=username, password=password)
        login(request, user)
        create_comments_service_user(user)

    # Provide the user with a valid CSRF token
    # then return a 200 response
    if request.META.get('HTTP_ACCEPT') == 'application/json':
        response = JsonResponse({
            'created_status': u"Logged in" if login_when_done else "Created",
            'username': username,
            'email': email,
            'password': password,
            'user_id': user.id,  # pylint: disable=no-member
            'anonymous_id': anonymous_id_for_user(user, None),
        })
    else:
        success_msg = u"{} user {} ({}) with password {} and user_id {}".format(
            u"Logged in" if login_when_done else "Created",
            username, email, password, user.id  # pylint: disable=no-member
        )
        response = HttpResponse(success_msg)
    response.set_cookie('csrftoken', csrf(request)['csrf_token'])
    return response
@ensure_csrf_cookie
def activate_account(request, key):
    """When link in activation e-mail is clicked

    Looks up the Registration by its activation key, activates the user
    (once), and auto-enrolls them in any courses for which a
    CourseEnrollmentAllowed with auto_enroll was recorded for their email.
    """
    regs = Registration.objects.filter(activation_key=key)
    if len(regs) == 1:
        user_logged_in = request.user.is_authenticated()
        already_active = True
        if not regs[0].user.is_active:
            regs[0].activate()
            already_active = False

            # Enroll student in any pending courses he/she may have if auto_enroll flag is set
            student = User.objects.filter(id=regs[0].user_id)
            if student:
                ceas = CourseEnrollmentAllowed.objects.filter(email=student[0].email)
                for cea in ceas:
                    if cea.auto_enroll:
                        enrollment = CourseEnrollment.enroll(student[0], cea.course_id)
                        manual_enrollment_audit = ManualEnrollmentAudit.get_manual_enrollment_by_email(student[0].email)
                        if manual_enrollment_audit is not None:
                            # get the enrolled by user and reason from the ManualEnrollmentAudit table.
                            # then create a new ManualEnrollmentAudit table entry for the same email
                            # different transition state.
                            ManualEnrollmentAudit.create_manual_enrollment_audit(
                                manual_enrollment_audit.enrolled_by, student[0].email, ALLOWEDTOENROLL_TO_ENROLLED,
                                manual_enrollment_audit.reason, enrollment
                            )

        resp = render_to_response(
            "registration/activation_complete.html",
            {
                'user_logged_in': user_logged_in,
                'already_active': already_active
            }
        )
        return resp
    if len(regs) == 0:
        return render_to_response(
            "registration/activation_invalid.html",
            {'csrf': csrf(request)['csrf_token']}
        )
    # More than one registration matched the key — should be impossible.
    return HttpResponseServerError(_("Unknown error. Please e-mail us to let us know how it happened."))
@csrf_exempt
@require_POST
def password_reset(request):
    """ Attempts to send a password reset e-mail. """
    # Add some rate limiting here by re-using the RateLimitMixin as a helper class
    limiter = BadRequestRateLimiter()
    if limiter.is_rate_limit_exceeded(request):
        AUDIT_LOG.warning("Rate limit exceeded in password_reset")
        return HttpResponseForbidden()

    form = PasswordResetFormNoActive(request.POST)
    if form.is_valid():
        form.save(use_https=request.is_secure(),
                  from_email=settings.DEFAULT_FROM_EMAIL,
                  request=request,
                  domain_override=request.get_host())
        # When password change is complete, a "edx.user.settings.changed" event will be emitted.
        # But because changing the password is multi-step, we also emit an event here so that we can
        # track where the request was initiated.
        tracker.emit(
            SETTING_CHANGE_INITIATED,
            {
                "setting": "password",
                "old": None,
                "new": None,
                "user_id": request.user.id,
            }
        )
    else:
        # bad user? tick the rate limiter counter
        AUDIT_LOG.info("Bad password_reset user passed in.")
        limiter.tick_bad_request_counter(request)

    # Always report success, even for an unknown address, so this endpoint
    # cannot be used to probe which e-mail addresses have accounts.
    return JsonResponse({
        'success': True,
        'value': render_to_string('registration/password_reset_done.html', {}),
    })
def password_reset_confirm_wrapper(
    request,
    uidb36=None,
    token=None,
):
    """ A wrapper around django.contrib.auth.views.password_reset_confirm.
      Needed because we want to set the user as active at this step.

    Also layers on optional password-policy enforcement (length/complexity/
    dictionary), a password-reuse check, and a reset-frequency check before
    delegating to Django's stock view.
    """
    # cribbed from django.contrib.auth.views.password_reset_confirm
    try:
        uid_int = base36_to_int(uidb36)
        user = User.objects.get(id=uid_int)
        user.is_active = True
        user.save()
    except (ValueError, User.DoesNotExist):
        # NOTE(review): if this lookup fails, `user` (and `uid_int`) stay
        # unbound, and the POST branches below would raise NameError —
        # presumably unreachable in practice because the token check fails
        # first; confirm before relying on it.
        pass

    # tie in password strength enforcement as an optional level of
    # security protection
    err_msg = None

    if request.method == 'POST':
        password = request.POST['new_password1']
        if settings.FEATURES.get('ENFORCE_PASSWORD_POLICY', False):
            try:
                validate_password_length(password)
                validate_password_complexity(password)
                validate_password_dictionary(password)
            except ValidationError, err:
                err_msg = _('Password: ') + '; '.join(err.messages)

        # also, check the password reuse policy
        if not PasswordHistory.is_allowable_password_reuse(user, password):
            if user.is_staff:
                num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STAFF_PASSWORDS_BEFORE_REUSE']
            else:
                num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STUDENT_PASSWORDS_BEFORE_REUSE']
            err_msg = ungettext(
                "You are re-using a password that you have used recently. You must have {num} distinct password before reusing a previous password.",
                "You are re-using a password that you have used recently. You must have {num} distinct passwords before reusing a previous password.",
                num_distinct
            ).format(num=num_distinct)

        # also, check to see if passwords are getting reset too frequent
        if PasswordHistory.is_password_reset_too_soon(user):
            num_days = settings.ADVANCED_SECURITY_CONFIG['MIN_TIME_IN_DAYS_BETWEEN_ALLOWED_RESETS']
            err_msg = ungettext(
                "You are resetting passwords too frequently. Due to security policies, {num} day must elapse between password resets.",
                "You are resetting passwords too frequently. Due to security policies, {num} days must elapse between password resets.",
                num_days
            ).format(num=num_days)

    if err_msg:
        # We have an password reset attempt which violates some security policy, use the
        # existing Django template to communicate this back to the user
        context = {
            'validlink': True,
            'form': None,
            'title': _('Password reset unsuccessful'),
            'err_msg': err_msg,
            'platform_name': settings.PLATFORM_NAME,
        }
        return TemplateResponse(request, 'registration/password_reset_confirm.html', context)
    else:
        # we also want to pass settings.PLATFORM_NAME in as extra_context
        extra_context = {"platform_name": settings.PLATFORM_NAME}

        if request.method == 'POST':
            # remember what the old password hash is before we call down
            old_password_hash = user.password

            result = password_reset_confirm(
                request, uidb36=uidb36, token=token, extra_context=extra_context
            )

            # get the updated user
            updated_user = User.objects.get(id=uid_int)

            # did the password hash change, if so record it in the PasswordHistory
            if updated_user.password != old_password_hash:
                entry = PasswordHistory()
                entry.create(updated_user)

            return result
        else:
            return password_reset_confirm(
                request, uidb36=uidb36, token=token, extra_context=extra_context
            )
def reactivation_email_for_user(user):
    """
    Re-send the account activation e-mail for ``user``.

    Returns a JsonResponse whose ``success`` flag reports whether the
    message could be composed and handed to the mail backend.
    """
    try:
        reg = Registration.objects.get(user=user)
    except Registration.DoesNotExist:
        return JsonResponse({
            "success": False,
            "error": _('No inactive user with this e-mail exists'),
        })  # TODO: this should be status code 400  # pylint: disable=fixme

    context = {
        'name': user.profile.name,
        'key': reg.activation_key,
    }

    subject = render_to_string('emails/activation_email_subject.txt', context)
    # Email subjects must not contain newlines.
    subject = ''.join(subject.splitlines())
    message = render_to_string('emails/activation_email.txt', context)

    try:
        user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
    except Exception:  # pylint: disable=broad-except
        log.error(u'Unable to send reactivation email from "%s"', settings.DEFAULT_FROM_EMAIL, exc_info=True)
        return JsonResponse({
            "success": False,
            "error": _('Unable to send reactivation email')
        })  # TODO: this should be status code 500  # pylint: disable=fixme

    return JsonResponse({"success": True})
def validate_new_email(user, new_email):
    """
    Perform basic verification of a proposed new e-mail address for a user.

    Raises ValueError (with a translated, user-facing message) when the
    address is malformed, identical to the current one, or already in use.
    """
    # Syntactic check first: reject anything that is not a well-formed address.
    try:
        validate_email(new_email)
    except ValidationError:
        raise ValueError(_('Valid e-mail address required.'))

    # The address must actually change...
    if new_email == user.email:
        raise ValueError(_('Old email is the same as the new email.'))

    # ...and must not collide with any other existing account.
    if User.objects.filter(email=new_email).count() != 0:
        raise ValueError(_('An account with this e-mail already exists.'))
def do_email_change_request(user, new_email, activation_key=None):
    """
    Given a new email for a user, does some basic verification of the new address and sends an activation message
    to the new address. If any issues are encountered with verification or sending the message, a ValueError will
    be thrown.

    :param user: the User requesting the change
    :param new_email: the address to change to
    :param activation_key: optional pre-generated key; a random one is
        created when omitted
    """
    # Reuse the user's existing pending change (there is at most one per
    # user), otherwise start a new one.
    pec_list = PendingEmailChange.objects.filter(user=user)
    if len(pec_list) == 0:
        pec = PendingEmailChange()
        pec.user = user
    else:
        pec = pec_list[0]

    # if activation_key is not passing as an argument, generate a random key
    if not activation_key:
        activation_key = uuid.uuid4().hex

    pec.new_email = new_email
    pec.activation_key = activation_key
    pec.save()

    context = {
        'key': pec.activation_key,
        'old_email': user.email,
        'new_email': pec.new_email
    }

    subject = render_to_string('emails/email_change_subject.txt', context)
    # Email subjects must not contain newlines.
    subject = ''.join(subject.splitlines())

    message = render_to_string('emails/email_change.txt', context)

    from_address = microsite.get_value(
        'email_from_address',
        settings.DEFAULT_FROM_EMAIL
    )
    try:
        # The confirmation link goes to the *new* address, proving the
        # requester controls it.
        mail.send_mail(subject, message, from_address, [pec.new_email])
    except Exception:  # pylint: disable=broad-except
        log.error(u'Unable to send email activation link to user from "%s"', from_address, exc_info=True)
        raise ValueError(_('Unable to send email activation link. Please try again later.'))

    # When the email address change is complete, a "edx.user.settings.changed" event will be emitted.
    # But because changing the email address is multi-step, we also emit an event here so that we can
    # track where the request was initiated.
    tracker.emit(
        SETTING_CHANGE_INITIATED,
        {
            "setting": "email",
            "old": context['old_email'],
            "new": context['new_email'],
            "user_id": user.id,
        }
    )
@ensure_csrf_cookie
@transaction.commit_manually
def confirm_email_change(request, key):  # pylint: disable=unused-argument
    """
    User requested a new e-mail. This is called when the activation
    link is clicked. We confirm with the old e-mail, and update

    Runs under manual transaction management: every early exit rolls the
    transaction back; only the fully successful path commits.
    """
    try:
        try:
            pec = PendingEmailChange.objects.get(activation_key=key)
        except PendingEmailChange.DoesNotExist:
            response = render_to_response("invalid_email_key.html", {})
            transaction.rollback()
            return response

        user = pec.user
        address_context = {
            'old_email': user.email,
            'new_email': pec.new_email
        }

        # Someone else may have claimed this address since the request was made.
        if len(User.objects.filter(email=pec.new_email)) != 0:
            response = render_to_response("email_exists.html", {})
            transaction.rollback()
            return response

        subject = render_to_string('emails/email_change_subject.txt', address_context)
        subject = ''.join(subject.splitlines())
        message = render_to_string('emails/confirm_email_change.txt', address_context)
        u_prof = UserProfile.objects.get(user=user)
        # Keep a history of previous addresses in the profile's meta blob.
        meta = u_prof.get_meta()
        if 'old_emails' not in meta:
            meta['old_emails'] = []
        meta['old_emails'].append([user.email, datetime.datetime.now(UTC).isoformat()])
        u_prof.set_meta(meta)
        u_prof.save()
        # Send it to the old email...
        try:
            user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
        except Exception:  # pylint: disable=broad-except
            log.warning('Unable to send confirmation email to old address', exc_info=True)
            response = render_to_response("email_change_failed.html", {'email': user.email})
            transaction.rollback()
            return response

        user.email = pec.new_email
        user.save()
        pec.delete()
        # And send it to the new email...
        try:
            user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
        except Exception:  # pylint: disable=broad-except
            log.warning('Unable to send confirmation email to new address', exc_info=True)
            response = render_to_response("email_change_failed.html", {'email': pec.new_email})
            transaction.rollback()
            return response

        response = render_to_response("email_change_successful.html", address_context)
        transaction.commit()
        return response
    except Exception:  # pylint: disable=broad-except
        # If we get an unexpected exception, be sure to rollback the transaction
        transaction.rollback()
        raise
@require_POST
@login_required
@ensure_csrf_cookie
def change_email_settings(request):
    """Modify logged-in user's setting for receiving emails from a course."""
    user = request.user

    course_id = request.POST.get("course_id")
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    receive_emails = request.POST.get("receive_emails")

    if receive_emails:
        # Opting in: remove any existing opt-out record for this course.
        existing_optout = Optout.objects.filter(user=user, course_id=course_key)
        if existing_optout:
            existing_optout.delete()
        log.info(
            u"User %s (%s) opted in to receive emails from course %s",
            user.username,
            user.email,
            course_id
        )
        track.views.server_track(request, "change-email-settings", {"receive_emails": "yes", "course": course_id}, page='dashboard')
    else:
        # Opting out: record it (idempotently) via get_or_create.
        Optout.objects.get_or_create(user=user, course_id=course_key)
        log.info(
            u"User %s (%s) opted out of receiving emails from course %s",
            user.username,
            user.email,
            course_id
        )
        track.views.server_track(request, "change-email-settings", {"receive_emails": "no", "course": course_id}, page='dashboard')

    return JsonResponse({"success": True})
|
mjirayu/sit_academy
|
common/djangoapps/student/views.py
|
Python
|
agpl-3.0
| 92,243
|
[
"VisIt"
] |
6040889533cc42b58c1fe9b3a3d355c9cff4e76924d9715a6abe72b37e6558c6
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the nullable boolean column
    ``positive_response`` to the ``survey_surveyquestionresponse`` table.
    """

    def forwards(self, orm):
        """Apply the migration: add the new column (no default backfill)."""
        # Adding field 'SurveyQuestionResponse.positive_response'
        db.add_column(u'survey_surveyquestionresponse', 'positive_response',
                      self.gf('django.db.models.fields.NullBooleanField')(default=None, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        """Reverse the migration: drop the column again."""
        # Deleting field 'SurveyQuestionResponse.positive_response'
        db.delete_column(u'survey_surveyquestionresponse', 'positive_response')

    # Auto-generated model freeze produced by South; do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'clinics.clinic': {
            'Meta': {'object_name': 'Clinic'},
            'code': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lga': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'lga_rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'pbf_rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'town': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'ward': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'clinics.clinicstaff': {
            'Meta': {'object_name': 'ClinicStaff'},
            'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']"}),
            'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_manager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'staff_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'year_started': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'})
        },
        u'clinics.patient': {
            'Meta': {'unique_together': "[('clinic', 'serial')]", 'object_name': 'Patient'},
            'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mobile': ('django.db.models.fields.CharField', [], {'max_length': '11', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'serial': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        u'clinics.service': {
            'Meta': {'object_name': 'Service'},
            'code': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
        },
        u'clinics.visit': {
            'Meta': {'object_name': 'Visit'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mobile': ('django.db.models.fields.CharField', [], {'max_length': '11', 'blank': 'True'}),
            'patient': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Patient']"}),
            'satisfied': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'sender': ('django.db.models.fields.CharField', [], {'max_length': '11', 'blank': 'True'}),
            'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Service']", 'null': 'True', 'blank': 'True'}),
            'staff': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.ClinicStaff']", 'null': 'True', 'blank': 'True'}),
            'survey_completed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'survey_sent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'survey_started': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'visit_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'welcome_sent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'rapidsms.contact': {
            'Meta': {'object_name': 'Contact'},
            'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'modified_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'survey.displaylabel': {
            'Meta': {'object_name': 'DisplayLabel'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'survey.survey': {
            'Meta': {'object_name': 'Survey'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'flow_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'max_length': '32'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'role': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True', 'blank': 'True'})
        },
        u'survey.surveyquestion': {
            'Meta': {'unique_together': "[('survey', 'label')]", 'object_name': 'SurveyQuestion'},
            'categories': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'display_label': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.DisplayLabel']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'last_negative': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'question': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'question_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'question_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'survey': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Survey']"})
        },
        u'survey.surveyquestionresponse': {
            'Meta': {'unique_together': "[('visit', 'question')]", 'object_name': 'SurveyQuestionResponse'},
            'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']", 'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'display_on_dashboard': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'positive_response': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.SurveyQuestion']"}),
            'response': ('django.db.models.fields.TextField', [], {}),
            'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Service']", 'null': 'True', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'visit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Visit']", 'null': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['survey']
|
myvoice-nigeria/myvoice
|
myvoice/survey/migrations/0013_auto__add_field_surveyquestionresponse_positive_response.py
|
Python
|
bsd-2-clause
| 12,899
|
[
"VisIt"
] |
ba92bf33ae1eecb8bc6b3d5d70201616b87237fe4aeb60f39caf974711be24a2
|
import unittest
from nose.tools import (assert_in, assert_raises, assert_equals)
import io
import logging
import numpy
from sknn.mlp import MultiLayerPerceptron as MLP
from sknn.mlp import Regressor as MLPR
from sknn.mlp import Classifier as MLPC
from sknn.mlp import Layer as L
import sknn.mlp
class TestTrainingProcedure(unittest.TestCase):
    """Exercise the stopping criteria and callbacks of the training loop."""

    def test_FitTerminateStable(self):
        X, y = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        # The pylearn2 backend names this unit differently.
        activation = "Gaussian" if sknn.backend.name == "pylearn2" else "Linear"
        self.nn = MLP(layers=[L(activation)],
                      learning_rate=0.001,
                      n_iter=None, n_stable=1, f_stable=0.01,
                      valid_set=(X, y))
        self.nn._fit(X, y)

    def test_FitAutomaticValidation(self):
        X, y = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        # No explicit valid_set: a quarter of the data is split off.
        self.nn = MLP(layers=[L("Linear")],
                      learning_rate=0.001,
                      n_iter=10, n_stable=1, f_stable=0.1,
                      valid_size=0.25)
        self.nn._fit(X, y)

    def test_TrainingInfinite(self):
        X, y = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        # Neither an iteration cap nor a stability criterion: must be rejected.
        self.nn = MLP(layers=[L("Linear")], n_iter=None, n_stable=None)
        assert_raises(AssertionError, self.nn._fit, X, y)

    def test_TrainingUserDefined(self):
        self.counter = 0

        def halt_immediately(**_):
            # Returning False from the callback stops training after one epoch.
            self.counter += 1
            return False

        X, y = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        self.nn = MLP(layers=[L("Linear")], n_iter=100, n_stable=None,
                      callback={'on_epoch_finish': halt_immediately})
        self.nn._fit(X, y)
        assert_equals(self.counter, 1)
class TestBatchSize(unittest.TestCase):
def setUp(self):
self.batch_count = 0
self.batch_items = 0
self.nn = MLP(
layers=[L("Rectifier")],
learning_rate=0.001, n_iter=1,
callback={'on_batch_start': self.on_batch_start})
def on_batch_start(self, Xb, **args):
self.batch_count += 1
self.batch_items += Xb.shape[0]
assert Xb.shape[0] <= self.nn.batch_size
def test_BatchSizeLargerThanInput(self):
self.nn.batch_size = 32
a_in, a_out = numpy.zeros((8,16)), numpy.ones((8,4))
self.nn._fit(a_in, a_out)
assert_equals(1, self.batch_count)
assert_equals(8, self.batch_items)
def test_BatchSizeSmallerThanInput(self):
self.nn.batch_size = 4
a_in, a_out = numpy.ones((8,16)), numpy.zeros((8,4))
self.nn._fit(a_in, a_out)
assert_equals(2, self.batch_count)
assert_equals(8, self.batch_items)
def test_BatchSizeNonMultiple(self):
self.nn.batch_size = 4
a_in, a_out = numpy.zeros((9,16)), numpy.ones((9,4))
self.nn._fit(a_in, a_out)
assert_equals(3, self.batch_count)
assert_equals(9, self.batch_items)
class TestCustomLogging(unittest.TestCase):
def setUp(self):
self.log = logging.getLogger('sknn')
self.log.handlers = []
self.backup, self.log.parent.handlers = self.log.parent.handlers, []
def tearDown(self):
self.log.parent.handlers = self.backup
def test_DefaultLogVerbose(self):
nn = MLPR(layers=[L("Linear")], verbose=True)
assert_equals(1, len(self.log.handlers))
assert_equals(logging.DEBUG, self.log.handlers[0].level)
def test_DefaultLogQuiet(self):
nn = MLPR(layers=[L("Linear")], verbose=False)
assert_equals(1, len(self.log.handlers))
assert_equals(logging.WARNING, self.log.handlers[0].level)
def test_VerboseNoneNoLog(self):
nn = MLPR(layers=[L("Linear")], verbose=None)
assert_equals(0, len(self.log.handlers))
class TestTrainingOutput(unittest.TestCase):
def setUp(self):
self.buf = io.StringIO()
self.hnd = logging.StreamHandler(self.buf)
logging.getLogger('sknn').addHandler(self.hnd)
def tearDown(self):
sknn.mlp.log.removeHandler(self.hnd)
def test_VerboseRegressor(self):
nn = MLPR(layers=[L("Linear")], verbose=1, n_iter=1)
a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
nn.fit(a_in, a_out)
assert_in("Epoch Training Error Validation Error Time", self.buf.getvalue())
assert_in(" 1 ", self.buf.getvalue())
assert_in(" N/A ", self.buf.getvalue())
def test_VerboseClassifier(self):
nn = MLPC(layers=[L("Softmax")], verbose=1, n_iter=1)
a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,1), dtype=numpy.int32)
nn.fit(a_in, a_out)
assert_in("Epoch Training Error Validation Error Time", self.buf.getvalue())
assert_in(" 1 ", self.buf.getvalue())
assert_in(" N/A ", self.buf.getvalue())
def test_CaughtRuntimeError(self):
nn = MLPC(layers=[L("Linear")], learning_rate=float("nan"), n_iter=1)
a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,1), dtype=numpy.int32)
assert_raises(RuntimeError, nn.fit, a_in, a_out)
assert_in("A runtime exception was caught during training.", self.buf.getvalue())
|
jgdwyer/ML-convection
|
sknn_jgd/tests/test_training.py
|
Python
|
apache-2.0
| 5,275
|
[
"Gaussian"
] |
e713c9ebc899cdf97abad85a22141d01276a4b23dd21108a5471fac79127c8e2
|
#!/usr/bin/env python
"""
ParseBlast.py
takes as input an xml results file
creates a csv summary results file
can pass file handles for results files
instead of file paths if running over
multiple files
BLAST against the swissprot database and it returns the protein gi number
BLAST against the nr database and it returns the protein gi number
The identifier in the results is relevant to the target database.
"""
### make imports
import sys,os,re,time,csv,getopt
from Bio.Blast import NCBIXML
class ParseBlast(object):
    """
    Parse an NCBI BLAST XML results file into a csv summary.

    Produces two outputs:
      * a log file (or writes into the supplied ``fhLog`` csv writer)
      * a results csv (or the supplied ``fhResults`` csv writer) with one
        row per alignment: query, hit identifier, long identifier,
        e-score and bit-score.
    """

    def __init__(self, filePath, outFile=None, fhResults=None, fhLog=None, BLASTDB=None):
        """
        filePath  - path to the BLAST results, must end in .xml or .outfmt5
        outFile   - location of where to put results
        fhResults - results file handle (csv.writer)
        fhLog     - log file handle (csv.writer)
        BLASTDB   - if given, exported as the BLASTDB environment variable
        """
        ## error checking (raw strings keep the regex escapes intact)
        if filePath is None or not re.search(r"\.xml|\.outfmt5", filePath):
            raise Exception("invalid results file - must be of type xml \n%s" % filePath)

        self.filePath = filePath
        self.fid1, self.fid2 = None, None

        # fileName is needed by both output branches below; compute it
        # unconditionally (it was previously undefined when only fhLog
        # was supplied, causing an UnboundLocalError).
        resultsHomeDir, fileName = os.path.split(filePath)
        baseName = re.sub(r"\.xml|\.outfmt5", "", fileName)

        ## prepare a log file
        if fhLog is None:
            self.fid1 = open(os.path.join(".", '%s_parsed.log' % baseName), 'w')
            self.logWriter = csv.writer(self.fid1)
        else:
            self.logWriter = fhLog

        ## prepare a summary file
        if fhResults is None:
            if outFile is None:
                outFile = os.path.join('.', '%s_parsed.csv' % baseName)
            self.fid2 = open(outFile, 'w')
            self.resultsWriter = csv.writer(self.fid2)
            self.resultsWriter.writerow(["query", "hit-identifier", "hit-identifier-long", "e-score", "bit-score"])
        else:
            self.resultsWriter = fhResults
        # May be None when the caller supplied fhResults without outFile.
        self.outFile = outFile

        ## set the environmental variable if specified
        if BLASTDB is not None:
            os.environ['BLASTDB'] = BLASTDB

    def push_out(self, line):
        """
        push a string to both STDOUT and the logfile
        """
        self.logWriter.writerow([line])
        print(line)

    def run(self):
        """
        run the parser over every record in the results file

        Writes one csv row per alignment of each query and returns the
        results file path (or None when a results handle was supplied).
        """
        self.push_out(sys.argv[0])
        self.push_out(time.asctime())
        self.push_out("Parsing results file... %s" % self.filePath)

        result_handle = open(self.filePath)
        try:
            blast = NCBIXML.parse(result_handle)
            hasResults = set([])
            noResults = set([])
            totalHits = 0
            print("parsing... %s" % ('0'))
            for record in blast:
                totalHits += 1
                print("\rparsing... %s" % (totalHits))
                if record.alignments:
                    query = record.query
                    for align in record.alignments:
                        identifier = align.accession
                        hitIdLong = align.title
                        escore = align.hsps[0].expect
                        bitScore = align.hsps[0].score
                        ## remove any commas and write the results
                        self.resultsWriter.writerow([re.sub(",", "", x) for x in [query, str(identifier), hitIdLong]] + [escore, bitScore])
                        hasResults.update([query])
                else:
                    noResults.update([record.query])
        finally:
            # the input handle was leaked before; always release it
            result_handle.close()

        hasResults = list(hasResults)
        noResults = list(noResults)
        print("\n")
        self.push_out("total hits: %s" % totalHits)
        self.push_out("sequences with at least one match : %s" % len(hasResults))
        self.push_out("sequences without any matches: %s" % len(noResults))

        ## clean up only the handles this instance opened itself
        if self.fid1 is not None:
            self.fid1.close()
        if self.fid2 is not None:
            self.fid2.close()
        print('complete.')
        return self.outFile
|
ajrichards/htsint
|
htsint/blast/ParseBlast.py
|
Python
|
bsd-3-clause
| 4,046
|
[
"BLAST"
] |
c76eb3420f2ddd579aed3765af64e8daf913f99f42a81a15bfd976d34d9a8097
|
""" The gateway service is used for forwarding service calls to the appropriate services.
For this to be used, the following CS option is required:
DIRAC
{
Gateways
{
my.site.org = dips://thisIsAn.url.org:9159/Framework/Gateway
}
}
At the same time, this same gateway service should be run with option /LocalInstallation/Site
which is different from "my.site.org" or whatever is set in the option above, to avoid initialization loops.
"""
__RCSID__ = "$id:"
import sys
import cStringIO
import DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.LockRing import LockRing
from DIRAC.Core.Utilities.DictCache import DictCache
from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
from DIRAC.Core.DISET.private.FileHelper import FileHelper
from DIRAC.Core.DISET.private.MessageBroker import MessageBroker, getGlobalMessageBroker
from DIRAC.Core.DISET.MessageClient import MessageClient
from DIRAC.Core.Security.X509Chain import X509Chain
from DIRAC.Core.Utilities.ThreadPool import ThreadPool
from DIRAC.Core.DISET.private.Service import Service
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.Core.DISET.TransferClient import TransferClient
from DIRAC.Core.DISET.private.BaseClient import BaseClient
class GatewayService( Service ):
  """ Inherits from Service so it can (and should) be run as a DIRAC service,
      but replaces several of the internal methods.

      The gateway accepts DISET connections from clients, obtains a delegated
      proxy from them, and forwards RPC calls, file transfers and messaging
      connections to the real target service on their behalf.
  """

  GATEWAY_NAME = "Framework/Gateway"

  def __init__( self ):
    """ Initialize like a real service
    """
    super(GatewayService, self).__init__(
        {'modName':GatewayService.GATEWAY_NAME,
         'loadName':GatewayService.GATEWAY_NAME,
         'standalone': True,
         'moduleObj': sys.modules[DIRAC.Core.DISET.private.GatewayService.GatewayService.__module__],
         'classObj': self.__class__} )
    # Cache of client init args keyed by (DN, group, extraCreds, limited)
    self.__delegatedCredentials = DictCache()
    # Hard cap on forwarded file transfers (100 MiB)
    self.__transferBytesLimit = 1024 * 1024 * 100
    # to be resolved in initialize()
    self._url = None
    self._handler = None
    self._threadPool = None
    self._msgBroker = None
    self._msgForwarder = None

  def initialize( self ):
    """ This replaces the standard initialize from Service:
        builds the URL, loads the handler and sets up the messaging machinery.
    """
    #Build the URLs
    self._url = self._cfg.getURL()
    if not self._url:
      return S_ERROR( "Could not build service URL for %s" % GatewayService.GATEWAY_NAME )
    gLogger.verbose( "Service URL is %s" % self._url )
    #Load handler
    result = self._loadHandlerInit()
    if not result[ 'OK' ]:
      return result
    self._handler = result[ 'Value' ]
    #Discover Handler
    self._threadPool = ThreadPool( 1,
                                   max( 0, self._cfg.getMaxThreads() ),
                                   self._cfg.getMaxWaitingPetitions() )
    self._threadPool.daemonize()
    self._msgBroker = MessageBroker( "%sMSB" % GatewayService.GATEWAY_NAME, threadPool = self._threadPool )
    # Messages are only relayed, never unpacked into objects here
    self._msgBroker.useMessageObjects( False )
    getGlobalMessageBroker().useMessageObjects( False )
    self._msgForwarder = MessageForwarder( self._msgBroker )
    return S_OK()

  def _processInThread( self, clientTransport ):
    """ Threaded process function: handshake, receive the proposal,
        resolve delegated credentials and execute the forwarded action.
    """
    #Handshake
    try:
      clientTransport.handshake()
    except:
      return
    #Add to the transport pool
    trid = self._transportPool.add( clientTransport )
    if not trid:
      return
    #Receive and check proposal
    result = self._receiveAndCheckProposal( trid )
    if not result[ 'OK' ]:
      self._transportPool.sendAndClose( trid, result )
      return
    proposalTuple = result[ 'Value' ]
    #Instantiate handler
    result = self.__getClientInitArgs( trid, proposalTuple )
    if not result[ 'OK' ]:
      self._transportPool.sendAndClose( trid, result )
      return
    clientInitArgs = result[ 'Value' ]
    #Execute the action
    result = self._processProposal( trid, proposalTuple, clientInitArgs )
    #Close the connection if required
    if result[ 'closeTransport' ]:
      self._transportPool.close( trid )
    return result

  def _receiveAndCheckProposal( self, trid ):
    """ Read the client's action proposal from the transport and register
        any extra credentials it carries.
    """
    clientTransport = self._transportPool.get( trid )
    #Get the peer credentials
    credDict = clientTransport.getConnectingCredentials()
    #Receive the action proposal
    retVal = clientTransport.receiveData( 1024 )
    if not retVal[ 'OK' ]:
      gLogger.error( "Invalid action proposal", "%s %s" % ( self._createIdentityString( credDict,
                                                                                        clientTransport ),
                                                            retVal[ 'Message' ] ) )
      return S_ERROR( "Invalid action proposal" )
    proposalTuple = retVal[ 'Value' ]
    gLogger.debug( "Received action from client", "/".join( list( proposalTuple[1] ) ) )
    #Check if there are extra credentials
    if proposalTuple[2]:
      clientTransport.setExtraCredentials( proposalTuple[2] )
    return S_OK( proposalTuple )

  def __getClientInitArgs( self, trid, proposalTuple ):
    """ Build (and cache) the keyword arguments used to impersonate the
        client towards the target service, requesting a delegated proxy
        when none is cached yet.
    """
    clientTransport = self._transportPool.get( trid )
    #Get the peer credentials
    credDict = clientTransport.getConnectingCredentials()
    if 'x509Chain' not in credDict:
      return S_OK()
    cKey = ( credDict[ 'DN' ],
             credDict.get( 'group', False ),
             credDict.get( 'extraCredentials', False ),
             credDict[ 'isLimitedProxy' ] )
    dP = self.__delegatedCredentials.get( cKey, 3600 )
    idString = self._createIdentityString( credDict, clientTransport )
    if dP:
      gLogger.verbose( "Proxy for %s is cached" % idString )
      return S_OK( dP )
    result = self.__requestDelegation( clientTransport, credDict )
    if not result[ 'OK' ]:
      gLogger.warn( "Could not get proxy for %s: %s" % ( idString, result[ 'Message' ] ) )
      return result
    delChain = result[ 'Value' ]
    delegatedChain = delChain.dumpAllToString()[ 'Value' ]
    secsLeft = delChain.getRemainingSecs()[ 'Value' ] - 1
    clientInitArgs = { BaseClient.KW_SETUP : proposalTuple[0][1],
                       BaseClient.KW_TIMEOUT : 600,
                       BaseClient.KW_IGNORE_GATEWAYS : True,
                       BaseClient.KW_USE_CERTIFICATES : False,
                       BaseClient.KW_PROXY_STRING : delegatedChain
                     }
    if BaseClient.KW_EXTRA_CREDENTIALS in credDict:
      clientInitArgs[ BaseClient.KW_EXTRA_CREDENTIALS ] = credDict[ BaseClient.KW_EXTRA_CREDENTIALS ]
    gLogger.warn( "Got delegated proxy for %s: %s secs left" % ( idString, secsLeft ) )
    # Cache for the remaining lifetime of the delegated proxy
    self.__delegatedCredentials.add( cKey, secsLeft, clientInitArgs )
    return S_OK( clientInitArgs )

  def __requestDelegation( self, clientTransport, credDict ):
    """ Ask the connecting client to delegate a proxy: send it a proxy
        request generated from its certificate chain and load the signed
        chain it returns.
    """
    peerChain = credDict[ 'x509Chain' ]
    retVal = peerChain.getCertInChain()[ 'Value' ].generateProxyRequest()
    if not retVal[ 'OK' ]:
      return retVal
    delegationRequest = retVal[ 'Value' ]
    retVal = delegationRequest.dumpRequest()
    if not retVal[ 'OK' ]:
      retVal = S_ERROR( "Server Error: Can't generate delegation request" )
      clientTransport.sendData( retVal )
      return retVal
    gLogger.info( "Sending delegation request for %s" % delegationRequest.getSubjectDN()[ 'Value' ] )
    clientTransport.sendData( S_OK( { 'delegate' : retVal[ 'Value' ] } ) )
    delegatedCertChain = clientTransport.receiveData()
    delegatedChain = X509Chain( keyObj = delegationRequest.getPKey() )
    retVal = delegatedChain.loadChainFromString( delegatedCertChain )
    if not retVal[ 'OK' ]:
      retVal = S_ERROR( "Error in receiving delegated proxy: %s" % retVal[ 'Message' ] )
      clientTransport.sendData( retVal )
      return retVal
    return S_OK( delegatedChain )

  #Msg

  def _mbConnect( self, trid, handlerObj = None ):
    """ Messaging connect hook: nothing to do, forwarding handles it. """
    return S_OK()

  def _mbReceivedMsg( self, cliTrid, msgObj ):
    """ Relay a received message to the forwarder. """
    return self._msgForwarder.msgFromClient( cliTrid, msgObj )

  def _mbDisconnect( self, cliTrid ):
    """ Notify the forwarder that a messaging client went away. """
    self._msgForwarder.cliDisconnect( cliTrid )

  #Execute action

  def _executeAction( self, trid, proposalTuple, clientInitArgs ):
    """ Dispatch the proposed action (FileTransfer / RPC / Connection)
        to the matching forwarding helper and send the result back.
    """
    clientTransport = self._transportPool.get( trid )
    credDict = clientTransport.getConnectingCredentials()
    targetService = proposalTuple[0][0]
    actionType = proposalTuple[1][0]
    actionMethod = proposalTuple[1][1]
    idString = self._createIdentityString( credDict, clientTransport )
    #Okay! Let's do the magic!
    retVal = clientTransport.receiveData()
    if not retVal[ 'OK' ]:
      gLogger.error( "Error while receiving file description", retVal[ 'Message' ] )
      clientTransport.sendData( S_ERROR( "Error while receiving file description: %s" % retVal[ 'Message' ] ) )
      return
    if actionType == "FileTransfer":
      gLogger.warn( "Received a file transfer action from %s" % idString )
      clientTransport.sendData( S_OK( "Accepted" ) )
      retVal = self.__forwardFileTransferCall( targetService, clientInitArgs,
                                              actionMethod, retVal[ 'Value' ], clientTransport )
    elif actionType == "RPC":
      gLogger.info( "Forwarding %s/%s action to %s for %s" % ( actionType, actionMethod, targetService, idString ) )
      retVal = self.__forwardRPCCall( targetService, clientInitArgs, actionMethod, retVal[ 'Value' ] )
    elif actionType == "Connection" and actionMethod == "new":
      gLogger.info( "Initiating a messaging connection to %s for %s" % ( targetService, idString ) )
      retVal = self._msgForwarder.addClient( trid, targetService, clientInitArgs, retVal[ 'Value' ] )
    else:
      gLogger.warn( "Received an invalid %s/%s action from %s" % ( actionType, actionMethod, idString ) )
      retVal = S_ERROR( "Unknown type of action (%s)" % actionType )
    #TODO: Send back the data?
    if 'rpcStub' in retVal:
      retVal.pop( 'rpcStub' )
    clientTransport.sendData( retVal )
    return retVal

  def __forwardRPCCall( self, targetService, clientInitArgs, method, params ):
    """ Forward an RPC call with the client's delegated credentials.
        CS getCompressedDataIfNewer is answered locally to avoid a round trip.
    """
    if targetService == "Configuration/Server":
      if method == "getCompressedDataIfNewer":
        #Relay CS data directly
        serviceVersion = gConfigurationData.getVersion()
        retDict = { 'newestVersion' : serviceVersion }
        clientVersion = params[0]
        if clientVersion < serviceVersion:
          retDict[ 'data' ] = gConfigurationData.getCompressedData()
        return S_OK( retDict )
    #Default
    rpcClient = RPCClient( targetService, **clientInitArgs )
    methodObj = getattr( rpcClient, method )
    return methodObj( *params )

  def __forwardFileTransferCall( self, targetService, clientInitArgs, method,
                                 params, clientTransport ):
    """ Forward a file transfer through a TransferRelay, enforcing the
        configured transfer size limit in both directions.
    """
    transferRelay = TransferRelay( targetService, **clientInitArgs )
    transferRelay.setTransferLimit( self.__transferBytesLimit )
    cliFH = FileHelper( clientTransport )
    #Check file size
    if method.find( "ToClient" ) > -1:
      cliFH.setDirection( "send" )
    elif method.find( "FromClient" ) > -1:
      cliFH.setDirection( "receive" )
      if not self.__ftCheckMaxTransferSize( params[2] ):
        cliFH.markAsTransferred()
        return S_ERROR( "Transfer size is too big" )
    #Forward queries
    try:
      relayMethodObject = getattr( transferRelay, 'forward%s' % method )
    except:
      return S_ERROR( "Cannot forward unknown method %s" % method )
    result = relayMethodObject( cliFH, params )
    return result

  def __ftCheckMaxTransferSize( self, requestedTransferSize ):
    """ True if the requested size is unknown, unlimited or within the cap. """
    if not self.__transferBytesLimit:
      return True
    if not requestedTransferSize:
      return True
    if requestedTransferSize <= self.__transferBytesLimit:
      return True
    return False
class TransferRelay( TransferClient ):
def setTransferLimit( self, trLimit ):
  """ Record the maximum number of bytes a relayed transfer may carry
      and reset the tag used to prefix log messages.
  """
  self.__currentMethod = ""
  self.__transferBytesLimit = trLimit
def infoMsg( self, msg, dynMsg = "" ):
  """ Emit an info-level log line tagged with the method being relayed. """
  taggedMsg = "[%s] %s" % ( self.__currentMethod, msg )
  gLogger.info( taggedMsg, dynMsg )
def errMsg( self, msg, dynMsg = "" ):
  """ Emit an error-level log line tagged with the method being relayed. """
  taggedMsg = "[%s] %s" % ( self.__currentMethod, msg )
  gLogger.error( taggedMsg, dynMsg )
def getDataFromClient( self, clientFileHelper ):
  """ Drain the client's upload into memory, honouring the byte limit.
      Returns S_OK( data ) or the error from the network sink.
  """
  self.infoMsg( "About to get data from client" )
  sink = cStringIO.StringIO()
  result = clientFileHelper.networkToDataSink( sink, self.__transferBytesLimit )
  if not result[ 'OK' ]:
    sink.close()
    self.errMsg( "Could not get data from client", result[ 'Message' ] )
    return result
  data = sink.getvalue()
  sink.close()
  self.infoMsg( "Got %s bytes from client" % len( data ) )
  return S_OK( data )
def sendDataToClient( self, clientFileHelper, dataToSend ):
  """ Push the buffered payload back down to the originating client. """
  self.infoMsg( "About to get send data to client" )
  result = clientFileHelper.BufferToNetwork( dataToSend )
  if result[ 'OK' ]:
    self.infoMsg( "Sent %s bytes from client" % len( dataToSend ) )
    return S_OK()
  self.errMsg( "Could not send data to client", result[ 'Message' ] )
  return result
def sendDataToService( self, srvMethod, params, data ):
  """ Forward an upload to the target service: send the transfer header,
      stream the buffered data, then relay the service's final response.
  """
  self.infoMsg( "Sending header request to %s" % self.getDestinationService(), str( params ) )
  result = self._sendTransferHeader( srvMethod, params )
  if not result[ 'OK' ]:
    self.errMsg( "Could not send header", result[ 'Message' ] )
    return result
  self.infoMsg( "Starting to send data to service" )
  trid, srvTransport = result[ 'Value' ]
  srvFileHelper = FileHelper( srvTransport )
  srvFileHelper.setDirection( "send" )
  result = srvFileHelper.BufferToNetwork( data )
  if not result[ 'OK' ]:
    self.errMsg( "Could send data to server", result[ 'Message' ] )
    srvTransport.close()
    return result
  self.infoMsg( "Data sent to service (%s bytes)" % len( data ) )
  # The service's verdict on the transfer is passed back verbatim
  retVal = srvTransport.receiveData()
  srvTransport.close()
  return retVal
def getDataFromService( self, srvMethod, params ):
  """ Download from the target service: send the transfer header, sink
      the incoming stream into memory (size-limited), and return both
      the data and the service's closing response.
  """
  self.infoMsg( "Sending header request to %s" % self.getDestinationService(), str( params ) )
  result = self._sendTransferHeader( srvMethod, params )
  if not result[ 'OK' ]:
    self.errMsg( "Could not send header", result[ 'Message' ] )
    return result
  self.infoMsg( "Starting to receive data from service" )
  trid, srvTransport = result[ 'Value' ]
  srvFileHelper = FileHelper( srvTransport )
  srvFileHelper.setDirection( "receive" )
  sIO = cStringIO.StringIO()
  result = srvFileHelper.networkToDataSink( sIO, self.__transferBytesLimit )
  if not result[ 'OK' ]:
    self.errMsg( "Could not receive data from server", result[ 'Message' ] )
    srvTransport.close()
    sIO.close()
    return result
  dataReceived = sIO.getvalue()
  sIO.close()
  self.infoMsg( "Received %s bytes from service" % len( dataReceived ) )
  # Final response sent by the service after the payload
  retVal = srvTransport.receiveData()
  srvTransport.close()
  if not retVal[ 'OK' ]:
    return retVal
  return S_OK( { 'data' : dataReceived, 'srvResponse' : retVal } )
def forwardFromClient( self, clientFileHelper, params ):
  """Relay a single file uploaded by the client to the destination service."""
  fileId, token = params[:2]
  self.__currentMethod = "FromClient"
  cliResult = self.getDataFromClient( clientFileHelper )
  if not cliResult[ 'OK' ]:
    return cliResult
  bytesRead = clientFileHelper.getTransferedBytes()
  return self.sendDataToService( "FromClient",
                                 ( fileId, token, bytesRead ),
                                 cliResult[ 'Value' ] )
def forwardBulkFromClient( self, clientFileHelper, params ):
  """Relay a bulk upload from the client to the destination service."""
  fileId, token = params[:2]
  self.__currentMethod = "BulkFromClient"
  cliResult = self.getDataFromClient( clientFileHelper )
  if not cliResult[ 'OK' ]:
    return cliResult
  bytesRead = clientFileHelper.getTransferedBytes()
  return self.sendDataToService( "BulkFromClient",
                                 ( fileId, token, bytesRead ),
                                 cliResult[ 'Value' ] )
def forwardToClient( self, clientFileHelper, params ):
  """Relay a single file from the destination service back to the client."""
  fileId, token = params[:2]
  self.__currentMethod = "ToClient"
  srvResult = self.getDataFromService( "ToClient", ( fileId, token ) )
  if not srvResult[ 'OK' ]:
    return srvResult
  transferInfo = srvResult[ 'Value' ]
  sendResult = self.sendDataToClient( clientFileHelper, transferInfo[ 'data' ] )
  if not sendResult[ 'OK' ]:
    return sendResult
  # Hand the service's final reply back to the caller
  return transferInfo[ 'srvResponse' ]
def forwardBulkToClient( self, clientFileHelper, params ):
  """Relay a bulk download from the destination service back to the client."""
  fileId, token = params[:2]
  self.__currentMethod = "BulkToClient"
  srvResult = self.getDataFromService( "BulkToClient", ( fileId, token ) )
  if not srvResult[ 'OK' ]:
    return srvResult
  transferInfo = srvResult[ 'Value' ]
  sendResult = self.sendDataToClient( clientFileHelper, transferInfo[ 'data' ] )
  if not sendResult[ 'OK' ]:
    return sendResult
  # Hand the service's final reply back to the caller
  return transferInfo[ 'srvResponse' ]
def forwardListBulk( self, clientFileHelper, params ):
  """
  Forward a ListBulk request to the destination service and return its reply.

  :param clientFileHelper: FileHelper wrapping the client connection
                           (not read here; present for interface uniformity
                           with the other forward* methods)
  :param params: parameters tuple for the transfer header
  :return: the raw service response structure, or S_ERROR on failure
  """
  self.__currentMethod = "ListBulk"
  self.infoMsg( "Sending header request to %s" % self.getDestinationService(), str( params ) )
  result = self._sendTransferHeader( "ListBulk", params )
  if not result[ 'OK' ]:
    self.errMsg( "Could not send header", result[ 'Message' ] )
    return result
  trid, srvTransport = result[ 'Value' ]
  # Listing replies are read in one shot, capped at 1 MiB
  response = srvTransport.receiveData( 1048576 )
  srvTransport.close()
  self.infoMsg( "Sending data back to client" )
  return response
class MessageForwarder( object ):
  """
  Relays messaging connections through the gateway: for each client transport
  id (trid) it keeps a MessageClient connected to the destination service and
  forwards messages in both directions.
  """

  def __init__( self, msgBroker ):
    self.__inOutLock = LockRing().getLock()
    self.__msgBroker = msgBroker
    # cliTrid -> { 'srvEnd' : MessageClient, 'srvTrid' : <str>, 'srvName' : <str> }
    self.__byClient = {}
    # reverse index: service-side trid -> client trid
    self.__srvToCliTrid = {}

  def addClient( self, cliTrid, destination, clientInitParams, connectParams ):
    """Create, connect and register a service-side MessageClient for a new client trid."""
    if cliTrid in self.__byClient:
      gLogger.fatal( "Trid is duplicated!! this shouldn't happen" )
      return
    msgClient = MessageClient( destination, **clientInitParams )
    msgClient.subscribeToDisconnect( self.__srvDisconnect )
    msgClient.subscribeToAllMessages( self.msgFromSrv )
    msgClient.setUniqueName( connectParams[0] )
    result = msgClient.connect( **connectParams[1] )
    if not result[ 'OK' ]:
      return result
    self.__inOutLock.acquire()
    try:
      self.__byClient[ cliTrid ] = { 'srvEnd' : msgClient,
                                     'srvTrid' : msgClient.getTrid(),
                                     'srvName' : destination }
      self.__srvToCliTrid[ msgClient.getTrid() ] = cliTrid
    finally:
      self.__inOutLock.release()
    return result

  def __srvDisconnect( self, srvEndCli ):
    """Handle the service side dropping the connection: tear down the client side too."""
    try:
      cliTrid = self.__srvToCliTrid[ srvEndCli.getTrid() ]
    except KeyError:
      # Bug fix: a missing dict key raises KeyError, not IndexError, so the
      # original except clause could never match.  Also return early:
      # without it, cliTrid would be undefined in the lines below.
      gLogger.exception( "This shouldn't happen!" )
      return
    gLogger.info( "Service %s disconnected messaging connection" % self.__byClient[ cliTrid ][ 'srvName' ] )
    self.__msgBroker.removeTransport( cliTrid )
    self.__removeClient( cliTrid )

  def cliDisconnect( self, cliTrid ):
    """Handle the client side dropping the connection: disconnect the service end."""
    if cliTrid not in self.__byClient:
      gLogger.fatal( "This shouldn't happen!" )
      return
    gLogger.info( "Client to %s disconnected messaging connection" % self.__byClient[ cliTrid ][ 'srvName' ] )
    self.__byClient[ cliTrid ][ 'srvEnd' ].disconnect()
    self.__removeClient( cliTrid )

  def __removeClient( self, cliTrid ):
    """Drop both direction mappings for a client trid (best-effort, under the lock)."""
    self.__inOutLock.acquire()
    try:
      try:
        srvTrid = self.__byClient[ cliTrid ][ 'srvTrid' ]
        self.__byClient.pop( cliTrid )
        self.__srvToCliTrid.pop( srvTrid )
      except Exception:
        gLogger.exception( "This shouldn't happen!" )
    finally:
      self.__inOutLock.release()

  def msgFromClient( self, cliTrid, msgObj ):
    """Forward a message received from the client to the service end."""
    gLogger.info( "Message %s to %s service" % ( msgObj.getName(), self.__byClient[ cliTrid ][ 'srvName' ] ) )
    result = self.__byClient[ cliTrid ][ 'srvEnd' ].sendMessage( msgObj )
    return result

  def msgFromSrv( self, srvEndCli, msgObj ):
    """Forward a message received from the service back to the client."""
    try:
      cliTrid = self.__srvToCliTrid[ srvEndCli.getTrid() ]
    except KeyError:
      # Narrowed from a bare except: only a missing mapping is expected here
      gLogger.exception( "This shouldn't happen" )
      return S_ERROR( "MsgFromSrv -> Mismatched srv2cli trid" )
    gLogger.info( "Message %s from %s service" % ( msgObj.getName(), self.__byClient[ cliTrid ][ 'srvName' ] ) )
    return self.__msgBroker.sendMessage( cliTrid, msgObj )
|
Andrew-McNab-UK/DIRAC
|
Core/DISET/private/GatewayService.py
|
Python
|
gpl-3.0
| 20,150
|
[
"DIRAC"
] |
cd423a8dc1fa76cc34960bf50961fa1c9cd8c57381d04a24cf069adb5a01577c
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import unittest
import numpy
from functools import reduce
from pyscf import lib
from pyscf import gto
from pyscf import symm
from pyscf.symm import geom
# Fixed seed so the random matrix below (and every test geometry built from
# it) is reproducible across runs.
numpy.random.seed(12)
u = numpy.random.random((3,3))
# Keep only the left singular vectors: a random orthogonal 3x3 matrix used
# throughout the tests to rotate reference geometries.
u = numpy.linalg.svd(u)[0]
class KnownValues(unittest.TestCase):
def test_d5h(self):
atoms = ringhat(5, u)
atoms = atoms[5:]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'D5h')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'C2v')
self.assertTrue(geom.check_given_symm('C2v', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0], [1, 4], [2, 3], [5, 6]])
atoms = ringhat(5, u)
atoms = atoms[5:]
atoms[1][0] = 'C1'
gpname, orig, axes = geom.detect_symm(atoms, {'C':'ccpvdz','C1':'sto3g','N':'631g'})
self.assertEqual(gpname, 'C2v')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'C2v')
self.assertTrue(geom.check_given_symm('C2v', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0,2],[1],[3,4],[5,6]])
def test_d6h(self):
atoms = ringhat(6, u)
atoms = atoms[6:]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'D6h')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0,3],[1,2,4,5],[6,7]])
self.assertTrue(geom.check_given_symm('D2h', atoms))
def test_d6h_1(self):
atoms = ringhat(6, u)
atoms = atoms[6:]
atoms[1][0] = 'C1'
atoms[2][0] = 'C1'
basis = {'C': 'sto3g', 'N':'sto3g', 'C1':'sto3g'}
gpname, orig, axes = geom.detect_symm(atoms, basis)
self.assertEqual(gpname, 'D6h')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0,3],[1,2,4,5],[6,7]])
self.assertTrue(geom.check_given_symm('D2h', atoms, basis))
def test_c2v(self):
atoms = ringhat(6, u)
atoms = atoms[6:]
atoms[1][0] = 'C1'
atoms[2][0] = 'C1'
basis = {'C': 'sto3g', 'N':'sto3g', 'C1':'ccpvdz'}
gpname, orig, axes = geom.detect_symm(atoms, basis)
self.assertEqual(gpname, 'C2v')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 3], [1, 2], [4, 5], [6, 7]])
self.assertTrue(geom.check_given_symm('C2', atoms, basis))
def test_s4(self):
atoms = [['C', ( 0.5, 0 , 1)],
['O', ( 0.4, 0.2 , 1)],
['C', ( -0.5, 0 , 1)],
['O', ( -0.4, -0.2 , 1)],
['C', ( 0 , 0.5 , -1)],
['O', ( -0.2, 0.4 , -1)],
['C', ( 0 , -0.5 , -1)],
['O', ( 0.2, -0.4 , -1)]]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'S4')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 2], [1, 3], [4, 6], [5, 7]])
self.assertTrue(geom.check_given_symm('C2', atoms))
def test_s6(self):
rotz = random_rotz()
atoms = ringhat(3, 1)[3:6] + ringhat(3, rotz)[:3]
rotz[2,2] = -1
rotz[:2,:2] = numpy.array(((.5, numpy.sqrt(3)/2),(-numpy.sqrt(3)/2, .5)))
r = numpy.dot([x[1] for x in atoms], rotz) - numpy.array((0,0,3.5))
atoms += list(zip([x[0] for x in atoms], r))
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'S6')
gpname, axes = geom.subgroup(gpname, axes)
self.assertEqual(gpname, 'C3')
def test_c5h(self):
atoms = ringhat(5, u)
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'C5h')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Cs')
self.assertTrue(geom.check_given_symm('Cs', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0], [1], [2], [3], [4], [5], [6], [7], [8], [9], [10,11]])
def test_c5(self):
atoms = ringhat(5, u)[:-1]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'C5')
gpname, axes = geom.subgroup(gpname, axes)
self.assertEqual(gpname, 'C1')
def test_c5v(self):
atoms = ringhat(5, u)[5:-1]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'C5v')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Cs')
self.assertTrue(geom.check_given_symm('Cs', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 4], [1, 3], [2], [5]])
def test_ih1(self):
coords = numpy.dot(make60(1.5, 1), u)
atoms = [['C', c] for c in coords]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Ih')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Ci')
self.assertTrue(geom.check_given_symm('Ci', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 55], [1, 56], [2, 57], [3, 58], [4, 59],
[5, 30], [6, 31], [7, 32], [8, 33], [9, 34],
[10, 35], [11, 36], [12, 37], [13, 38], [14, 39],
[15, 40], [16, 41], [17, 42], [18, 43], [19, 44],
[20, 45], [21, 46], [22, 47], [23, 48], [24, 49],
[25, 50], [26, 51], [27, 52], [28, 53], [29, 54]])
def test_ih2(self):
coords1 = numpy.dot(make60(1.5, 3), u)
coords2 = numpy.dot(make12(1.1), u)
atoms = [['C', c] for c in coords1] + [['C', c] for c in coords2]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Ih')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Ci')
self.assertTrue(geom.check_given_symm('Ci', atoms))
def test_ih3(self):
coords1 = numpy.dot(make20(1.5), u)
atoms = [['C', c] for c in coords1]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Ih')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Ci')
self.assertTrue(geom.check_given_symm('Ci', atoms))
def test_ih4(self):
coords1 = make12(1.5)
atoms = [['C', c] for c in coords1]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Ih')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Ci')
self.assertTrue(geom.check_given_symm('Ci', atoms))
def test_d5d_1(self):
coords1 = numpy.dot(make20(2.0), u)
coords2 = numpy.dot(make12(1.1), u)
atoms = [['C', c] for c in coords1] + [['C', c] for c in coords2]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'D5d')
def test_s10(self):
numpy.random.seed(19)
rotz = numpy.eye(3)
rotz[:2,:2] = numpy.linalg.svd(numpy.random.random((2,2)))[0]
coords1 = numpy.dot(make60(1.5, 3.0), u)
coords2 = reduce(numpy.dot, (make20(1.1), rotz, u))
atoms = [['C', c] for c in coords1] + [['C', c] for c in coords2]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'S10')
def test_oh1(self):
coords1 = numpy.dot(make6(1.5), u)
atoms = [['C', c] for c in coords1]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Oh')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'D2h')
self.assertTrue(geom.check_given_symm('D2h', atoms))
def test_oh2(self):
coords1 = numpy.dot(make8(1.5), u)
atoms = [['C', c] for c in coords1]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Oh')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'D2h')
self.assertTrue(geom.check_given_symm('D2h', atoms))
def test_oh3(self):
coords1 = numpy.dot(make8(1.5), u)
coords2 = numpy.dot(make6(1.5), u)
atoms = [['C', c] for c in coords1] + [['C', c] for c in coords2]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Oh')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'D2h')
self.assertTrue(geom.check_given_symm('D2h', atoms))
def test_c4h(self):
coords1 = numpy.dot(make8(1.5), u)
coords2 = numpy.dot(make6(1.5).dot(random_rotz()), u)
atoms = [['C', c] for c in coords1] + [['C', c] for c in coords2]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'C4h')
def test_c2h(self):
coords1 = numpy.dot(make8(2.5), u)
coords2 = numpy.dot(make20(1.2), u)
atoms = [['C', c] for c in numpy.vstack((coords1,coords2))]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'C2h')
def test_c1(self):
coords1 = numpy.dot(make4(2.5), u)
coords2 = make20(1.2)
atoms = [['C', c] for c in numpy.vstack((coords1,coords2))]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'C1')
def test_c2(self):
coords1 = numpy.dot(make4(2.5), 1)
coords2 = make12(1.2)
axes = geom._make_axes(coords2[1]-coords2[0], coords2[2])
coords2 = numpy.dot(coords2, axes.T)
atoms = [['C', c] for c in numpy.vstack((coords1,coords2))]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'C2')
def test_ci(self):
coords1 = numpy.dot(make8(2.5), u)
coords2 = numpy.dot(numpy.dot(make20(1.2), random_rotz()), u)
atoms = [['C', c] for c in numpy.vstack((coords1,coords2))]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Ci')
def test_cs(self):
coords1 = make4(2.5)
axes = geom._make_axes(coords1[1]-coords1[0], coords1[2])
coords1 = numpy.dot(coords1, axes.T)
coords2 = make12(1.2)
axes = geom._make_axes(coords2[1]-coords2[0], coords2[2])
coords2 = numpy.dot(coords2, axes.T)
atoms = [['C', c] for c in numpy.vstack((coords1,coords2))]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Cs')
numpy.random.seed(1)
c0 = numpy.random.random((4,3))
c0[:,1] *= .5
c1 = c0.copy()
c1[:,1] *= -1
atoms = [['C', c] for c in numpy.vstack((c0,c1))]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Cs')
def test_td1(self):
coords1 = numpy.dot(make4(1.5), u)
atoms = [['C', c] for c in coords1]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Td')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'D2')
self.assertTrue(geom.check_given_symm('D2', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 1, 2, 3]])
def test_td2(self):
coords1 = numpy.dot(make4(1.5), u)
coords2 = numpy.dot(make4(1.9), u)
atoms = [['C', c] for c in coords1] + [['C', c] for c in coords2]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Td')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'D2')
self.assertTrue(geom.check_given_symm('D2', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 1, 2, 3], [4, 5, 6, 7]])
def test_c3v(self):
coords1 = numpy.dot(make4(1.5), u)
coords2 = numpy.dot(make4(1.9), u)
atoms = [['C', c] for c in coords1] + [['C', c] for c in coords2]
atoms[2][0] = 'C1'
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'C3v')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Cs')
self.assertTrue(geom.check_given_symm('Cs', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 1], [2], [3], [4, 5], [6], [7]])
def test_c3v_1(self):
mol = gto.M(atom='''
C 0.948065 -0.081406 -0.007893
C 0.462608 -0.144439 1.364854
N 0.077738 -0.194439 2.453356
H 0.591046 0.830035 -0.495369
H 0.591062 -0.944369 -0.576807
H 2.041481 -0.080642 -0.024174''')
gpname, orig, axes = geom.detect_symm(mol._atom)
self.assertEqual(gpname, 'C1')
with lib.temporary_env(geom, TOLERANCE=1e-3):
gpname, orig, axes = geom.detect_symm(mol._atom)
self.assertEqual(gpname, 'C3v')
def test_t(self):
atoms = [['C', ( 1.0 ,-1.0 , 1.0 )],
['O', ( 1.0-.1,-1.0+.2, 1.0 )],
['O', ( 1.0 ,-1.0+.1, 1.0-.2)],
['O', ( 1.0-.2,-1.0 , 1.0-.1)],
['C', (-1.0 , 1.0 , 1.0 )],
['O', (-1.0+.1, 1.0-.2, 1.0 )],
['O', (-1.0 , 1.0-.1, 1.0-.2)],
['O', (-1.0+.2, 1.0 , 1.0-.1)],
['C', ( 1.0 , 1.0 ,-1.0 )],
['O', ( 1.0-.2, 1.0 ,-1.0+.1)],
['O', ( 1.0 , 1.0-.1,-1.0+.2)],
['O', ( 1.0-.1, 1.0-.2,-1.0 )],
['C', (-1.0 ,-1.0 ,-1.0 )],
['O', (-1.0 ,-1.0+.1,-1.0+.2)],
['O', (-1.0+.2,-1.0 ,-1.0+.1)],
['O', (-1.0+.1,-1.0+.2,-1.0 )]]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'T')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'D2')
self.assertTrue(geom.check_given_symm('D2', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 4, 8, 12], [1, 5, 11, 15],
[2, 6, 10, 13], [3, 7, 9, 14]])
def test_th(self):
atoms = [['C', ( 1.0 ,-1.0 , 1.0 )],
['O', ( 1.0-.1,-1.0+.2, 1.0 )],
['O', ( 1.0 ,-1.0+.1, 1.0-.2)],
['O', ( 1.0-.2,-1.0 , 1.0-.1)],
['C', ( 1.0 , 1.0 , 1.0 )],
['O', ( 1.0-.1, 1.0-.2, 1.0 )],
['O', ( 1.0 , 1.0-.1, 1.0-.2)],
['O', ( 1.0-.2, 1.0 , 1.0-.1)],
['C', (-1.0 , 1.0 , 1.0 )],
['O', (-1.0+.1, 1.0-.2, 1.0 )],
['O', (-1.0 , 1.0-.1, 1.0-.2)],
['O', (-1.0+.2, 1.0 , 1.0-.1)],
['C', (-1.0 ,-1.0 , 1.0 )],
['O', (-1.0+.1,-1.0+.2, 1.0 )],
['O', (-1.0 ,-1.0+.1, 1.0-.2)],
['O', (-1.0+.2,-1.0 , 1.0-.1)],
['C', ( 1.0 ,-1.0 ,-1.0 )],
['O', ( 1.0-.2,-1.0 ,-1.0+.1)],
['O', ( 1.0 ,-1.0+.1,-1.0+.2)],
['O', ( 1.0-.1,-1.0+.2,-1.0 )],
['C', ( 1.0 , 1.0 ,-1.0 )],
['O', ( 1.0-.2, 1.0 ,-1.0+.1)],
['O', ( 1.0 , 1.0-.1,-1.0+.2)],
['O', ( 1.0-.1, 1.0-.2,-1.0 )],
['C', (-1.0 , 1.0 ,-1.0 )],
['O', (-1.0+.2, 1.0 ,-1.0+.1)],
['O', (-1.0 , 1.0-.1,-1.0+.2)],
['O', (-1.0+.1, 1.0-.2,-1.0 )],
['C', (-1.0 ,-1.0 ,-1.0 )],
['O', (-1.0 ,-1.0+.1,-1.0+.2)],
['O', (-1.0+.2,-1.0 ,-1.0+.1)],
['O', (-1.0+.1,-1.0+.2,-1.0 )]]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'Th')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'D2')
self.assertTrue(geom.check_given_symm('D2', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 8, 20, 28], [1, 9, 23, 31],
[2, 10, 22, 29], [3, 11, 21, 30],
[4, 12, 16, 24], [5, 13, 19, 27],
[6, 14, 18, 26], [7, 15, 17, 25]])
def test_s4(self):
coords1 = numpy.dot(make4(1.5), u)
coords2 = numpy.dot(numpy.dot(make4(2.4), random_rotz()), u)
atoms = [['C', c] for c in coords1] + [['C', c] for c in coords2]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'S4')
def test_Dooh(self):
atoms = [['H', (0,0,0)], ['H', (0,0,-1)], ['H1', (0,0,1)]]
basis = {'H':'sto3g'}
gpname, orig, axes = geom.detect_symm(atoms, basis)
self.assertEqual(gpname, 'Dooh')
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0], [1,2]])
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Dooh')
self.assertTrue(geom.check_given_symm('Dooh', atoms, basis))
self.assertTrue(geom.check_given_symm('D2h', atoms, basis))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0], [1,2]])
def test_Coov(self):
atoms = [['H', (0,0,0)], ['H', (0,0,-1)], ['H1', (0,0,1)]]
basis = {'H':'sto3g', 'H1':'6-31g'}
gpname, orig, axes = geom.detect_symm(atoms, basis)
self.assertEqual(gpname, 'Coov')
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0], [1] ,[2]])
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'Coov')
self.assertTrue(geom.check_given_symm('Coov', atoms, basis))
self.assertTrue(geom.check_given_symm('C2v', atoms, basis))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0], [1], [2]])
def test_d5(self):
coord1 = ring(5)
coord2 = ring(5, .1)
coord1[:,2] = 1
coord2[:,2] =-1
atoms = [['H', c] for c in numpy.vstack((coord1,coord2))]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'D5')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'C2')
self.assertTrue(geom.check_given_symm('C2', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 5], [1, 9], [2, 8], [3, 7], [4, 6]])
def test_d5d(self):
coord1 = ring(5)
coord2 = ring(5, numpy.pi/5)
coord1[:,2] = 1
coord2[:,2] =-1
atoms = [['H', c] for c in numpy.vstack((coord1,coord2))]
gpname, orig, axes = geom.detect_symm(atoms)
self.assertEqual(gpname, 'D5d')
gpname, axes = geom.subgroup(gpname, axes)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(gpname, 'C2h')
self.assertTrue(geom.check_given_symm('C2h', atoms))
self.assertEqual(geom.symm_identical_atoms(gpname, atoms),
[[0, 3, 5, 7], [1, 2, 8, 9], [4, 6]])
def test_detect_symm_c2v(self):
atoms = [['H' , (1., 0., 2.)],
['He', (0., 1., 0.)],
['H' , (-2.,0.,-1.)],
['He', (0.,-1., 0.)]]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'C2v')
atoms = geom.shift_atom(atoms, orig, axes)
self.assertTrue(geom.check_given_symm('C2v', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms), [[0,2],[1,3]])
def test_detect_symm_d2h_a(self):
atoms = [['He', (0., 1., 0.)],
['H' , (1., 0., 0.)],
['H' , (-1.,0., 0.)],
['He', (0.,-1., 0.)]]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'D2h')
atoms = geom.shift_atom(atoms, orig, axes)
self.assertTrue(geom.check_given_symm('D2h', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms),
[[0, 3], [1, 2]])
def test_detect_symm_d2h_b(self):
atoms = [['H' , (1., 0., 2.)],
['He', (0., 1., 0.)],
['H' , (-1.,0.,-2.)],
['He', (0.,-1., 0.)]]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'D2h')
atoms = geom.shift_atom(atoms, orig, axes)
self.assertTrue(geom.check_given_symm('D2h', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms), [[0,2],[1,3]])
def test_detect_symm_c2h_a(self):
atoms = [['H' , (1., 0., 2.)],
['He', (0., 1.,-1.)],
['H' , (-1.,0.,-2.)],
['He', (0.,-1., 1.)]]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'C2h')
self.assertEqual(geom.symm_identical_atoms(l,atoms), [[0,2],[1,3]])
self.assertTrue(geom.check_given_symm('C2h', atoms))
def test_detect_symm_c2h(self):
atoms = [['H' , (1., 0., 2.)],
['He', (0., 1., 0.)],
['H' , (1., 0., 0.)],
['H' , (-1.,0., 0.)],
['H' , (-1.,0.,-2.)],
['He', (0.,-1., 0.)]]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'C2h')
self.assertEqual(geom.symm_identical_atoms(l,atoms), [[0,4],[1,5],[2,3]])
self.assertTrue(geom.check_given_symm('C2h', atoms))
atoms = [['H' , (1., 0., 1.)],
['H' , (1., 0.,-1.)],
['He', (0., 0., 2.)],
['He', (2., 0.,-2.)],
['Li', (1., 1., 0.)],
['Li', (1.,-1., 0.)]]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'C2h')
self.assertEqual(geom.symm_identical_atoms(l,atoms),
[[0, 1], [2, 3], [4, 5]])
self.assertTrue(geom.check_given_symm('C2h', atoms))
def test_detect_symm_d2_a(self):
atoms = [['H' , (1., 0., 1.)],
['H' , (1., 0.,-1.)],
['He', (0., 0., 2.)],
['He', (2., 0., 2.)],
['He', (1., 1.,-2.)],
['He', (1.,-1.,-2.)]]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'D2d')
atoms = geom.shift_atom(atoms, orig, axes)
self.assertTrue(geom.check_given_symm('D2', atoms))
self.assertEqual(geom.symm_identical_atoms('D2', atoms),
[[0, 1], [2, 3, 4, 5]])
def test_detect_symm_d2_b(self):
s2 = numpy.sqrt(.5)
atoms = [['C', (0., 0., 1.)],
['C', (0., 0.,-1.)],
['H', ( 1, 0., 2.)],
['H', (-1, 0., 2.)],
['H', ( s2, s2,-2.)],
['H', (-s2,-s2,-2.)]]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'D2')
atoms = geom.shift_atom(atoms, orig, axes)
self.assertTrue(geom.check_given_symm('D2', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms),
[[0, 1], [2, 3, 4, 5]])
def test_detect_symm_s4(self):
atoms = [['H', (-1,-1.,-2.)],
['H', ( 1, 1.,-2.)],
['C', (-.9,-1.,-2.)],
['C', (.9, 1.,-2.)],
['H', ( 1,-1., 2.)],
['H', (-1, 1., 2.)],
['C', ( 1,-.9, 2.)],
['C', (-1, .9, 2.)],]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'S4')
atoms = geom.shift_atom(atoms, orig, axes)
self.assertTrue(geom.check_given_symm('C2', atoms))
self.assertEqual(geom.symm_identical_atoms('C2',atoms),
[[0, 1], [2, 3], [4, 5], [6, 7]])
def test_detect_symm_ci(self):
atoms = [['H' , ( 1., 0., 0.)],
['He', ( 0., 1., 0.)],
['Li', ( 0., 0., 1.)],
['Be', ( .5, .5, .5)],
['H' , (-1., 0., 0.)],
['He', ( 0.,-1., 0.)],
['Li', ( 0., 0.,-1.)],
['Be', (-.5,-.5,-.5)]]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'Ci')
self.assertTrue(geom.check_given_symm('Ci', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms),
[[0, 4], [1, 5], [2, 6], [3, 7]])
def test_detect_symm_cs1(self):
atoms = [['H' , (1., 2., 0.)],
['He', (1., 0., 0.)],
['Li', (2.,-1., 0.)],
['Be', (0., 1., 0.)]]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'Cs')
self.assertTrue(geom.check_given_symm('Cs', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms),
[[0], [1], [2], [3]])
def test_detect_symm_cs2(self):
atoms = [['H' , (0., 1., 2.)],
['He', (0., 1., 0.)],
['Li', (0., 2.,-1.)],
['Be', (0., 0., 1.)],
['S' , (-3, 1., .5)],
['S' , ( 3, 1., .5)]]
coord = numpy.dot([a[1] for a in atoms], u)
atoms = [[atoms[i][0], c] for i,c in enumerate(coord)]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'Cs')
self.assertTrue(geom.check_given_symm('Cs', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms),
[[0], [1], [2], [3], [4, 5]])
def test_detect_symm_cs3(self):
atoms = [['H' , ( 2.,1., 0.)],
['He', ( 0.,1., 0.)],
['Li', (-1.,2., 0.)],
['Be', ( 1.,0., 0.)],
['S' , ( .5,1., -3)],
['S' , ( .5,1., 3)]]
coord = numpy.dot([a[1] for a in atoms], u)
atoms = [[atoms[i][0], c] for i,c in enumerate(coord)]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'Cs')
self.assertTrue(geom.check_given_symm('Cs', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms),
[[0], [1], [2], [3], [4, 5]])
def test_detect_symm_c1(self):
atoms = [['H' , ( 1., 0., 0.)],
['He', ( 0., 1., 0.)],
['Li', ( 0., 0., 1.)],
['Be', ( .5, .5, .5)]]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'C1')
self.assertTrue(geom.check_given_symm('C1', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms),
[[0], [1], [2], [3]])
def test_detect_symm_c2(self):
atoms = [['H' , ( 1., 0., 1.)],
['H' , ( 1., 0.,-1.)],
['He', ( 0.,-3., 2.)],
['He', ( 0., 3.,-2.)]]
l, orig, axes = geom.detect_symm(atoms)
atoms = geom.shift_atom(atoms, orig, axes)
self.assertEqual(l, 'C2')
self.assertTrue(geom.check_given_symm('C2', atoms))
self.assertEqual(geom.symm_identical_atoms(l,atoms), [[0,1],[2,3]])
def test_detect_symm_d3d(self):
atoms = [
['C', ( 1.25740, -0.72596, -0.25666)],
['C', ( 1.25740, 0.72596, 0.25666)],
['C', ( 0.00000, 1.45192, -0.25666)],
['C', (-1.25740, 0.72596, 0.25666)],
['C', (-1.25740, -0.72596, -0.25666)],
['C', ( 0.00000, -1.45192, 0.25666)],
['H', ( 2.04168, -1.17876, 0.05942)],
['H', ( 1.24249, -0.71735, -1.20798)],
['H', ( 2.04168, 1.17876, -0.05942)],
['H', ( 1.24249, 0.71735, 1.20798)],
['H', ( 0.00000, 1.43470, -1.20798)],
['H', ( 0.00000, 2.35753, 0.05942)],
['H', (-2.04168, 1.17876, -0.05942)],
['H', (-1.24249, 0.71735, 1.20798)],
['H', (-1.24249, -0.71735, -1.20798)],
['H', (-2.04168, -1.17876, 0.05942)],
['H', ( 0.00000, -1.43470, 1.20798)],
['H', ( 0.00000, -2.35753, -0.05942)], ]
with lib.temporary_env(geom, TOLERANCE=1e-4):
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'D3d')
def test_quasi_c2v(self):
atoms = [
['Fe', ( 0.0000000000, 0.0055197721, 0.0055197721)],
['O' , (-1.3265475500, 0.0000000000, -0.9445024777)],
['O' , ( 1.3265475500, 0.0000000000, -0.9445024777)],
['O' , ( 0.0000000000, -1.3265374484, 0.9444796669)],
['O' , ( 0.0000000000, 1.3265374484, 0.9444796669)],]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'Cs')
with lib.temporary_env(geom, TOLERANCE=1e-2):
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'C2v')
with lib.temporary_env(geom, TOLERANCE=1e-1):
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'Td')
def test_as_subgroup(self):
axes = numpy.eye(3)
g, ax = symm.as_subgroup('D2d', axes)
self.assertEqual(g, 'D2')
self.assertAlmostEqual(abs(ax-numpy.eye(3)).max(), 0, 12)
g, ax = symm.as_subgroup('D2d', axes, 'D2')
self.assertEqual(g, 'D2')
self.assertAlmostEqual(abs(ax-numpy.eye(3)).max(), 0, 12)
g, ax = symm.as_subgroup('D2d', axes, 'C2v')
self.assertEqual(g, 'C2v')
self.assertAlmostEqual(ax[0,1], numpy.sqrt(.5), 9)
self.assertAlmostEqual(ax[1,0],-numpy.sqrt(.5), 9)
g, ax = symm.as_subgroup('C2v', axes, 'Cs')
self.assertEqual(g, 'Cs')
self.assertAlmostEqual(ax[2,0], 1, 9)
g, ax = symm.as_subgroup('D6', axes)
self.assertEqual(g, 'D2')
g, ax = symm.as_subgroup('C6h', axes)
self.assertEqual(g, 'C2h')
g, ax = symm.as_subgroup('C6v', axes)
self.assertEqual(g, 'C2v')
g, ax = symm.as_subgroup('C6', axes)
self.assertEqual(g, 'C2')
def test_ghost(self):
atoms = [
['Fe' , ( 0.0, 0.0, 0.0)],
['O' , (-1.3, 0.0, 0.0)],
['GHOST-O' , ( 1.3, 0.0, 0.0)],
['GHOST-O' , ( 0.0, -1.3, 0.0)],
['O' , ( 0.0, 1.3, 0.0)],]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'C2v')
self.assertAlmostEqual(axes[2,0]*axes[2,1], -.5)
atoms = [
['Fe' , ( 0.0, 0.0, 0.0)],
['O' , (-1.3, 0.0, 0.0)],
['XO' , ( 1.3, 0.0, 0.0)],
['GHOSTO' , ( 0.0, -1.3, 0.0)],
['O' , ( 0.0, 1.3, 0.0)],]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'C2v')
self.assertAlmostEqual(axes[2,0]*axes[2,1], -.5)
atoms = [
['Fe' , ( 0.0, 0.0, 0.0)],
['O' , (-1.3, 0.0, 0.0)],
['X' , ( 1.3, 0.0, 0.0)],
['X' , ( 0.0, -1.3, 0.0)],
['O' , ( 0.0, 1.3, 0.0)],]
l, orig, axes = geom.detect_symm(atoms)
self.assertEqual(l, 'C2v')
self.assertAlmostEqual(axes[2,0]*axes[2,1], -.5)
def test_sort_coords(self):
c = numpy.random.random((5,3))
c0 = symm.sort_coords(c)
idx = symm.argsort_coords(c)
self.assertAlmostEqual(abs(c[idx] - c0).max(), 0, 9)
def ring(n, start=0):
    """Return n points evenly spaced on a circle in the z=0 plane.

    The radius r = 1/sin(pi/n) makes adjacent points lie 2 apart;
    *start* is an angular offset in radians.
    """
    radius = 1. / numpy.sin(numpy.pi / n)
    step = 2 * numpy.pi / n
    return numpy.array([[radius * numpy.cos(start + k * step),
                         radius * numpy.sin(start + k * step),
                         0]
                        for k in range(n)])
def ringhat(n, u):
    """Build an H ring, a slightly twisted C ring and two axial N atoms,
    all rotated by the matrix *u*."""
    atoms = [['H', c] for c in ring(n)]
    atoms += [['C', c] for c in ring(n, .1)]
    atoms += [['N', [0, 0, 1.3]],
              ['N', [0, 0, -1.3]]]
    rotated = numpy.dot([a[1] for a in atoms], u)
    return [[sym, xyz] for (sym, _), xyz in zip(atoms, rotated)]
def rotmatz(ang):
    """Rotation matrix about the z axis (row-vector convention: v' = v.dot(R))."""
    cos_a, sin_a = numpy.cos(ang), numpy.sin(ang)
    return numpy.array([[cos_a, sin_a, 0],
                        [-sin_a, cos_a, 0],
                        [0, 0, 1]])
def rotmaty(ang):
    """Rotation matrix about the y axis (row-vector convention: v' = v.dot(R))."""
    cos_a, sin_a = numpy.cos(ang), numpy.sin(ang)
    return numpy.array([[cos_a, 0, sin_a],
                        [0, 1, 0],
                        [-sin_a, 0, cos_a]])
def r2edge(ang, r):
    """Chord length subtended by the angle *ang* on a circle of radius *r*."""
    return r * 2 * numpy.sin(ang * .5)
def make60(b5, b6):
    """Cartesian coordinates for the 60 vertices of a C60-like truncated
    icosahedron.

    b5 -- bond length between a pentagon and a hexagon edge
    b6 -- bond length shared by two hexagons
    Returns a (60, 3) numpy array: top pentagon (5), upper belt (25),
    lower belt (25, inversion images), bottom pentagon (5).
    """
    # theta1: tilt of the five secondary C5 axes relative to z
    theta1 = numpy.arccos(1/numpy.sqrt(5))
    theta2 = (numpy.pi - theta1) * .5
    # Circumscribed-sphere radius implied by the two bond lengths
    r = (b5*2+b6)/2/numpy.sin(theta1/2)
    rot72 = rotmatz(numpy.pi*2/5)  # 72-degree rotation about the main C5 axis
    s1 = numpy.sin(theta1)
    c1 = numpy.cos(theta1)  # (s1/c1 unused below; kept from the derivation)
    s2 = numpy.sin(theta2)
    c2 = numpy.cos(theta2)
    # p1/p9: seed vertices of the top/bottom pentagons; p2: seed of the belt
    p1 = numpy.array(( s2*b5, 0, r-c2*b5))
    p9 = numpy.array((-s2*b5, 0,-r+c2*b5))
    p2 = numpy.array(( s2*(b5+b6), 0, r-c2*(b5+b6)))
    # 72-degree rotation about a C5 axis tilted by theta1 (conjugation)
    rot1 = reduce(numpy.dot, (rotmaty(theta1), rot72, rotmaty(-theta1)))
    # Five belt seed points, one per tilted C5 axis
    p2s = []
    for i in range(5):
        p2s.append(p2)
        p2 = numpy.dot(p2, rot1)
    coord = []
    # Top pentagon
    for i in range(5):
        coord.append(p1)
        p1 = numpy.dot(p1, rot72)
    # Upper belt: replicate each seed around the main C5 axis
    for pj in p2s:
        pi = pj
        for i in range(5):
            coord.append(pi)
            pi = numpy.dot(pi, rot72)
    # Lower belt: inversion (-pi) of the upper belt pattern
    for pj in p2s:
        pi = pj
        for i in range(5):
            coord.append(-pi)
            pi = numpy.dot(pi, rot72)
    # Bottom pentagon
    for i in range(5):
        coord.append(p9)
        p9 = numpy.dot(p9, rot72)
    return numpy.array(coord)
def make12(b):
    """Vertices of an icosahedron with edge length b.

    Layout: north pole, upper 5-ring, lower 5-ring, south pole (12 points).
    """
    theta1 = numpy.arccos(1/numpy.sqrt(5))
    r = b/2./numpy.sin(theta1/2)          # circumradius
    rot72 = rotmatz(numpy.pi*2/5)         # 72-degree step about z
    upper = numpy.array(( numpy.sin(theta1)*r, 0,  numpy.cos(theta1)*r))
    lower = numpy.array((-numpy.sin(theta1)*r, 0, -numpy.cos(theta1)*r))
    coord = [( 0, 0, r)]
    for seed in (upper, lower):
        p = seed
        for _ in range(5):
            coord.append(p)
            p = numpy.dot(p, rot72)
    coord.append(( 0, 0, -r))
    return numpy.array(coord)
def make20(b):
    """Vertices of a dodecahedron (20 points) with edge length b.

    The vertices are built as four 5-rings at latitudes +/-theta2 and
    +/-(theta1+theta2) around the z axis.
    """
    # theta1: angle between adjacent vertex directions; theta2 places the
    # first ring relative to the z axis (derived from the pentagon geometry)
    theta1 = numpy.arccos(numpy.sqrt(5)/3)
    theta2 = numpy.arcsin(r2edge(theta1,1)/2/numpy.sin(numpy.pi/5))
    r = b/2./numpy.sin(theta1/2)          # circumradius
    rot72 = rotmatz(numpy.pi*2/5)         # 72-degree step about z
    s2 = numpy.sin(theta2)
    c2 = numpy.cos(theta2)
    s3 = numpy.sin(theta1+theta2)
    c3 = numpy.cos(theta1+theta2)
    # Seed vertex of each of the four rings (p3/p4 are southern mirrors)
    p1 = numpy.array(( s2*r, 0, c2*r))
    p2 = numpy.array(( s3*r, 0, c3*r))
    p3 = numpy.array((-s3*r, 0, -c3*r))
    p4 = numpy.array((-s2*r, 0, -c2*r))
    coord = []
    for i in range(5):
        coord.append(p1)
        p1 = numpy.dot(p1, rot72)
    for i in range(5):
        coord.append(p2)
        p2 = numpy.dot(p2, rot72)
    for i in range(5):
        coord.append(p3)
        p3 = numpy.dot(p3, rot72)
    for i in range(5):
        coord.append(p4)
        p4 = numpy.dot(p4, rot72)
    return numpy.array(coord)
def make4(b):
    """Tetrahedron: four alternating corners of the cube with half-edge b/2."""
    h = b * .5
    return numpy.array([[ h,  h,  h],
                        [-h, -h,  h],
                        [ h, -h, -h],
                        [-h,  h, -h]])
def make6(b):
    """Octahedron: points at +/- b/2 along each Cartesian axis."""
    h = b * .5
    return numpy.array([[ h, 0., 0.],
                        [0.,  h, 0.],
                        [0., 0.,  h],
                        [-h, 0., 0.],
                        [0., -h, 0.],
                        [0., 0., -h]])
def make8(b):
    """Cube: all eight sign combinations of (+/-b/2, +/-b/2, +/-b/2).

    The rows are ordered with x varying slowest and z fastest, starting
    from the all-positive corner.
    """
    h = b * .5
    signs = (1., -1.)
    return numpy.array([[sx * h, sy * h, sz * h]
                        for sx in signs
                        for sy in signs
                        for sz in signs])
def random_rotz(seed=19):
    """Seeded random orthogonal transform acting only in the xy plane.

    The 2x2 upper-left block is the U factor of an SVD of a random matrix
    (orthogonal); the z axis is left untouched.  Deterministic for a fixed
    seed (reseeds numpy's global RNG as a side effect).
    """
    numpy.random.seed(seed)
    u = numpy.linalg.svd(numpy.random.random((2, 2)))[0]
    mat = numpy.eye(3)
    mat[:2, :2] = u
    return mat
if __name__ == "__main__":
    # Run the full geometry-symmetry detection test suite when executed
    # directly (uses the file-level `unittest` import).
    print("Full Tests geom")
    unittest.main()
|
gkc1000/pyscf
|
pyscf/symm/test/test_geom.py
|
Python
|
apache-2.0
| 38,355
|
[
"PySCF"
] |
366b5e14691b3a43a9b75c60fe3f3580980b3c999835736713f247b5cd0899a0
|
# Copyright 2014-2016 by Marco Galardini. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""This module provides classes to work with Phenotype Microarray data.
More information on the single plates can be found here: http://www.biolog.com/
Classes:
- PlateRecord - Object that contain time course data on each well of the
plate, as well as metadata (if any).
- WellRecord - Object that contains the time course data of a single well
- JsonWriter - Writer of PlateRecord objects in JSON format.
Functions:
- JsonIterator - Incremental PM JSON parser, this is an iterator that returns
PlateRecord objects.
- CsvIterator - Incremental PM CSV parser, this is an iterator that returns
PlateRecord objects.
- _toOPM - Used internally by JsonWriter, converts PlateRecord objects in
dictionaries ready to be serialized in JSON format.
"""
import warnings
import json
import csv
import numpy as np
from Bio._py3k import basestring
from Bio._py3k import _is_int_or_long
from Bio import BiopythonParserWarning
# Private csv headers - hardcoded because these are supposedly never changed
# (row labels of the Biolog CSV export; matched by substring in CsvIterator)
_datafile = 'Data File'
_plate = 'Plate Type'
_strainType = 'Strain Type'
_sample = 'Sample Number'
_strainName = 'Strain Name'
_strainNumber = 'Strain Number'
_other = 'Other'
_hour = 'Hour'
_file = 'File'
_position = 'Position'
_setupTime = 'Setup Time'
# Plate-ID prefixes: standard PM plates and the mammalian PM-M series
_platesPrefix = 'PM'
_platesPrefixMammalian = 'PM-M'
#
# Json identifiers - hardcoded as they are set by the creators of opm
_csvData = 'csv_data'
_measurements = 'measurements'
#
class PlateRecord(object):
    """PlateRecord object for storing Phenotype Microarray plates data.
    A PlateRecord stores all the wells of a particular phenotype
    Microarray plate, along with metadata (if any). The single wells can be
    accessed calling their id as an index or iterating on the PlateRecord:
    >>> from Bio import phenotype
    >>> plate = phenotype.read("plate.csv", "pm-csv")
    >>> well = plate['A05']
    >>> for well in plate:
    ...    print("%s" % well.id)
    ...
    A01
    A02
    ...
    The plate rows and columns can be queried with an indexing system similar
    to NumPy and other matrices:
    >>> print(plate[1])
    PlateRecord('WellRecord['B01'], WellRecord['B02'], WellRecord['B03'], ..., WellRecord['B12']')
    >>> print(plate[:,1])
    PlateRecord('WellRecord['A02'], WellRecord['B02'], WellRecord['C02'], ..., WellRecord['H02']')
    Single WellRecord objects can be accessed using this indexing system:
    >>> print(plate[1,2])
    WellRecord('(0.0, 11.0), (0.25, 11.0), (0.5, 11.0), (0.75, 11.0), (1.0, 11.0), ..., (95.75, 11.0)')
    The presence of a particular well can be inspected with the "in" keyword:
    >>> 'A01' in plate
    True
    All the wells belonging to a "row" (identified by the first character of
    the well id) in the plate can be obtained:
    >>> for well in plate.get_row('H'):
    ...     print("%s" % well.id)
    ...
    H01
    H02
    H03
    ...
    All the wells belonging to a "column" (identified by the number of the well)
    in the plate can be obtained:
    >>> for well in plate.get_column(12):
    ...     print("%s" % well.id)
    ...
    A12
    B12
    C12
    ...
    Two PlateRecord objects can be compared: if all their wells are equal the
    two plates are considered equal:
    >>> plate2 = phenotype.read("plate.json", "pm-json")
    >>> plate == plate2
    True
    Two PlateRecord object can be summed up or subtracted from each other:
    the signals of each well will be summed up or subtracted. The id of the
    left operand will be kept:
    >>> plate3 = plate + plate2
    >>> print(plate3.id)
    PM01
    Many Phenotype Microarray plate have a "negative control" well, which can
    be subtracted to all wells:
    >>> subplate = plate.subtract_control()
    """
    def __init__(self, plateid, wells=None):
        """Initialize the plate with an id and an iterable of WellRecord."""
        self.id = plateid
        if wells is None:
            wells = []
        # Similar behaviour as GenBank
        # Contains all the attributes
        self.qualifiers = {}
        # Well_id --> WellRecord objects
        self._wells = {}
        try:
            for w in wells:
                self._is_well(w)
                self[w.id] = w
        except TypeError:
            raise TypeError('You must provide an iterator-like object ' +
                            'containing the single wells')
        self._update()
    def _update(self):
        """Update the rows and columns string identifiers."""
        # Well ids are '<row letter><column digits>', e.g. 'A05'
        self._rows = sorted(set(x[0] for x in self._wells))
        self._columns = sorted(set(x[1:] for x in self._wells))
    def _is_well(self, obj):
        """Check if the given object is a WellRecord object
        Used both for the class constructor and the __setitem__ method
        """
        # Value should be of WellRecord type
        if not isinstance(obj, WellRecord):
            raise ValueError('A WellRecord type object is needed as value' +
                             ' (got %s)' % type(obj))
    def __getitem__(self, index):
        """Access part of the plate.
        Depending on the indices, you can get a WellRecord object
        (representing a single well of the plate),
        or another plate
        (representing some part or all of the original plate).
        plate[wid] gives a WellRecord (if wid is a WellRecord id)
        plate[r,c] gives a WellRecord
        plate[r] gives a row as a PlateRecord
        plate[r,:] gives a row as a PlateRecord
        plate[:,c] gives a column as a PlateRecord
        plate[:] and plate[:,:] give a copy of the plate
        Anything else gives a subset of the original plate, e.g.
        plate[0:2] or plate[0:2,:] uses only row 0 and 1
        plate[:,1:3] uses only columns 1 and 2
        plate[0:2,1:3] uses only rows 0 & 1 and only cols 1 & 2
        >>> from Bio import phenotype
        >>> plate = phenotype.read("plate.csv", "pm-csv")
        You can access a well of the plate, using its id.
        >>> w = plate['A01']
        You can access a row of the plate as a PlateRecord using an integer
        index:
        >>> first_row = plate[0]
        >>> print(first_row)
        PlateRecord('WellRecord['A01'], WellRecord['A02'], WellRecord['A03'], ..., WellRecord['A12']')
        >>> last_row = plate[-1]
        >>> print(last_row)
        PlateRecord('WellRecord['H01'], WellRecord['H02'], WellRecord['H03'], ..., WellRecord['H12']')
        You can also access use python's slice notation to sub-plates
        containing only some of the plate rows:
        >>> sub_plate = plate[2:5]
        >>> print(sub_plate)
        PlateRecord('WellRecord['C01'], WellRecord['C02'], WellRecord['C03'], ..., WellRecord['E12']')
        This includes support for a step, i.e. plate[start:end:step], which
        can be used to select every second row:
        >>> sub_plate = plate[::2]
        You can also use two indices to specify both rows and columns.
        Using simple integers gives you the single wells. e.g.
        >>> w = plate[3, 4]
        >>> print(w.id)
        'D05'
        To get a single column use this syntax:
        >>> sub_plate = plate[:, 4]
        >>> print(sub_plate)
        PlateRecord('WellRecord['A05'], WellRecord['B05'], WellRecord['C05'], ..., WellRecord['H05']')
        Or, to get part of a column,
        >>> sub_plate = plate[1:3, 4]
        >>> print(sub_plate)
        PlateRecord(WellRecord['B05'], WellRecord['C05'])
        However, in general you get a sub-plate,
        >>> print(plate[1:5, 3:6])
        PlateRecord('WellRecord['B04'], WellRecord['B05'], WellRecord['B06'], ..., WellRecord['E06']')
        This should all seem familiar to anyone who has used the NumPy
        array or matrix objects.
        """
        # Well identifier access
        if isinstance(index, basestring):
            try:
                return self._wells[index]
            except KeyError:
                raise KeyError('Well %s not found!' % index)
        # Integer index
        elif isinstance(index, int):
            try:
                row = self._rows[index]
            except IndexError:
                raise IndexError('Row %d not found!' % index)
            return PlateRecord(self.id,
                               filter(lambda x: x.id.startswith(row),
                                      self._wells.values()))
        # Slice
        elif isinstance(index, slice):
            rows = self._rows[index]
            return PlateRecord(self.id,
                               filter(lambda x: x.id[0] in rows,
                                      self._wells.values()))
        # Other access
        elif len(index) != 2:
            raise TypeError('Invalid index type.')
        row_index, col_index = index
        if isinstance(row_index, int) and isinstance(col_index, int):
            # Return a single WellRecord
            try:
                row = self._rows[row_index]
            except IndexError:
                raise IndexError('Row %d not found!' % row_index)
            try:
                col = self._columns[col_index]
            except IndexError:
                raise IndexError('Column %d not found!' % col_index)
            return self._wells[row + col]
        elif isinstance(row_index, int):
            # One row, a slice of columns
            try:
                row = self._rows[row_index]
            except IndexError:
                raise IndexError('Row %d not found!' % row_index)
            cols = self._columns[col_index]
            return PlateRecord(self.id,
                               filter(lambda x: x.id.startswith(row) and
                                      x.id[1:] in cols,
                                      self._wells.values()))
        elif isinstance(col_index, int):
            # One column, a slice of rows
            try:
                col = self._columns[col_index]
            except IndexError:
                raise IndexError('Columns %d not found!' % col_index)
            rows = self._rows[row_index]
            return PlateRecord(self.id,
                               filter(lambda x: x.id.endswith(col) and
                                      x.id[0] in rows,
                                      self._wells.values()))
        else:
            # Slices on both axes
            rows = self._rows[row_index]
            cols = self._columns[col_index]
            return PlateRecord(self.id,
                               filter(lambda x: x.id[0] in rows and
                                      x.id[1:] in cols,
                                      self._wells.values()))
    def __setitem__(self, key, value):
        """Assign a WellRecord to a well id; key must match value.id."""
        if not isinstance(key, basestring):
            raise ValueError('Well identifier should be string-like')
        self._is_well(value)
        # Provided key and well ID should be the same
        # NOTE(review): the message below interpolates type(...) of both
        # values rather than the values themselves -- confirm intent.
        if value.id != key:
            raise ValueError('WellRecord ID and provided key are different' +
                             ' (got "%s" and "%s")' % (type(value.id), type(key)))
        self._wells[key] = value
        self._update()
    def __delitem__(self, key):
        """Remove the well with the given id from the plate."""
        if not isinstance(key, basestring):
            raise ValueError('Well identifier should be string-like')
        del self._wells[key]
        self._update()
    def __iter__(self):
        """Iterate over the WellRecord objects, sorted by well id."""
        for well in sorted(self._wells):
            yield self._wells[well]
    def __contains__(self, wellid):
        """Return True if a well with the given id is present."""
        if wellid in self._wells:
            return True
        return False
    def __len__(self):
        """Returns the number of wells in this plate"""
        return len(self._wells)
    def __eq__(self, other):
        """Two plates are equal when all their wells are equal."""
        if isinstance(other, self.__class__):
            return self._wells == other._wells
        else:
            return False
    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self.__eq__(other)
    def __add__(self, plate):
        """Add another PlateRecord object
        The wells in both plates must be the same
        A new PlateRecord object is returned, having the same id as the
        left operand
        """
        if not isinstance(plate, PlateRecord):
            raise TypeError('Expecting a PlateRecord object')
        if set(x.id for x in self) != set(x.id for x in plate):
            raise ValueError('The two plates have different wells')
        wells = []
        for w in self:
            wells.append(w + plate[w.id])
        newp = PlateRecord(self.id, wells=wells)
        return newp
    def __sub__(self, plate):
        """Subtract another PlateRecord object
        The wells in both plates must be the same
        A new PlateRecord object is returned, having the same id as the
        left operand
        """
        if not isinstance(plate, PlateRecord):
            raise TypeError('Expecting a PlateRecord object')
        if set(x.id for x in self) != set(x.id for x in plate):
            raise ValueError('The two plates have different wells')
        wells = []
        for w in self:
            wells.append(w - plate[w.id])
        newp = PlateRecord(self.id, wells=wells)
        return newp
    def get_row(self, row):
        """Get all the wells of a given row
        A row is identified with a letter (e.g. 'A')
        """
        # Key is casted to str implicitly
        try:
            row = str(row)
        except Exception:
            # Is it even possible to get an exception here?
            raise ValueError('Row identifier should be string-like')
        if len(row) > 1:
            raise ValueError('Row identifier must be of maximum one letter')
        for w in sorted(filter(lambda x: x.startswith(row), self._wells)):
            yield self._wells[w]
    def get_column(self, column):
        """Get all the wells of a given column
        A column is identified with a number (e.g. '6')
        """
        # Column is casted to int implicitly
        try:
            column = int(column)
        except Exception:
            raise ValueError('Column identifier should be a number')
        # A 96-well plate has well numbers in two digits
        for w in sorted(filter(lambda x: x.endswith('%02d' % column),
                               self._wells)):
            yield self._wells[w]
    def subtract_control(self, control='A01', wells=None):
        """Subtract a 'control' well from the other plates wells
        By default the control is subtracted to all wells, unless
        a list of well ID is provided
        The control well should belong to the plate
        A new PlateRecord object is returned
        """
        if control not in self:
            raise ValueError('Control well not present in plate')
        wcontrol = self[control]
        if wells is None:
            wells = self._wells.keys()
        missing = set(w for w in wells if w not in self)
        if missing:
            raise ValueError('Some wells to be subtracted are not present')
        nwells = []
        for w in self:
            if w.id in wells:
                nwells.append(w - wcontrol)
            else:
                nwells.append(w)
        newp = PlateRecord(self.id, wells=nwells)
        return newp
    def __repr__(self):
        """Returns a (truncated) representation of the plate for debugging."""
        if len(self._wells) > 4:
            # Show the last well and the first three
            return "%s('%s, ..., %s')" % (self.__class__.__name__,
                                          ', '.join(
                ["%s['%s']" % (str(self[x].__class__.__name__), self[x].id)
                 for x in sorted(self._wells.keys())[:3]]),
                "%s['%s']" % (
                    self[
                        sorted(self._wells.keys())[-1]].__class__.__name__,
                    self[sorted(self._wells.keys())[-1]].id))
        else:
            return "%s(%s)" % (self.__class__.__name__,
                               ', '.join(
                ["%s['%s']" % (str(self[x].__class__.__name__), self[x].id)
                 for x in sorted(self._wells.keys())]
            ))
    def __str__(self):
        """A human readable summary of the record (string).
        The python built in function str works by calling the object's ___str__
        method. e.g.
        >>> from Bio import phenotype
        >>> record = phenotype.read("plates.csv", "pm-csv")
        >>> print(record)
        Plate ID: PM09
        Well: 96
        Rows: 8
        Columns: 12
        PlateRecord('WellRecord['A01'], WellRecord['A02'], WellRecord['A03'], WellRecord['A04']...WellRecord['H12']')
        Note that long well lists are shown truncated.
        """
        lines = []
        if self.id:
            lines.append("Plate ID: %s" % self.id)
        lines.append("Well: %i" % len(self))
        # Here we assume that all well ID start with a char
        lines.append("Rows: %d" %
                     len(set(x.id[0] for x in self)))
        # Here we assume that well number is a two-digit number
        lines.append("Columns: %d" %
                     len(set(x.id[1:3] for x in self)))
        lines.append(repr(self))
        return "\n".join(lines)
class WellRecord(object):
    """WellRecord object stores all the time course signals of a phenotype
    Microarray well.
    The single time points and signals can be
    accessed iterating on the WellRecord or using lists indices or slices:
    >>> from Bio import phenotype
    >>> plate = phenotype.read("plate.csv", "pm-csv")
    >>> well = plate['A05']
    >>> for time, signal in well:
    ...    print("Time: %f, Signal: %f" % (time, signal))
    ...
    Time: 0.0, Signal: 10.0
    Time: 0.25, Signal: 14.0
    Time: 0.5, Signal: 19.0
    Time: 24.25, Signal: 32.0
    ...
    >>> well[1]
    23.0
    >>> well[1:5]
    [23.0, 23.0, 26.0, 26.0]
    >>> well[1:5:0.5]
    [23.0, 23.0, 23.0, 26.0, 26.0, 26.0, 26.0, 26.0]
    If a time point was not present in the input file but it's between the
    minimum and maximum time point, the interpolated signal is returned,
    otherwise a nan value:
    >>> well[1.3]
    23.0
    >>> well[1250]
    nan
    Two WellRecord objects can be compared: if their input time/signal pairs
    are exactly the same, the two records are considered equal:
    >>> well2 = plate['H12']
    >>> well == well2
    False
    Two WellRecord objects can be summed up or subtracted from each other: a new
    WellRecord object is returned, having the left operand id.
    >>> well3 = well + well2
    >>> print(well3.id)
    A05
    If SciPy is installed, a sigmoid function can be fitted to the PM curve,
    in order to extract some parameters; three sigmoid functions are available:
    * gompertz
    * logistic
    * richards
    The functions are described in Zwietering et al., 1990 (PMID: 16348228)
    >>> well.fit()
    >>> print(well.slope, well.model)
    (61.853516785566917, 'logistic')
    If no sigmoid function is specified, the first one that is successfully
    fitted is used. The user can also specify a specific function.
    >>> well.fit('gompertz')
    >>> print(well.slope, well.model)
    (127.94630059171354, 'gompertz')
    If no function can be fitted, the parameters are left as None, except for
    the max, min, average_height and area.
    """
    def __init__(self, wellid, plate=None, signals=None):
        """Initialize the well.

        wellid  -- well identifier, e.g. 'A05'
        plate   -- parent PlateRecord (an empty one is created if omitted)
        signals -- dict mapping time points (float) to signals (float)
        """
        if plate is None:
            self.plate = PlateRecord(None)
        else:
            self.plate = plate
        self.id = wellid
        # Curve parameters (to be calculated with the "fit" function)
        # Parameters that don't need scipy
        self.max = None
        self.min = None
        self.average_height = None
        # Parameters that need scipy
        self.area = None
        self.plateau = None
        self.slope = None
        self.lag = None
        self.v = None
        self.y0 = None
        self.model = None
        # Original signals (private)
        if signals is None:
            self._signals = {}
        else:
            self._signals = signals
    def _interpolate(self, time):
        """Private method to get a linear interpolation of the signals
        at certain time points.
        """
        # np.interp returns nan outside the sampled range (left/right)
        times = sorted(self._signals.keys())
        return np.interp(time,
                         times,
                         [self._signals[x] for x in times],
                         left=np.nan, right=np.nan)
    def __setitem__(self, time, signal):
        """Assign a signal at a certain time point.
        """
        try:
            time = float(time)
        except ValueError:
            raise ValueError('Time point should be a number')
        try:
            signal = float(signal)
        except ValueError:
            raise ValueError('Signal should be a number')
        self._signals[time] = signal
    def __getitem__(self, time):
        """Returns a subset of signals or a single signal.
        """
        if isinstance(time, slice):
            # Fix the missing values in the slice
            if time.start is None:
                start = 0
            else:
                start = time.start
            if time.stop is None:
                stop = max(self.get_times())
            else:
                stop = time.stop
            # A None step falls back to np.arange's default step of 1
            time = np.arange(start, stop, time.step)
            return list(self._interpolate(time))
        elif _is_int_or_long(time) or isinstance(time, float):
            return self._interpolate(time)
        raise ValueError('Invalid index')
    def __iter__(self):
        """Iterate over (time, signal) pairs in time order."""
        for time in sorted(self._signals.keys()):
            yield time, self._signals[time]
    def __eq__(self, other):
        """Two wells are equal when their time/signal pairs match exactly."""
        if isinstance(other, self.__class__):
            if list(self._signals.keys()) != list(other._signals.keys()):
                return False
            # Account for the presence of NaNs
            for k in self._signals:
                if np.isnan(self[k]) and np.isnan(other[k]):
                    continue
                elif self[k] != other[k]:
                    return False
            return True
        else:
            return False
    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self.__eq__(other)
    def __add__(self, well):
        """Add another WellRecord object
        A new WellRecord object is returned, having the same id as the
        left operand
        """
        if not isinstance(well, WellRecord):
            raise TypeError('Expecting a WellRecord object')
        signals = {}
        # Union of both time axes; missing points are interpolated (or NaN)
        times = set(self._signals.keys()).union(set(well._signals.keys()))
        for t in sorted(times):
            signals[t] = self[t] + well[t]
        neww = WellRecord(self.id, signals=signals)
        return neww
    def __sub__(self, well):
        """Subtract another WellRecord object
        A new WellRecord object is returned, having the same id as the
        left operand
        """
        if not isinstance(well, WellRecord):
            raise TypeError('Expecting a WellRecord object')
        signals = {}
        # Union of both time axes; missing points are interpolated (or NaN)
        times = set(self._signals.keys()).union(set(well._signals.keys()))
        for t in sorted(times):
            signals[t] = self[t] - well[t]
        neww = WellRecord(self.id, signals=signals)
        return neww
    def __len__(self):
        """Returns the number of time points sampled"""
        return len(self._signals)
    def __repr__(self):
        """Returns a (truncated) representation of the signals for debugging."""
        if len(self) > 7:
            # Shows the last time point and the first five
            return "%s('%s, ..., %s')" % (self.__class__.__name__,
                                          ', '.join([str(x)
                                                     for x in self.get_raw()[:5]]),
                                          str(self.get_raw()[-1]))
        else:
            return "%s(%s)" % (self.__class__.__name__,
                               ', '.join([str(x) for x in self.get_raw()]))
    def __str__(self):
        """A human readable summary of the record (string).
        The python built in function str works by calling the object's ___str__
        method. e.g.
        >>> from Bio import phenotype
        >>> plate = phenotype.read("plates.csv", "pm-csv")
        >>> record = plate['A05']
        >>> print(record)
        Plate ID: PM09
        Well ID: A05
        Time points: 288
        Minum signal 0.00 at time 10.00
        Maximum signal 7.50 at time 32.00
        WellRecord('(0.0, 10.0), (0.25, 14.0), (0.5, 19.0), (0.75, 19.0), (1.0, 23.0)...(71.75, 32.0)')
        Note that long time spans are shown truncated.
        """
        lines = []
        if self.plate and self.plate.id:
            lines.append("Plate ID: %s" % self.plate.id)
        if self.id:
            lines.append("Well ID: %s" % self.id)
        lines.append("Time points: %i" % len(self))
        # NOTE(review): "Minum" is a typo in the emitted text; it is kept
        # here (and in the docstring example) for output compatibility.
        lines.append("Minum signal %.2f at time %.2f" %
                     min(self, key=lambda x: x[1]))
        lines.append("Maximum signal %.2f at time %.2f" %
                     max(self, key=lambda x: x[1]))
        lines.append(repr(self))
        return "\n".join(lines)
    def get_raw(self):
        """Get a list of time/signal pairs"""
        return [(t, self._signals[t]) for t in sorted(self._signals.keys())]
    def get_times(self):
        """Get a list of the recorded time points"""
        return sorted(self._signals.keys())
    def get_signals(self):
        """Get a list of the recorded signals (ordered by collection time)"""
        return [self._signals[t] for t in sorted(self._signals.keys())]
    def fit(self, function=("gompertz", "logistic", "richards")):
        """Fit a sigmoid function to this well and extract curve parameters.
        If function is None or an empty tuple/list, then no fitting is done.
        Only the object's ``.min``, ``.max`` and ``.average_height`` are
        calculated.
        By default the following fitting functions will be used in order:
        * gompertz
        * logistic
        * richards
        The first function that is successfully fitted to the signals will
        be used to extract the curve parameters and update ``.area`` and
        ``.model``. If no function can be fitted an exception is raised.
        The function argument should be a tuple or list of any of these three
        function names as strings.
        There is no return value.
        """
        avail_func = ('gompertz', 'logistic', 'richards', )
        # Parameters not dependent on curve fitting
        self.max = max(self, key=lambda x: x[1])[1]
        self.min = min(self, key=lambda x: x[1])[1]
        self.average_height = np.array(self.get_signals()).mean()
        if not function:
            self.area = None
            self.model = None
            return
        # Validate requested function names before any fitting work
        for sigmoid_func in function:
            if sigmoid_func not in avail_func:
                raise ValueError("Fitting function %r not supported" % sigmoid_func)
        # Parameters that depend on scipy curve_fit
        # (imported lazily so the rest of the class works without scipy)
        from .pm_fitting import fit, get_area
        from .pm_fitting import logistic, gompertz, richards
        function_map = {'logistic': logistic,
                        'gompertz': gompertz,
                        'richards': richards}
        self.area = get_area(self.get_signals(), self.get_times())
        self.model = None
        for sigmoid_func in function:
            func = function_map[sigmoid_func]
            try:
                (self.plateau, self.slope,
                 self.lag, self.v, self.y0), pcov = fit(func,
                                                        self.get_times(),
                                                        self.get_signals())
                self.model = sigmoid_func
                return
            except RuntimeError:
                continue
        raise RuntimeError("Could not fit any sigmoid function")
def JsonIterator(handle):
    """Generator function to iterate over PM json records (as PlateRecord
    objects).

    handle - input file

    Raises ValueError for unparsable JSON, TypeError/KeyError for
    malformed records.  Non-standard plate IDs trigger a
    BiopythonParserWarning; standard IDs are normalized to 'PMnn'/'PM-Mnn'.
    """
    try:
        data = json.load(handle)
    except ValueError:
        raise ValueError('Could not parse JSON file')
    # We can have one single plate or several
    # we need to discriminate
    if hasattr(data, 'keys'):
        data = [data]
    for pobj in data:
        try:
            plateID = pobj[_csvData][_plate]
        except TypeError:
            raise TypeError('Malformed JSON input')
        except KeyError:
            raise KeyError('Could not retrieve plate id')
        # Parse also non-standard plate IDs
        if not plateID.startswith(_platesPrefix) and not plateID.startswith(_platesPrefixMammalian):
            warnings.warn('Non-standard plate ID found (%s)' % plateID,
                          BiopythonParserWarning)
        else:
            # Simplify the plates IDs, removing letters, as opm does
            if plateID.startswith(_platesPrefixMammalian):
                pID = plateID[len(_platesPrefixMammalian):]
            else:
                pID = plateID[len(_platesPrefix):]
            # Strip trailing characters until an integer remains
            while len(pID) > 0:
                try:
                    int(pID)
                    break
                except ValueError:
                    pID = pID[:-1]
            # No luck
            if len(pID) == 0:
                warnings.warn('Non-standard plate ID found (%s)' % plateID,
                              BiopythonParserWarning)
            elif int(pID) < 0:
                # Negative IDs are normalized to their absolute value.
                # (Fixed: the original concatenated str + int here, which
                # raised TypeError, and omitted the warning category.)
                newID = _platesPrefix + '%02d' % abs(int(pID))
                warnings.warn('Non-standard plate ID found (%s), using %s' %
                              (plateID, newID),
                              BiopythonParserWarning)
                plateID = newID
            else:
                if plateID.startswith(_platesPrefixMammalian):
                    plateID = _platesPrefixMammalian + '%02d' % int(pID)
                else:
                    plateID = _platesPrefix + '%02d' % int(pID)
        try:
            times = pobj[_measurements][_hour]
        except KeyError:
            raise KeyError('Could not retrieve the time points')
        plate = PlateRecord(plateID)
        for k in pobj[_measurements]:
            # Skip the time points
            if k == _hour:
                continue
            plate[k] = WellRecord(k, plate=plate,
                                  signals=dict([(times[i], pobj[_measurements][k][i])
                                                for i in range(len(times))]))
        # Remove the measurements and assign the other qualifiers
        # (use the module constant rather than a duplicated literal)
        del pobj[_measurements]
        plate.qualifiers = pobj
        yield plate
def CsvIterator(handle):
    """Generator function to iterate over PM csv records (as PlateRecord
    objects).

    handle - input file

    The CSV export contains one or more plate records: each starts with a
    'Data File' row, followed by metadata rows, an 'Hour' header row with
    the well ids, and the time-course rows.
    """
    plate = None
    data = False
    qualifiers = {}
    idx = {}
    wells = {}
    # Defensive default so metadata rows appearing before the first
    # 'Data File' row do not raise a KeyError
    qualifiers[_csvData] = {}
    tblreader = csv.reader(handle, delimiter=',',
                           quotechar='"')
    for line in tblreader:
        if len(line) < 2:
            continue
        elif _datafile in line[0].strip():
            # Do we have a previous plate? Finalize and yield it
            if plate is not None:
                qualifiers[_csvData][_datafile] = line[1].strip()
                plate = PlateRecord(plate.id)
                for k, v in wells.items():
                    plate[k] = WellRecord(k, plate, v)
                plate.qualifiers = qualifiers
                yield plate
            # Reset the per-record state
            plate = PlateRecord(None)
            data = False
            qualifiers[_csvData] = {}
            idx = {}
            wells = {}
        elif _plate in line[0].strip():
            # Guard added for consistency with the other metadata branches
            if plate is None:
                continue
            plateID = line[1].strip()
            qualifiers[_csvData][_plate] = plateID
            # Parse also non-standard plate IDs
            if not plateID.startswith(_platesPrefix) and not plateID.startswith(_platesPrefixMammalian):
                warnings.warn('Non-standard plate ID found (%s)' % plateID,
                              BiopythonParserWarning)
            else:
                # Simplify the plates IDs, removing letters, as opm does
                if plateID.startswith(_platesPrefixMammalian):
                    pID = plateID[len(_platesPrefixMammalian):]
                else:
                    pID = plateID[len(_platesPrefix):]
                # Strip trailing characters until an integer remains
                while len(pID) > 0:
                    try:
                        int(pID)
                        break
                    except ValueError:
                        pID = pID[:-1]
                # No luck
                if len(pID) == 0:
                    warnings.warn('Non-standard plate ID found (%s)' % plateID,
                                  BiopythonParserWarning)
                elif int(pID) < 0:
                    # Negative IDs are normalized to their absolute value.
                    # (Fixed: the original concatenated str + int here, which
                    # raised TypeError, and omitted the warning category.)
                    newID = _platesPrefix + '%02d' % abs(int(pID))
                    warnings.warn('Non-standard plate ID found (%s), using %s' %
                                  (plateID, newID),
                                  BiopythonParserWarning)
                    plateID = newID
                else:
                    if plateID.startswith(_platesPrefixMammalian):
                        plateID = _platesPrefixMammalian + '%02d' % int(pID)
                    else:
                        plateID = _platesPrefix + '%02d' % int(pID)
            plate.id = plateID
        elif _strainType in line[0].strip():
            if plate is None:
                continue
            qualifiers[_csvData][_strainType] = line[1].strip()
        elif _sample in line[0].strip():
            if plate is None:
                continue
            qualifiers[_csvData][_sample] = line[1].strip()
        elif _strainNumber in line[0].strip():
            if plate is None:
                continue
            qualifiers[_csvData][_strainNumber] = line[1].strip()
        elif _strainName in line[0].strip():
            if plate is None:
                continue
            qualifiers[_csvData][_strainName] = line[1].strip()
        elif _other in line[0].strip():
            if plate is None:
                continue
            qualifiers[_csvData][_other] = line[1].strip()
        elif _file in line[0].strip():
            if plate is None:
                continue
            qualifiers[_csvData][_file] = line[1].strip()
        elif _position in line[0].strip():
            if plate is None:
                continue
            qualifiers[_csvData][_position] = line[1].strip()
        elif _setupTime in line[0].strip():
            # Fixed: this branch originally re-tested `_other` and was
            # therefore unreachable; the setup time was never recorded.
            if plate is None:
                continue
            qualifiers[_csvData][_setupTime] = line[1].strip()
        elif _hour in line[0].strip():
            if plate is None:
                continue
            # Header row: map each column index to its well id
            data = True
            for i in range(1, len(line)):
                x = line[i]
                if x == '':
                    continue
                wells[x.strip()] = {}
                idx[i] = x.strip()
        elif data:
            if plate is None:
                continue
            # Workaround for bad-formatted files
            try:
                float(line[0])
            except ValueError:
                continue
            time = float(line[0])
            for i in range(1, len(line)):
                x = line[i]
                try:
                    signal = float(x)
                except ValueError:
                    continue
                well = idx[i]
                wells[well][time] = signal
    # Yield the last (or only) plate of the file
    if plate is not None and plate.id is not None:
        plate = PlateRecord(plate.id)
        for k, v in wells.items():
            plate[k] = WellRecord(k, plate, v)
        plate.qualifiers = qualifiers
        yield plate
def _toOPM(plate):
    """Transform a PlateRecord object into a dictionary (PRIVATE).

    The returned dict mirrors the opm JSON layout: the plate qualifiers,
    a csv_data section holding the plate id, and a measurements section
    keyed by well id plus an 'Hour' list.  Key insertion order is kept
    identical to the historical implementation so json.dumps output is
    byte-for-byte stable.
    """
    out = dict(plate.qualifiers.items())
    out[_csvData] = {}
    out[_csvData][_plate] = plate.id
    # Collect the union of all sampled time points
    all_times = set()
    for well in plate._wells.values():
        all_times.update(well._signals)
    out[_measurements] = {}
    out[_measurements][_hour] = sorted(all_times)
    for wid in plate._wells:
        out[_measurements][wid] = []
    for hour in sorted(all_times):
        for wid, well in plate._wells.items():
            if hour in well._signals:
                out[_measurements][wid].append(well[hour])
            # This shouldn't happen
            else:
                out[_measurements][wid].append(float('nan'))
    return out
class JsonWriter(object):
    """Class to write PM Json format files."""
    def __init__(self, plates):
        # Iterable of PlateRecord objects to serialize
        self.plates = plates
    def write(self, handle):
        """Write this instance's plates to a file handle."""
        exported = []
        for record in self.plates:
            try:
                exported.append(_toOPM(record))
            except ValueError:
                raise ValueError('Could not export plate(s) in JSON format')
        handle.write(json.dumps(exported) + '\n')
        return len(exported)
|
zjuchenyuan/BioWeb
|
Lib/Bio/phenotype/phen_micro.py
|
Python
|
mit
| 37,132
|
[
"Biopython"
] |
c8c33dff3e78e4e56b4385ee76363dc03c151d32361784da6e008c2678c23127
|
# Principal Component Analysis Code :
from numpy import mean,cov,double,cumsum,dot,linalg,array,rank,size,flipud
from pylab import *
import numpy as np
import matplotlib.pyplot as pp
#from enthought.mayavi import mlab
import scipy.ndimage as ni
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import rospy
#import hrl_lib.mayavi2_util as mu
import hrl_lib.viz as hv
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import pickle
from mvpa.clfs.knn import kNN
from mvpa.datasets import Dataset
from mvpa.clfs.transerror import TransferError
from mvpa.misc.data_generators import normalFeatureDataset
from mvpa.algorithms.cvtranserror import CrossValidatedTransferError
from mvpa.datasets.splitters import NFoldSplitter
import sys
sys.path.insert(0, '/home/tapo/svn/robot1_data/usr/tapo/data_code/Classification/Data/Single_Contact_kNN/24')
from data_24 import Fmat_original
def pca(X):
    """Principal component analysis via the covariance-matrix method.

    X -- data matrix, one observation per row (num_data x dim).
    Returns (eigenvectors, eigenvalues, mean, centered data, covariance).
    NOTE: Python 2 code (print statement).
    """
    #get dimensions
    num_data,dim = X.shape
    #center data
    # NOTE(review): axis=1 averages over columns (one mean per row); for
    # feature-wise centering axis=0 would be expected.  The subtraction
    # below only broadcasts if X is a numpy.matrix (mean is then a column
    # vector) -- confirm against the callers' data layout.
    mean_X = X.mean(axis=1)
    M = (X-mean_X) # subtract the mean (along columns)
    Mcov = cov(M)
    print 'PCA - COV-Method used'
    val,vec = linalg.eig(Mcov)
    #return the projection matrix, the variance and the mean
    return vec,val,mean_X, M, Mcov
def my_mvpa(Y,num2):
    """Cross-validated accuracy (in %) of a k-NN classifier on the rows of Y.

    Y: projected feature matrix, one row per trial; num2: k for the kNN.
    NOTE(review): the hard-coded label list must match Y's row count and
    ordering (28 classes x 5 trials = 140 rows) -- confirm against
    Fmat_original before changing the data file.
    """
    #Using PYMVPA
    PCA_data = np.array(Y)
    # class label per row: 28 object/mobility classes, 5 trials each
    PCA_label_2 = ['Styrofoam-Fixed']*5 + ['Books-Fixed']*5 + ['Bucket-Fixed']*5 + ['Bowl-Fixed']*5 + ['Can-Fixed']*5 + ['Box-Fixed']*5 + ['Pipe-Fixed']*5 + ['Styrofoam-Movable']*5 + ['Container-Movable']*5 + ['Books-Movable']*5 + ['Cloth-Roll-Movable']*5 + ['Black-Rubber-Movable']*5 + ['Can-Movable']*5 + ['Box-Movable']*5 + ['Rug-Fixed']*5 + ['Bubble-Wrap-1-Fixed']*5 + ['Pillow-1-Fixed']*5 + ['Bubble-Wrap-2-Fixed']*5 + ['Sponge-Fixed']*5 + ['Foliage-Fixed']*5 + ['Pillow-2-Fixed']*5 + ['Rug-Movable']*5 + ['Bubble-Wrap-1-Movable']*5 + ['Pillow-1-Movable']*5 + ['Bubble-Wrap-2-Movable']*5 + ['Pillow-2-Movable']*5 + ['Cushion-Movable']*5 + ['Sponge-Movable']*5
    clf = kNN(k=num2)
    terr = TransferError(clf)
    ds1 = Dataset(samples=PCA_data,labels=PCA_label_2)
    # leave-one-out style N-fold cross-validation of the transfer error
    cvterr = CrossValidatedTransferError(terr,NFoldSplitter(cvtype=1),enable_states=['confusion'])
    error = cvterr(ds1)
    return (1-error)*100
def result(eigvec_total,eigval_total,mean_data_total,B,C,num_PC):
    """Project the data onto the first num_PC principal components.

    B: centered data matrix; C: covariance matrix (its diagonal supplies the
    per-feature standard deviations in s).  Returns the projected data Y.T.
    NOTE(review): '(W.T)*B' is a true matrix product only if the inputs are
    np.matrix; for plain ndarrays '*' is elementwise -- confirm the type of
    Fmat_original before modifying this.
    """
    # Reduced Eigen-Vector Matrix according to highest Eigenvalues..(Considering First 20 based on above figure)
    W = eigvec_total[:,0:num_PC]
    m_W, n_W = np.shape(W)
    # Normalizes the data set with respect to its variance (Not an Integral part of PCA, but useful)
    length = len(eigval_total)
    s = np.matrix(np.zeros(length)).T
    i = 0
    while i < length:
        s[i] = sqrt(C[i,i])
        i = i+1
    # Z is the variance-normalized data; currently unused (see note below)
    Z = np.divide(B,s)
    m_Z, n_Z = np.shape(Z)
    #Projected Data:
    Y = (W.T)*B # 'B' for my Laptop: otherwise 'Z' instead of 'B'
    m_Y, n_Y = np.shape(Y.T)
    return Y.T
if __name__ == '__main__':
    # Sweep 1..20 retained principal components; for each projection, sweep
    # k in the k-NN classifier and plot cross-validation accuracy vs k.
    Fmat = Fmat_original
    # Checking the Data-Matrix
    m_tot, n_tot = np.shape(Fmat)
    print 'Total_Matrix_Shape:',m_tot,n_tot
    eigvec_total, eigval_total, mean_data_total, B, C = pca(Fmat)
    #print eigvec_total
    #print eigval_total
    #print mean_data_total
    m_eigval_total, n_eigval_total = np.shape(np.matrix(eigval_total))
    m_eigvec_total, n_eigvec_total = np.shape(eigvec_total)
    m_mean_data_total, n_mean_data_total = np.shape(np.matrix(mean_data_total))
    print 'Eigenvalue Shape:',m_eigval_total, n_eigval_total
    print 'Eigenvector Shape:',m_eigvec_total, n_eigvec_total
    print 'Mean-Data Shape:',m_mean_data_total, n_mean_data_total
    #Recall that the cumulative sum of the eigenvalues shows the level of variance accounted by each of the corresponding eigenvectors. On the x axis there is the number of eigenvalues used.
    perc_total = cumsum(eigval_total)/sum(eigval_total)
    num_PC=1
    while num_PC <=20:
        Proj = np.zeros((140,num_PC))
        Proj = result(eigvec_total,eigval_total,mean_data_total,B,C,num_PC)
        # PYMVPA:
        # NOTE(review): the k sweep starts at 0; kNN with k=0 presumably
        # degenerates -- confirm PyMVPA's behavior for that value.
        num=0
        cv_acc = np.zeros(21)
        while num <=20:
            cv_acc[num] = my_mvpa(Proj,num)
            num = num+1
        plot(np.arange(21),cv_acc,'-s')
        grid('True')
        hold('True')
        num_PC = num_PC+1
    legend(('1-PC', '2-PCs', '3-PCs', '4-PCs', '5-PCs', '6-PCs', '7-PCs', '8-PCs', '9-PCs', '10-PCs', '11-PC', '12-PCs', '13-PCs', '14-PCs', '15-PCs', '16-PCs', '17-PCs', '18-PCs', '19-PCs', '20-PCs'))
    ylabel('Cross-Validation Accuracy')
    xlabel('k in k-NN Classifier')
    show()
|
tapomayukh/projects_in_python
|
classification/Classification_with_kNN/Single_Contact_Classification/Final/best_kNN_PCA/objects/24/test11_cross_validate_objects_24_1200ms.py
|
Python
|
mit
| 4,613
|
[
"Mayavi"
] |
3bcf98f0a18856900ec1584d61c62cb536ead186af98cab634798605c881a000
|
#!/usr/local/bin/python -i
# preceeding line should have path for Python on your machine
# vizplotgui_gl.py
# Purpose: viz running LAMMPS simulation via GL tool with plot and GUI
# Syntax: vizplotgui_gl.py in.lammps Nfreq compute-ID
# in.lammps = LAMMPS input script
# Nfreq = plot data point and viz shapshot every this many steps
# compute-ID = ID of compute that calculates temperature
# (or any other scalar quantity)
# IMPORTANT: this script cannot yet be run in parallel via Pypar,
# because I can't seem to do a MPI-style broadcast in Pypar
import sys,time
sys.path.append("./pizza")
# methods called by GUI
def run():
  """GUI callback: raise the global run flag so the main loop starts stepping."""
  global runflag
  runflag = 1
def stop():
  """GUI callback: clear the global run flag so the main loop idles."""
  global runflag
  runflag = 0
def settemp(value):
  """GUI callback: record the slider's temperature as the thermostat target.

  NOTE(review): 'value' is ignored; this is bound to <ButtonRelease-1>, which
  passes a Tk event object, so the target is re-read from the widget itself.
  """
  global temptarget
  temptarget = slider.get()
def quit():
  """GUI callback: raise the global break flag so the main loop exits.

  NOTE(review): shadows the builtin quit(); kept because the GUI button is
  wired to this name.
  """
  global breakflag
  breakflag = 1
# method called by timestep loop every Nfreq steps
# read dump snapshot and viz it, update plot with compute value
def update(ntimestep):
  """Advance the dump reader one snapshot, redraw the GL scene, and append
  the compute's scalar value to the GnuPlot time series."""
  d.next()
  d.unscale()
  g.show(ntimestep)
  value = lmp.extract_compute(compute,0,0)
  xaxis.append(ntimestep)
  yaxis.append(value)
  gn.plot(xaxis,yaxis)
# parse command line
argv = sys.argv
if len(argv) != 4:
  print "Syntax: vizplotgui_gl.py in.lammps Nfreq compute-ID"
  sys.exit()
infile = sys.argv[1]      # LAMMPS input script (must contain no run command)
nfreq = int(sys.argv[2])  # plot/viz refresh interval in timesteps
compute = sys.argv[3]     # ID of the compute whose scalar is plotted
# this proc's rank; stays 0 in the serial case
me = 0
# uncomment if running in parallel via Pypar
#import pypar
#me = pypar.rank()
#nprocs = pypar.size()
from lammps import lammps
lmp = lammps()
# run infile all at once
# assumed to have no run command in it
# dump a file in native LAMMPS dump format for Pizza.py dump tool
lmp.file(infile)
lmp.command("thermo %d" % nfreq)
lmp.command("dump python all atom %d tmp.dump" % nfreq)
# initial 0-step run to generate initial 1-point plot, dump file, and image
lmp.command("run 0 pre yes post no")
value = lmp.extract_compute(compute,0,0)
ntimestep = 0
# plot series, seeded with the step-0 value
xaxis = [ntimestep]
yaxis = [value]
# shared GUI state, mutated by the callbacks defined above
breakflag = 0     # quit(): exit the main loop
runflag = 0       # run()/stop(): advance or idle
temptarget = 1.0  # settemp(): Langevin thermostat target
# wrapper on GL window via Pizza.py gl tool
# just proc 0 handles reading of dump file and viz
if me == 0:
  from Tkinter import *
  tkroot = Tk()
  tkroot.withdraw()
  from dump import dump
  from gl import gl
  d = dump("tmp.dump",0)
  g = gl(d)
  d.next()
  d.unscale()
  # fixed camera: zoom, pan, rotation, render quality, box outline
  g.zoom(1)
  g.shift(0,0)
  g.rotate(0,270)
  g.q(10)
  g.box(1)
  g.show(ntimestep)
# display GUI with run/stop buttons and slider for temperature
if me == 0:
  from Tkinter import *
  # NOTE(review): Tk() is created (and withdrawn) a second time here,
  # rebinding tkroot; presumably redundant but harmless -- confirm.
  tkroot = Tk()
  tkroot.withdraw()
  root = Toplevel(tkroot)
  root.title("LAMMPS GUI")
  frame = Frame(root)
  Button(frame,text="Run",command=run).pack(side=LEFT)
  Button(frame,text="Stop",command=stop).pack(side=LEFT)
  slider = Scale(frame,from_=0.0,to=5.0,resolution=0.1,
                 orient=HORIZONTAL,label="Temperature")
  slider.bind('<ButtonRelease-1>',settemp)
  slider.set(temptarget)
  slider.pack(side=LEFT)
  Button(frame,text="Quit",command=quit).pack(side=RIGHT)
  frame.pack()
  tkroot.update()
# wrapper on GnuPlot via Pizza.py gnu tool
if me == 0:
  from gnu import gnu
  gn = gnu()
  gn.plot(xaxis,yaxis)
  gn.title(compute,"Timestep","Temperature")
# endless loop, checking status of GUI settings every Nfreq steps
# run with pre yes/no and post yes/no depending on go/stop status
# re-invoke fix langevin with new seed when temperature slider changes
# after re-invoke of fix langevin, run with pre yes
running = 0
temp = temptarget
seed = 12345
lmp.command("fix 2 all langevin %g %g 0.1 %d" % (temp,temp,seed))
while 1:
  if me == 0: tkroot.update()
  # temperature slider moved: re-issue the thermostat with a fresh seed
  if temp != temptarget:
    temp = temptarget
    seed += me+1
    # BUGFIX: the refreshed seed was previously discarded -- the command
    # re-sent the hard-coded literal 12345, so re-invoking the fix never
    # actually changed the seed as the header comment promises.
    lmp.command("fix 2 all langevin %g %g 0.1 %d" % (temp,temp,seed))
    running = 0
  if runflag and running:
    # steady state: skip both setup and timing output
    lmp.command("run %d pre no post no" % nfreq)
    ntimestep += nfreq
    if me == 0: update(ntimestep)
  elif runflag and not running:
    # first run after a stop or thermostat change: full setup (pre yes)
    lmp.command("run %d pre yes post no" % nfreq)
    ntimestep += nfreq
    if me == 0: update(ntimestep)
  elif not runflag and running:
    # just stopped: finish with post yes to flush timing output
    lmp.command("run %d pre no post yes" % nfreq)
    ntimestep += nfreq
    if me == 0: update(ntimestep)
  if breakflag: break
  if runflag: running = 1
  else: running = 0
  time.sleep(0.01)
lmp.command("run 0 pre no post yes")
# uncomment if running in parallel via Pypar
#print "Proc %d out of %d procs has" % (me,nprocs), lmp
#pypar.finalize()
|
val-github/lammps-dev
|
python/examples/vizplotgui_gl.py
|
Python
|
gpl-2.0
| 4,375
|
[
"LAMMPS"
] |
722ef0e0572c84740642c433f686bca6a620fadda60ddf83d2b50e6c86a408f4
|
import numpy as np
import copy
import pdb
import os
import time
import matplotlib
matplotlib.use('Agg')
import matplotlib.pylab as plt
from .context import pep, vfe
from .context import pep_tmp as pep
from test_utils import check_grad
np.random.seed(42)
def test_gpr_pep_gaussian():
    """Finite-difference gradient check for the rank-one sparse GP regression
    model with a Gaussian likelihood, swept over a range of PEP alpha values."""
    N_train = 20   # training points
    M = 5          # inducing points
    D = 2          # input dimensionality
    Q = 3          # output dimensions
    y_train = np.random.randn(N_train, Q)
    x_train = np.random.randn(N_train, D)
    model = pep.SGPR_rank_one(x_train, y_train, M, lik='Gaussian')
    params = model.init_hypers(y_train)
    print 'gpr pep gaussian'
    for alpha in np.linspace(0.05, 1, 20):
        print alpha
        check_grad(params, model, stochastic=False, alpha=alpha)
def compared_gpr_aep_gaussian_collapsed():
    """Print objective and per-key gradient differences between the rank-one
    PEP model and the collapsed VFE model for several alpha values.

    Differences should be near zero when the PEP model has converged.
    """
    # make sure that the number of pep sweeps is large in the pep model
    N_train = 20
    M = 10
    D = 1
    Q = 1
    for alpha in np.linspace(0.05, 1, 5):
        y_train = np.random.randn(N_train, Q)
        x_train = np.random.randn(N_train, D)
        model = pep.SGPR_rank_one(x_train, y_train, M, lik='Gaussian')
        params = model.init_hypers(y_train)
        obj, grads = model.objective_function(params, N_train, alpha=alpha)
        collapsed_model = vfe.SGPR_collapsed(x_train, y_train, M)
        c_obj, c_grads = collapsed_model.objective_function(params, N_train, alpha=alpha)
        print alpha
        print obj - c_obj
        print obj, c_obj
        for key in c_grads.keys():
            print key
            print grads[key] - c_grads[key]
            print grads[key]
            print c_grads[key]
def test_gpr_pep_probit():
N_train = 5
alpha = 0.5
M = 3
D = 2
Q = 3
x_train = np.random.randn(N_train, D)
y_train = 2 * np.random.randint(0, 2, size=(N_train, Q)) - 1
model = pep.SGPR_rank_one(x_train, y_train, M, lik='Probit')
params = model.init_hypers(y_train)
print 'gpr aep probit'
for alpha in np.linspace(0.05, 1, 20):
print alpha
check_grad(params, model, stochastic=False, alpha=alpha)
# TODO
# def plot_gpr_pep_gaussian_stochastic():
# N_train = 2000
# alpha = 0.5
# M = 50
# D = 2
# Q = 3
# y_train = np.random.randn(N_train, Q)
# x_train = np.random.randn(N_train, D)
# model = aep.SGPR(x_train, y_train, M, lik='Gaussian')
# # init hypers, inducing points and q(u) params
# params = model.init_hypers(y_train)
# logZ, grad_all = model.objective_function(params, N_train, alpha=alpha)
# mbs = np.logspace(-2, 0, 10)
# reps = 20
# times = np.zeros(len(mbs))
# objs = np.zeros((len(mbs), reps))
# for i, mb in enumerate(mbs):
# no_points = int(N_train * mb)
# start_time = time.time()
# for k in range(reps):
# objs[i, k] = model.objective_function(
# params, no_points, alpha=alpha)[0]
# times[i] = time.time() - start_time
# f, (ax1, ax2) = plt.subplots(1, 2, figsize=(16, 6))
# ax1.plot(mbs, times, 'x-')
# ax1.set_xlabel("Minibatch proportion")
# ax1.set_ylabel("Time taken")
# ax1.set_xscale("log", nonposx='clip')
# ax2.plot(mbs, objs, 'kx')
# ax2.axhline(logZ, color='b')
# ax2.set_xlabel("Minibatch proportion")
# ax2.set_ylabel("ELBO estimates")
# ax2.set_xscale("log", nonposx='clip')
# plt.savefig('/tmp/gaussian_stochastic_aep_gpr.pdf')
# def plot_gpr_pep_probit_stochastic():
# N_train = 2000
# alpha = 0.5
# M = 50
# D = 2
# Q = 3
# x_train = np.random.randn(N_train, D)
# y_train = 2 * np.random.randint(0, 2, size=(N_train, Q)) - 1
# model = aep.SGPR(x_train, y_train, M, lik='Probit')
# # init hypers, inducing points and q(u) params
# params = model.init_hypers(y_train)
# logZ, grad_all = model.objective_function(params, N_train, alpha=alpha)
# mbs = np.logspace(-2, 0, 10)
# reps = 20
# times = np.zeros(len(mbs))
# objs = np.zeros((len(mbs), reps))
# for i, mb in enumerate(mbs):
# no_points = int(N_train * mb)
# start_time = time.time()
# for k in range(reps):
# objs[i, k] = model.objective_function(
# params, no_points, alpha=alpha)[0]
# times[i] = time.time() - start_time
# f, (ax1, ax2) = plt.subplots(1, 2, figsize=(16, 6))
# ax1.plot(mbs, times, 'x-')
# ax1.set_xlabel("Minibatch proportion")
# ax1.set_ylabel("Time taken")
# ax1.set_xscale("log", nonposx='clip')
# ax2.plot(mbs, objs, 'kx')
# ax2.axhline(logZ, color='b')
# ax2.set_xlabel("Minibatch proportion")
# ax2.set_ylabel("ELBO estimates")
# ax2.set_xscale("log", nonposx='clip')
# plt.savefig('/tmp/probit_stochastic_aep_gpr.pdf')
if __name__ == '__main__':
    # compared_gpr_aep_gaussian_collapsed()
    # Run each gradient check once (the loop is kept so the count is easy to
    # bump while debugging flaky finite-difference checks).
    for i in range(1):
        test_gpr_pep_gaussian()
        test_gpr_pep_probit()
    # plot_gpr_pep_probit_stochastic()
    # plot_gpr_pep_gaussian_stochastic()
|
thangbui/geepee
|
tests/test_grads_pep.py
|
Python
|
mit
| 4,943
|
[
"Gaussian"
] |
0d9faa6381ecd2ecd4c23bdab2a422d8838c37d561236b354e9ff1dc73996959
|
"""Header value parser implementing various email-related RFC parsing rules.
The parsing methods defined in this module implement various email-related
parsing rules.  Principal among them is RFC 5322, which is the follow-on
to RFC 2822 and primarily a clarification of the former. It also implements
RFC 2047 encoded word decoding.
RFC 5322 goes to considerable trouble to maintain backward compatibility with
RFC 822 in the parse phase, while cleaning up the structure on the generation
phase. This parser supports correct RFC 5322 generation by tagging white space
as folding white space only when folding is allowed in the non-obsolete rule
sets. Actually, the parser is even more generous when accepting input than RFC
5322 mandates, following the spirit of Postel's Law, which RFC 5322 encourages.
Where possible deviations from the standard are annotated on the 'defects'
attribute of tokens that deviate.
The general structure of the parser follows RFC 5322, and uses its terminology
where there is a direct correspondence. Where the implementation requires a
somewhat different structure than that used by the formal grammar, new terms
that mimic the closest existing terms are used. Thus, it really helps to have
a copy of RFC 5322 handy when studying this code.
Input to the parser is a string that has already been unfolded according to
RFC 5322 rules. According to the RFC this unfolding is the very first step, and
this parser leaves the unfolding step to a higher level message parser, which
will have already detected the line breaks that need unfolding while
determining the beginning and end of each header.
The output of the parser is a TokenList object, which is a list subclass. A
TokenList is a recursive data structure. The terminal nodes of the structure
are Terminal objects, which are subclasses of str. These do not correspond
directly to terminal objects in the formal grammar, but are instead more
practical higher level combinations of true terminals.
All TokenList and Terminal objects have a 'value' attribute, which produces the
semantically meaningful value of that part of the parse subtree. The value of
all whitespace tokens (no matter how many sub-tokens they may contain) is a
single space, as per the RFC rules. This includes 'CFWS', which is herein
included in the general class of whitespace tokens. There is one exception to
the rule that whitespace tokens are collapsed into single spaces in values: in
the value of a 'bare-quoted-string' (a quoted-string with no leading or
trailing whitespace), any whitespace that appeared between the quotation marks
is preserved in the returned value. Note that in all Terminal strings quoted
pairs are turned into their unquoted values.
All TokenList and Terminal objects also have a string value, which attempts to
be a "canonical" representation of the RFC-compliant form of the substring that
produced the parsed subtree, including minimal use of quoted pair quoting.
Whitespace runs are not collapsed.
Comment tokens also have a 'content' attribute providing the string found
between the parens (including any nested comments) with whitespace preserved.
All TokenList and Terminal objects have a 'defects' attribute which is a
possibly empty list all of the defects found while creating the token. Defects
may appear on any token in the tree, and a composite list of all defects in the
subtree is available through the 'all_defects' attribute of any node. (For
Terminal notes x.defects == x.all_defects.)
Each object in a parse tree is called a 'token', and each has a 'token_type'
attribute that gives the name from the RFC 5322 grammar that it represents.
Not all RFC 5322 nodes are produced, and there is one non-RFC 5322 node that
may be produced: 'ptext'. A 'ptext' is a string of printable ascii characters.
It is returned in place of lists of (ctext/quoted-pair) and
(qtext/quoted-pair).
XXX: provide complete list of token types.
"""
import re
import urllib # For urllib.parse.unquote
from string import hexdigits
from collections import OrderedDict
from email import _encoded_words as _ew
from email import errors
from email import utils
#
# Useful constants and functions
#
# RFC 5322 white space characters (space and horizontal tab).
WSP = set(' \t')
# CFWS may begin with white space or an opening comment paren.
CFWS_LEADER = WSP | set('(')
# RFC 5322 "specials" (note: the raw string includes both '\' and '"').
SPECIALS = set(r'()<>@,:;.\"[]')
ATOM_ENDS = SPECIALS | WSP
# dot-atoms allow embedded dots, so '.' does not terminate them.
DOT_ATOM_ENDS = ATOM_ENDS - set('.')
# '.', '"', and '(' do not end phrases in order to support obs-phrase
PHRASE_ENDS = SPECIALS - set('."(')
# MIME (RFC 2045) tspecials; '.' is allowed inside tokens here.
TSPECIALS = (SPECIALS | set('/?=')) - set('.')
TOKEN_ENDS = TSPECIALS | WSP
# RFC 2231 attribute specials; '%' is reserved for extended values.
ASPECIALS = TSPECIALS | set("*'%")
ATTRIBUTE_ENDS = ASPECIALS | WSP
# extended attributes use %-encoding, so '%' cannot end them.
EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
def quote_string(value):
    """Return *value* as an RFC 5322 quoted-string.

    Backslashes and double quotes in str(value) are escaped with a
    backslash, and the result is wrapped in double quotes.
    """
    escaped = str(value).replace('\\', '\\\\')
    escaped = escaped.replace('"', r'\"')
    return '"' + escaped + '"'
#
# Accumulator for header folding
#
class _Folded:
    """Accumulator used while folding a header into lines of at most maxlen.

    done holds completed line fragments (including line separators);
    current holds the fragments of the line being built; lastlen is the
    length of current; stickyspace, when not None, is leading white space
    that should be emitted just before the next token that fits.
    """
    def __init__(self, maxlen, policy):
        self.maxlen = maxlen
        self.policy = policy
        self.lastlen = 0
        self.stickyspace = None
        self.firstline = True
        self.done = []
        self.current = []
    def newline(self):
        """Commit the current line to done and start a fresh one."""
        self.done.extend(self.current)
        self.done.append(self.policy.linesep)
        self.current.clear()
        self.lastlen = 0
    def finalize(self):
        """Flush any partially built last line."""
        if self.current:
            self.newline()
    def __str__(self):
        return ''.join(self.done)
    def append(self, stoken):
        """Unconditionally add *stoken* to the current line."""
        self.current.append(stoken)
    def append_if_fits(self, token, stoken=None):
        """Try to place *token* (string form *stoken*) on the current line.

        Returns True if the token was consumed (possibly after folding it
        recursively), False if the caller must handle it another way.
        """
        if stoken is None:
            stoken = str(token)
        l = len(stoken)
        if self.stickyspace is not None:
            stickyspace_len = len(self.stickyspace)
            # Easy case: sticky space plus token fit on the current line.
            if self.lastlen + stickyspace_len + l <= self.maxlen:
                self.current.append(self.stickyspace)
                self.lastlen += stickyspace_len
                self.current.append(stoken)
                self.lastlen += l
                self.stickyspace = None
                self.firstline = False
                return True
            if token.has_fws:
                # Token has internal fold points: absorb its leading fws
                # into the sticky space and fold the token recursively.
                ws = token.pop_leading_fws()
                if ws is not None:
                    self.stickyspace += str(ws)
                    stickyspace_len += len(ws)
                token._fold(self)
                return True
            if stickyspace_len and l + 1 <= self.maxlen:
                # Token fits on a fresh line; trim the sticky space so that
                # what remains of it fits in front of the token.
                margin = self.maxlen - l
                if 0 < margin < stickyspace_len:
                    trim = stickyspace_len - margin
                    self.current.append(self.stickyspace[:trim])
                    self.stickyspace = self.stickyspace[trim:]
                    stickyspace_len = trim
                self.newline()
                self.current.append(self.stickyspace)
                self.current.append(stoken)
                self.lastlen = l + stickyspace_len
                self.stickyspace = None
                self.firstline = False
                return True
            # Overlong token: put it (with the sticky space) on its own
            # line, unless we are still on the very first line.
            if not self.firstline:
                self.newline()
            self.current.append(self.stickyspace)
            self.current.append(stoken)
            self.stickyspace = None
            self.firstline = False
            return True
        if self.lastlen + l <= self.maxlen:
            self.current.append(stoken)
            self.lastlen += l
            return True
        if l < self.maxlen:
            self.newline()
            self.current.append(stoken)
            self.lastlen = l
            return True
        return False
#
# TokenList and its subclasses
#
class TokenList(list):
    """Base class for parse-tree nodes: a list of sub-tokens/terminals.

    str() of a TokenList is the canonical RFC-compliant serialization of
    the subtree; .value is the semantically meaningful value (white space
    collapsed per the RFC rules); .defects/.all_defects record deviations
    found during parsing.
    """
    token_type = None
    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        self.defects = []
    def __str__(self):
        return ''.join(str(x) for x in self)
    def __repr__(self):
        return '{}({})'.format(self.__class__.__name__,
                               super().__repr__())
    @property
    def value(self):
        return ''.join(x.value for x in self if x.value)
    @property
    def all_defects(self):
        """All defects in this subtree, including this node's own."""
        return sum((x.all_defects for x in self), self.defects)
    #
    # Folding API
    #
    # parts():
    #
    # return a list of objects that constitute the "higher level syntactic
    # objects" specified by the RFC as the best places to fold a header line.
    # The returned objects must include leading folding white space, even if
    # this means mutating the underlying parse tree of the object.  Each object
    # is only responsible for returning *its* parts, and should not drill down
    # to any lower level except as required to meet the leading folding white
    # space constraint.
    #
    # _fold(folded):
    #
    #   folded: the result accumulator.  This is an instance of _Folded.
    #       (XXX: I haven't finished factoring this out yet, the folding code
    #       pretty much uses this as a state object.)  When the folded.current
    #       contains as much text as will fit, the _fold method should call
    #       folded.newline.
    #   folded.lastlen: the current length of the test stored in folded.current.
    #   folded.maxlen: The maximum number of characters that may appear on a
    #       folded line.  Differs from the policy setting in that "no limit" is
    #       represented by +inf, which means it can be used in the trivially
    #       logical fashion in comparisons.
    #
    # Currently no subclasses implement parts, and I think this will remain
    # true.  A subclass only needs to implement _fold when the generic version
    # isn't sufficient.  _fold will need to be implemented primarily when it is
    # possible for encoded words to appear in the specialized token-list, since
    # there is no generic algorithm that can know where exactly the encoded
    # words are allowed.  A _fold implementation is responsible for filling
    # lines in the same general way that the top level _fold does.  It may, and
    # should, call the _fold method of sub-objects in a similar fashion to that
    # of the top level _fold.
    #
    # XXX: I'm hoping it will be possible to factor the existing code further
    # to reduce redundancy and make the logic clearer.
    @property
    def parts(self):
        """Yield the best fold points: runs of tokens split at leading fws."""
        klass = self.__class__
        this = []
        for token in self:
            if token.startswith_fws():
                if this:
                    yield this[0] if len(this)==1 else klass(this)
                    this.clear()
            end_ws = token.pop_trailing_ws()
            this.append(token)
            if end_ws:
                yield klass(this)
                this = [end_ws]
        if this:
            yield this[0] if len(this)==1 else klass(this)
    def startswith_fws(self):
        return self[0].startswith_fws()
    def pop_leading_fws(self):
        if self[0].token_type == 'fws':
            return self.pop(0)
        return self[0].pop_leading_fws()
    def pop_trailing_ws(self):
        if self[-1].token_type == 'cfws':
            return self.pop(-1)
        return self[-1].pop_trailing_ws()
    @property
    def has_fws(self):
        for part in self:
            if part.has_fws:
                return True
        return False
    def has_leading_comment(self):
        return self[0].has_leading_comment()
    @property
    def comments(self):
        comments = []
        for token in self:
            comments.extend(token.comments)
        return comments
    def fold(self, *, policy):
        """Return this token list folded into lines per *policy*."""
        # max_line_length 0/None means no limit, ie: infinitely long.
        maxlen = policy.max_line_length or float("+inf")
        folded = _Folded(maxlen, policy)
        self._fold(folded)
        folded.finalize()
        return str(folded)
    def as_encoded_word(self, charset):
        """Return this token as an RFC 2047 encoded word in *charset*."""
        # This works only for things returned by 'parts', which include
        # the leading fws, if any, that should be used.
        res = []
        ws = self.pop_leading_fws()
        if ws:
            res.append(ws)
        trailer = self.pop(-1) if self[-1].token_type=='fws' else ''
        res.append(_ew.encode(str(self), charset))
        res.append(trailer)
        return ''.join(res)
    def cte_encode(self, charset, policy):
        res = []
        for part in self:
            res.append(part.cte_encode(charset, policy))
        return ''.join(res)
    def _fold(self, folded):
        for part in self.parts:
            tstr = str(part)
            tlen = len(tstr)
            try:
                str(part).encode('us-ascii')
            except UnicodeEncodeError:
                if any(isinstance(x, errors.UndecodableBytesDefect)
                       for x in part.all_defects):
                    charset = 'unknown-8bit'
                else:
                    # XXX: this should be a policy setting
                    charset = 'utf-8'
                tstr = part.cte_encode(charset, folded.policy)
                tlen = len(tstr)
            if folded.append_if_fits(part, tstr):
                continue
            # Peel off the leading whitespace if any and make it sticky, to
            # avoid infinite recursion.
            ws = part.pop_leading_fws()
            if ws is not None:
                # BUGFIX: use the fws token that was just popped.  The
                # previous code did str(part.pop(0)), popping a *second*
                # token while discarding ws entirely (ws was an unused
                # local), dropping a token from the output.  This matches
                # the identical pattern in UnstructuredTokenList._fold.
                folded.stickyspace = str(ws)
                if folded.append_if_fits(part):
                    continue
            if part.has_fws:
                part._fold(folded)
                continue
            # There are no fold points in this one; it is too long for a single
            # line and can't be split...we just have to put it on its own line.
            folded.append(tstr)
            folded.newline()
    def pprint(self, indent=''):
        print('\n'.join(self._pp(indent='')))
    def ppstr(self, indent=''):
        return '\n'.join(self._pp(indent=''))
    def _pp(self, indent=''):
        yield '{}{}/{}('.format(
            indent,
            self.__class__.__name__,
            self.token_type)
        for token in self:
            if not hasattr(token, '_pp'):
                yield (indent + '    !! invalid element in token '
                                'list: {!r}'.format(token))
            else:
                yield from token._pp(indent+'    ')
        if self.defects:
            extra = ' Defects: {}'.format(self.defects)
        else:
            extra = ''
        yield '{}){}'.format(indent, extra)
class WhiteSpaceTokenList(TokenList):
    """Token list whose semantic value collapses to a single space (RFC rule)."""
    @property
    def value(self):
        return ' '
    @property
    def comments(self):
        """Content strings of any comment children."""
        found = []
        for tok in self:
            if tok.token_type == 'comment':
                found.append(tok.content)
        return found
class UnstructuredTokenList(TokenList):
    """Token list for unstructured header values (e.g. Subject)."""
    token_type = 'unstructured'
    def _fold(self, folded):
        last_ew = None
        for part in self.parts:
            tstr = str(part)
            is_ew = False
            try:
                str(part).encode('us-ascii')
            except UnicodeEncodeError:
                if any(isinstance(x, errors.UndecodableBytesDefect)
                       for x in part.all_defects):
                    charset = 'unknown-8bit'
                else:
                    charset = 'utf-8'
                if last_ew is not None:
                    # We've already done an EW, combine this one with it
                    # if there's room.
                    chunk = get_unstructured(
                        ''.join(folded.current[last_ew:]+[tstr])).as_encoded_word(charset)
                    oldlastlen = sum(len(x) for x in folded.current[:last_ew])
                    schunk = str(chunk)
                    lchunk = len(schunk)
                    if oldlastlen + lchunk <= folded.maxlen:
                        del folded.current[last_ew:]
                        folded.append(schunk)
                        folded.lastlen = oldlastlen + lchunk
                        continue
                tstr = part.as_encoded_word(charset)
                is_ew = True
            if folded.append_if_fits(part, tstr):
                if is_ew:
                    last_ew = len(folded.current) - 1
                continue
            if is_ew or last_ew:
                # It's too big to fit on the line, but since we've
                # got encoded words we can use encoded word folding.
                part._fold_as_ew(folded)
                continue
            # Peel off the leading whitespace if any and make it sticky, to
            # avoid infinite recursion.
            ws = part.pop_leading_fws()
            if ws is not None:
                folded.stickyspace = str(ws)
                if folded.append_if_fits(part):
                    continue
            if part.has_fws:
                # BUGFIX: was part.fold(folded).  fold() takes only a
                # keyword-only 'policy' argument, so that call always raised
                # TypeError; the recursive folding step is _fold().
                part._fold(folded)
                continue
            # It can't be split...we just have to put it on its own line.
            folded.append(tstr)
            folded.newline()
            last_ew = None
    def cte_encode(self, charset, policy):
        """Return the value with non-ASCII runs CTE-encoded using *charset*."""
        res = []
        last_ew = None
        for part in self:
            spart = str(part)
            try:
                spart.encode('us-ascii')
                res.append(spart)
            except UnicodeEncodeError:
                if last_ew is None:
                    res.append(part.cte_encode(charset, policy))
                    last_ew = len(res)
                else:
                    tl = get_unstructured(''.join(res[last_ew:] + [spart]))
                    # BUGFIX: as_encoded_word() requires the charset
                    # argument; it was omitted, raising TypeError.
                    res.append(tl.as_encoded_word(charset))
        return ''.join(res)
class Phrase(TokenList):
    """An RFC 5322 phrase (e.g. a display name): words and CFWS."""
    token_type = 'phrase'
    def _fold(self, folded):
        # As with Unstructured, we can have pure ASCII with or without
        # surrogateescape encoded bytes, or we could have unicode. But this
        # case is more complicated, since we have to deal with the various
        # sub-token types and how they can be composed in the face of
        # unicode-that-needs-CTE-encoding, and the fact that if a token a
        # comment that becomes a barrier across which we can't compose encoded
        # words.
        last_ew = None
        for part in self.parts:
            tstr = str(part)
            tlen = len(tstr)
            has_ew = False
            try:
                str(part).encode('us-ascii')
            except UnicodeEncodeError:
                if any(isinstance(x, errors.UndecodableBytesDefect)
                       for x in part.all_defects):
                    charset = 'unknown-8bit'
                else:
                    charset = 'utf-8'
                if last_ew is not None and not part.has_leading_comment():
                    # We've already done an EW, let's see if we can combine
                    # this one with it.  The last_ew logic ensures that all we
                    # have at this point is atoms, no comments or quoted
                    # strings.  So we can treat the text between the last
                    # encoded word and the content of this token as
                    # unstructured text, and things will work correctly.  But
                    # we have to strip off any trailing comment on this token
                    # first, and if it is a quoted string we have to pull out
                    # the content (we're encoding it, so it no longer needs to
                    # be quoted).
                    # NOTE(review): 'remainder' is popped but never
                    # re-appended below -- looks like the trailing cfws is
                    # silently dropped; confirm against upstream.
                    if part[-1].token_type == 'cfws' and part.comments:
                        remainder = part.pop(-1)
                    else:
                        remainder = ''
                    for i, token in enumerate(part):
                        if token.token_type == 'bare-quoted-string':
                            part[i] = UnstructuredTokenList(token[:])
                    chunk = get_unstructured(
                        ''.join(folded.current[last_ew:]+[tstr])).as_encoded_word(charset)
                    schunk = str(chunk)
                    lchunk = len(schunk)
                    # NOTE(review): this compares an *index* (last_ew) plus a
                    # length against maxlen; UnstructuredTokenList._fold uses
                    # the summed length of the preceding fragments instead --
                    # confirm whether this is intentional.
                    if last_ew + lchunk <= folded.maxlen:
                        del folded.current[last_ew:]
                        folded.append(schunk)
                        folded.lastlen = sum(len(x) for x in folded.current)
                        continue
                tstr = part.as_encoded_word(charset)
                tlen = len(tstr)
                has_ew = True
            if folded.append_if_fits(part, tstr):
                if has_ew and not part.comments:
                    last_ew = len(folded.current) - 1
                elif part.comments or part.token_type == 'quoted-string':
                    # If a comment is involved we can't combine EWs.  And if a
                    # quoted string is involved, it's not worth the effort to
                    # try to combine them.
                    last_ew = None
                continue
            part._fold(folded)
    def cte_encode(self, charset, policy):
        """Return the phrase with non-ASCII parts CTE-encoded using *charset*."""
        res = []
        last_ew = None
        is_ew = False
        for part in self:
            spart = str(part)
            try:
                spart.encode('us-ascii')
                res.append(spart)
            except UnicodeEncodeError:
                is_ew = True
                if last_ew is None:
                    if not part.comments:
                        last_ew = len(res)
                    res.append(part.cte_encode(charset, policy))
                elif not part.has_leading_comment():
                    # NOTE(review): 'remainder' is computed but unused here
                    # as well; see the note in _fold above.
                    if part[-1].token_type == 'cfws' and part.comments:
                        remainder = part.pop(-1)
                    else:
                        remainder = ''
                    for i, token in enumerate(part):
                        if token.token_type == 'bare-quoted-string':
                            part[i] = UnstructuredTokenList(token[:])
                    tl = get_unstructured(''.join(res[last_ew:] + [spart]))
                    res[last_ew:] = [tl.as_encoded_word(charset)]
            if part.comments or (not is_ew and part.token_type == 'quoted-string'):
                last_ew = None
        return ''.join(res)
class Word(TokenList):
    """An RFC 5322 'word': an atom or a quoted-string."""
    token_type = 'word'
class CFWSList(WhiteSpaceTokenList):
    """Comment-or-folding-white-space run (RFC 5322 CFWS)."""
    token_type = 'cfws'
    def has_leading_comment(self):
        # Any comment content at all counts as a leading comment for CFWS.
        return bool(self.comments)
class Atom(TokenList):
    """An RFC 5322 atom, possibly with surrounding CFWS."""
    token_type = 'atom'
class Token(TokenList):
    """An RFC 2045 MIME token."""
    token_type = 'token'
class EncodedWord(TokenList):
    """An RFC 2047 encoded word.

    cte, charset and lang record the encoded word's parameters when they
    are known from parsing.
    """
    token_type = 'encoded-word'
    cte = None
    charset = None
    lang = None
    @property
    def encoded(self):
        if self.cte is not None:
            return self.cte
        # BUGFIX: the fallback computed the encoded form but dropped it
        # (no return), so the property yielded None whenever cte was unset.
        return _ew.encode(str(self), self.charset)
class QuotedString(TokenList):
    """A quoted-string together with any surrounding CFWS."""
    token_type = 'quoted-string'
    @property
    def content(self):
        """Unquoted content of the embedded bare-quoted-string, if any."""
        return next((tok.value for tok in self
                     if tok.token_type == 'bare-quoted-string'), None)
    @property
    def quoted_value(self):
        """Value with the quoted part kept in its quoted form."""
        pieces = [str(tok) if tok.token_type == 'bare-quoted-string'
                  else tok.value
                  for tok in self]
        return ''.join(pieces)
    @property
    def stripped_value(self):
        """Value of the first bare-quoted-string child (same as content)."""
        return next((tok.value for tok in self
                     if tok.token_type == 'bare-quoted-string'), None)
class BareQuotedString(QuotedString):
    """The quoted-string proper, without any surrounding CFWS."""
    token_type = 'bare-quoted-string'
    def __str__(self):
        # Serialize by re-quoting the joined content of the children.
        return quote_string(self.value)
    @property
    def value(self):
        return ''.join(map(str, self))
class Comment(WhiteSpaceTokenList):
    """An RFC 5322 comment: parenthesized text with no semantic value."""
    token_type = 'comment'
    def __str__(self):
        return ''.join(sum([
                            ["("],
                            [self.quote(x) for x in self],
                            [")"],
                            ], []))
    def quote(self, value):
        """Return str(value) with backslash, '(' and ')' backslash-quoted."""
        if value.token_type == 'comment':
            return str(value)
        # BUGFIX(idiom): '\(' and '\)' were invalid escape sequences (they
        # happen to equal '\\(' / '\\)' but emit SyntaxWarning on modern
        # Pythons); raw strings are byte-identical and warning-free.
        return str(value).replace('\\', '\\\\').replace(
                                  '(', r'\(').replace(
                                  ')', r'\)')
    @property
    def content(self):
        """The text between the parens, with whitespace preserved."""
        return ''.join(str(x) for x in self)
    @property
    def comments(self):
        return [self.content]
class AddressList(TokenList):
    """A comma-separated list of addresses."""
    token_type = 'address-list'
    @property
    def addresses(self):
        """The address children, in order."""
        return [tok for tok in self if tok.token_type == 'address']
    @property
    def mailboxes(self):
        """All valid mailboxes from every address child."""
        collected = []
        for tok in self:
            if tok.token_type == 'address':
                collected.extend(tok.mailboxes)
        return collected
    @property
    def all_mailboxes(self):
        """All mailboxes, valid or invalid, from every address child."""
        collected = []
        for tok in self:
            if tok.token_type == 'address':
                collected.extend(tok.all_mailboxes)
        return collected
class Address(TokenList):
    """A single address: either a mailbox or a group."""
    token_type = 'address'
    @property
    def display_name(self):
        # Only a group carries a display name at this level; implicitly
        # returns None for a mailbox.
        if self[0].token_type == 'group':
            return self[0].display_name
    @property
    def mailboxes(self):
        # A bare mailbox is itself the list; an invalid mailbox contributes
        # nothing; otherwise delegate to the group.
        if self[0].token_type == 'mailbox':
            return [self[0]]
        elif self[0].token_type == 'invalid-mailbox':
            return []
        return self[0].mailboxes
    @property
    def all_mailboxes(self):
        # Like mailboxes, but invalid mailboxes are included.
        if self[0].token_type == 'mailbox':
            return [self[0]]
        elif self[0].token_type == 'invalid-mailbox':
            return [self[0]]
        return self[0].all_mailboxes
class MailboxList(TokenList):
    """A comma-separated list of mailboxes."""
    token_type = 'mailbox-list'
    @property
    def mailboxes(self):
        """Only the valid mailbox children."""
        found = []
        for tok in self:
            if tok.token_type == 'mailbox':
                found.append(tok)
        return found
    @property
    def all_mailboxes(self):
        """Both valid and invalid mailbox children."""
        return [tok for tok in self
                if tok.token_type in ('mailbox', 'invalid-mailbox')]
class GroupList(TokenList):
    """The contents of a group between ':' and ';'."""
    token_type = 'group-list'
    @property
    def mailboxes(self):
        """Valid mailboxes, or [] when there is no leading mailbox-list."""
        if self and self[0].token_type == 'mailbox-list':
            return self[0].mailboxes
        return []
    @property
    def all_mailboxes(self):
        """All mailboxes, or [] when there is no leading mailbox-list."""
        if self and self[0].token_type == 'mailbox-list':
            return self[0].all_mailboxes
        return []
class Group(TokenList):
    """An RFC 5322 group: display-name ":" [group-list] ";"."""
    token_type = "group"

    @property
    def mailboxes(self):
        # Children are display-name, ':', then (optionally) the group-list.
        body = self[2]
        if body.token_type != 'group-list':
            return []
        return body.mailboxes

    @property
    def all_mailboxes(self):
        body = self[2]
        if body.token_type != 'group-list':
            return []
        return body.all_mailboxes

    @property
    def display_name(self):
        return self[0].display_name
class NameAddr(TokenList):
    """name-addr = [display-name] angle-addr (RFC 5322)."""
    token_type = 'name-addr'
    @property
    def display_name(self):
        # A single child means there is no display-name, only the
        # angle-addr.
        if len(self) == 1:
            return None
        return self[0].display_name
    @property
    def local_part(self):
        # The angle-addr is always the last child; delegate to it.
        return self[-1].local_part
    @property
    def domain(self):
        return self[-1].domain
    @property
    def route(self):
        return self[-1].route
    @property
    def addr_spec(self):
        return self[-1].addr_spec
class AngleAddr(TokenList):
    """An addr-spec in angle brackets, optionally with an obsolete route."""
    token_type = 'angle-addr'

    @property
    def local_part(self):
        return next((tok.local_part for tok in self
                     if tok.token_type == 'addr-spec'), None)

    @property
    def domain(self):
        return next((tok.domain for tok in self
                     if tok.token_type == 'addr-spec'), None)

    @property
    def route(self):
        return next((tok.domains for tok in self
                     if tok.token_type == 'obs-route'), None)

    @property
    def addr_spec(self):
        # SMTP's null address '<>' has no addr-spec child at all.
        return next((tok.addr_spec for tok in self
                     if tok.token_type == 'addr-spec'), '<>')
class ObsRoute(TokenList):
    """An obsolete route: a series of '@domain' hops ending in ':'."""
    token_type = 'obs-route'

    @property
    def domains(self):
        """The domain of each hop, in order."""
        return [tok.domain for tok in self if tok.token_type == 'domain']
class Mailbox(TokenList):
    """An RFC 5322 mailbox: a name-addr or a bare addr-spec."""
    token_type = 'mailbox'

    @property
    def display_name(self):
        """Display name when wrapped in a name-addr, else None."""
        head = self[0]
        if head.token_type == 'name-addr':
            return head.display_name
        return None

    @property
    def local_part(self):
        return self[0].local_part

    @property
    def domain(self):
        return self[0].domain

    @property
    def route(self):
        # Only a name-addr can carry an obsolete route.
        head = self[0]
        if head.token_type == 'name-addr':
            return head.route
        return None

    @property
    def addr_spec(self):
        return self[0].addr_spec
class InvalidMailbox(TokenList):
    """A mailbox that failed to parse; every accessor returns None."""
    token_type = 'invalid-mailbox'
    @property
    def display_name(self):
        return None
    # Alias all the address accessors to the same always-None property.
    local_part = domain = route = addr_spec = display_name
class Domain(TokenList):
    """A domain name; .domain is the value with whitespace removed."""
    token_type = 'domain'
    @property
    def domain(self):
        # Collapse any folding whitespace out of the serialized value.
        return ''.join(super().value.split())
class DotAtom(TokenList):
    """A dot-atom: [CFWS] dot-atom-text [CFWS]."""
    token_type = 'dot-atom'
class DotAtomText(TokenList):
    """The dotted atext core of a dot-atom, without any CFWS."""
    token_type = 'dot-atom-text'
class AddrSpec(TokenList):
    """An addr-spec: local-part "@" domain."""
    token_type = 'addr-spec'
    @property
    def local_part(self):
        return self[0].local_part
    @property
    def domain(self):
        # Fewer than three children means the '@ domain' part is missing.
        if len(self) < 3:
            return None
        return self[-1].domain
    @property
    def value(self):
        if len(self) < 3:
            return self[0].value
        # Drop whitespace around the '@' when serializing.
        return self[0].value.rstrip()+self[1].value+self[2].value.lstrip()
    @property
    def addr_spec(self):
        """The canonical addr-spec, quoting the local part if needed."""
        # Quote the local part if it contains any characters that would
        # end a dot-atom.
        nameset = set(self.local_part)
        if len(nameset) > len(nameset-DOT_ATOM_ENDS):
            lp = quote_string(self.local_part)
        else:
            lp = self.local_part
        if self.domain is not None:
            return lp + '@' + self.domain
        return lp
class ObsLocalPart(TokenList):
    """An obsolete local-part: word *("." word)."""
    token_type = 'obs-local-part'
class DisplayName(Phrase):
    """The display-name of a name-addr or group."""
    token_type = 'display-name'
    @property
    def display_name(self):
        """The phrase value with leading and trailing CFWS removed."""
        res = TokenList(self)
        # The CFWS may either be a direct child or attached to the first
        # (or last) child token; handle both placements.
        if res[0].token_type == 'cfws':
            res.pop(0)
        else:
            if res[0][0].token_type == 'cfws':
                res[0] = TokenList(res[0][1:])
        if res[-1].token_type == 'cfws':
            res.pop()
        else:
            if res[-1][-1].token_type == 'cfws':
                res[-1] = TokenList(res[-1][:-1])
        return res.value
    @property
    def value(self):
        """Serialized form; re-quoted if defects or quoted strings exist."""
        quote = False
        if self.defects:
            quote = True
        else:
            for x in self:
                if x.token_type == 'quoted-string':
                    quote = True
        if quote:
            # Preserve a single space where CFWS was adjacent to the name.
            pre = post = ''
            if self[0].token_type=='cfws' or self[0][0].token_type=='cfws':
                pre = ' '
            if self[-1].token_type=='cfws' or self[-1][-1].token_type=='cfws':
                post = ' '
            return pre+quote_string(self.display_name)+post
        else:
            return super().value
class LocalPart(TokenList):
    """The local-part of an addr-spec."""
    token_type = 'local-part'
    @property
    def value(self):
        if self[0].token_type == "quoted-string":
            return self[0].quoted_value
        else:
            return self[0].value
    @property
    def local_part(self):
        """The local part with CFWS stripped from ends and around dots."""
        # Strip whitespace from front, back, and around dots.
        # Sentinel DOTs at both ends let the loop treat the boundaries
        # like interior dots; they are sliced off again at the end.
        res = [DOT]
        last = DOT
        last_is_tl = False
        for tok in self[0] + [DOT]:
            if tok.token_type == 'cfws':
                continue
            if (last_is_tl and tok.token_type == 'dot' and
                    last[-1].token_type == 'cfws'):
                # Drop trailing CFWS from the token preceding a dot.
                res[-1] = TokenList(last[:-1])
            is_tl = isinstance(tok, TokenList)
            if (is_tl and last.token_type == 'dot' and
                    tok[0].token_type == 'cfws'):
                # Drop leading CFWS from the token following a dot.
                res.append(TokenList(tok[1:]))
            else:
                res.append(tok)
            last = res[-1]
            last_is_tl = is_tl
        res = TokenList(res[1:-1])
        return res.value
class DomainLiteral(TokenList):
    """A domain-literal: '[' dtext ']'."""
    token_type = 'domain-literal'

    @property
    def domain(self):
        """The literal with folding whitespace removed."""
        return ''.join(super().value.split())

    @property
    def ip(self):
        """The ptext content of the literal (e.g. the IP address), if any."""
        return next((tok.value for tok in self
                     if tok.token_type == 'ptext'), None)
class MIMEVersion(TokenList):
    """A MIME-Version header value."""
    token_type = 'mime-version'
    # Filled in by the MIME-Version parser; left None when invalid.
    major = None
    minor = None
class Parameter(TokenList):
    """A single MIME parameter (attribute=value), RFC 2045/2231."""
    token_type = 'parameter'
    # RFC 2231 flags, set by the parser: sectioned means the name carried
    # a *N section marker; extended means the value was %-encoded.
    sectioned = False
    extended = False
    charset = 'us-ascii'
    @property
    def section_number(self):
        # Because the first token, the attribute (name) eats CFWS, the second
        # token is always the section if there is one.
        return self[1].number if self.sectioned else 0
    @property
    def param_value(self):
        """The parameter's value text, unwrapping any quoted string."""
        # This is part of the "handle quoted extended parameters" hack.
        # The nested loops deliberately rebind 'token' to dig through a
        # quoted-string -> bare-quoted-string -> value chain.
        for token in self:
            if token.token_type == 'value':
                return token.stripped_value
            if token.token_type == 'quoted-string':
                for token in token:
                    if token.token_type == 'bare-quoted-string':
                        for token in token:
                            if token.token_type == 'value':
                                return token.stripped_value
        return ''
class InvalidParameter(Parameter):
    """A parameter that could not be parsed."""
    token_type = 'invalid-parameter'
class Attribute(TokenList):
    """A MIME parameter attribute (the name side of name=value)."""
    token_type = 'attribute'

    @property
    def stripped_value(self):
        """Value of the first attrtext/extended-attrtext child, or None."""
        for tok in self:
            if tok.token_type.endswith('attrtext'):
                return tok.value
        return None
class Section(TokenList):
    """An RFC 2231 section marker (*N) of a multi-part parameter."""
    token_type = 'section'
    # Section index; left None when the number is invalid.
    number = None
class Value(TokenList):
    """The value side of a MIME parameter."""
    token_type = 'value'
    @property
    def stripped_value(self):
        """The value text with CFWS and quoting layers removed."""
        token = self[0]
        # Skip a leading CFWS child, if present.
        if token.token_type == 'cfws':
            token = self[1]
        if token.token_type.endswith(
                ('quoted-string', 'attribute', 'extended-attribute')):
            return token.stripped_value
        return self.value
class MimeParameters(TokenList):
    """The full parameter list of a parameterized MIME header.

    Handles reassembly of RFC 2231 sectioned/extended parameters.
    """
    token_type = 'mime-parameters'
    @property
    def params(self):
        """Yield (name, value) pairs, reassembling RFC 2231 sections."""
        # The RFC specifically states that the ordering of parameters is not
        # guaranteed and may be reordered by the transport layer.  So we have
        # to assume the RFC 2231 pieces can come in any order.  However, we
        # output them in the order that we first see a given name, which gives
        # us a stable __str__.
        params = OrderedDict()
        for token in self:
            if not token.token_type.endswith('parameter'):
                continue
            if token[0].token_type != 'attribute':
                continue
            name = token[0].value.strip()
            if name not in params:
                params[name] = []
            params[name].append((token.section_number, token))
        for name, parts in params.items():
            # Sort the collected pieces by section number.
            parts = sorted(parts)
            # XXX: there might be more recovery we could do here if, for
            # example, this is really a case of a duplicate attribute name.
            value_parts = []
            # The charset declared on the first section applies to all.
            charset = parts[0][1].charset
            for i, (section_number, param) in enumerate(parts):
                if section_number != i:
                    param.defects.append(errors.InvalidHeaderDefect(
                        "inconsistent multipart parameter numbering"))
                value = param.param_value
                if param.extended:
                    try:
                        value = urllib.parse.unquote_to_bytes(value)
                    except UnicodeEncodeError:
                        # source had surrogate escaped bytes.  What we do now
                        # is a bit of an open question.  I'm not sure this is
                        # the best choice, but it is what the old algorithm did
                        value = urllib.parse.unquote(value, encoding='latin-1')
                    else:
                        try:
                            value = value.decode(charset, 'surrogateescape')
                        except LookupError:
                            # XXX: there should really be a custom defect for
                            # unknown character set to make it easy to find,
                            # because otherwise unknown charset is a silent
                            # failure.
                            value = value.decode('us-ascii', 'surrogateescape')
                        if utils._has_surrogates(value):
                            param.defects.append(errors.UndecodableBytesDefect())
                value_parts.append(value)
            value = ''.join(value_parts)
            yield name, value
    def __str__(self):
        params = []
        for name, value in self.params:
            if value:
                params.append('{}={}'.format(name, quote_string(value)))
            else:
                params.append(name)
        params = '; '.join(params)
        return ' ' + params if params else ''
class ParameterizedHeaderValue(TokenList):
    """Base for header values that may carry a mime-parameters tail."""
    @property
    def params(self):
        # Search from the end: the parameter list, if any, is the last
        # significant child.
        for token in reversed(self):
            if token.token_type == 'mime-parameters':
                return token.params
        return {}
    @property
    def parts(self):
        if self and self[-1].token_type == 'mime-parameters':
            # We don't want to start a new line if all of the params don't fit
            # after the value, so unwrap the parameter list.
            return TokenList(self[:-1] + self[-1])
        return TokenList(self).parts
class ContentType(ParameterizedHeaderValue):
    """A Content-Type header value."""
    token_type = 'content-type'
    # Defaults per RFC 2045; overwritten by the parser on success.
    maintype = 'text'
    subtype = 'plain'
class ContentDisposition(ParameterizedHeaderValue):
    """A Content-Disposition header value."""
    token_type = 'content-disposition'
    # Set by the parser; None when the value could not be parsed.
    content_disposition = None
class ContentTransferEncoding(TokenList):
    """A Content-Transfer-Encoding header value."""
    token_type = 'content-transfer-encoding'
    # Default per RFC 2045; overwritten by the parser on success.
    cte = '7bit'
class HeaderLabel(TokenList):
    """The header field name plus the following colon."""
    token_type = 'header-label'
class Header(TokenList):
    """A complete header: a header-label followed by its value."""
    token_type = 'header'
    def _fold(self, folded):
        # NOTE: this consumes self (pop) while folding; the token list is
        # empty afterwards.  First child is the header-label.
        folded.append(str(self.pop(0)))
        folded.lastlen = len(folded.current[0])
        # The first line of the header is different from all others: we don't
        # want to start a new object on a new line if it has any fold points in
        # it that would allow part of it to be on the first header line.
        # Further, if the first fold point would fit on the new line, we want
        # to do that, but if it doesn't we want to put it on the first line.
        # Folded supports this via the stickyspace attribute.  If this
        # attribute is not None, it does the special handling.
        folded.stickyspace = str(self.pop(0)) if self[0].token_type == 'cfws' else ''
        rest = self.pop(0)
        if self:
            raise ValueError("Malformed Header token list")
        rest._fold(folded)
#
# Terminal classes and instances
#
class Terminal(str):
    """Base class for leaf tokens: a str carrying token_type and defects."""

    def __new__(cls, value, token_type):
        instance = super().__new__(cls, value)
        instance.token_type = token_type
        instance.defects = []
        return instance

    def __repr__(self):
        return "{}({})".format(self.__class__.__name__, super().__repr__())

    @property
    def all_defects(self):
        # A terminal has no children, so its own defects are all of them.
        return list(self.defects)

    def _pp(self, indent=''):
        """Return the pretty-print lines for this terminal."""
        defects = ' {}'.format(self.defects) if self.defects else ''
        return ["{}{}/{}({}){}".format(indent,
                                       self.__class__.__name__,
                                       self.token_type,
                                       super().__repr__(),
                                       defects)]

    def cte_encode(self, charset, policy):
        value = str(self)
        try:
            value.encode('us-ascii')
        except UnicodeEncodeError:
            return _ew.encode(value, charset)
        return value

    def pop_trailing_ws(self):
        # Terminates the TokenList recursion.
        return None

    def pop_leading_fws(self):
        # Terminates the TokenList recursion.
        return None

    @property
    def comments(self):
        return []

    def has_leading_comment(self):
        return False

    def __getnewargs__(self):
        return (str(self), self.token_type)
class WhiteSpaceTerminal(Terminal):
    """A terminal that folds as whitespace; its value is a single space."""
    @property
    def value(self):
        return ' '
    def startswith_fws(self):
        return True
    has_fws = True
class ValueTerminal(Terminal):
    """A terminal carrying significant (non-whitespace) text."""
    @property
    def value(self):
        return self
    def startswith_fws(self):
        return False
    has_fws = False
    def as_encoded_word(self, charset):
        # Encode this terminal's text as an RFC 2047 encoded word.
        return _ew.encode(str(self), charset)
class EWWhiteSpaceTerminal(WhiteSpaceTerminal):
    """Whitespace between two encoded words; invisible in the decoded value."""
    @property
    def value(self):
        return ''
    @property
    def encoded(self):
        return self[:]
    def __str__(self):
        return ''
    has_fws = True
# XXX these need to become classes and used as instances so
# that a program can't change them in a parse tree and screw
# up other parse trees.  Maybe should have tests for that, too.
# Shared singleton terminals used throughout the parser (see XXX above);
# note that some code compares against DOT by identity ('is DOT').
DOT = ValueTerminal('.', 'dot')
ListSeparator = ValueTerminal(',', 'list-separator')
RouteComponentMarker = ValueTerminal('@', 'route-component-marker')
#
# Parser
#
# Parse strings according to RFC822/2047/2822/5322 rules.
#
# This is a stateless parser.  Each get_XXX function accepts a string and
# returns either a Terminal or a TokenList representing the RFC object named
# by the method and a string containing the remaining unparsed characters
# from the input.  Thus a parser method consumes the next syntactic construct
# of a given type and returns a token representing the construct plus the
# unparsed remainder of the input string.
#
# For example, if the first element of a structured header is a 'phrase',
# then:
#
#     phrase, value = get_phrase(value)
#
# returns the complete phrase from the start of the string value, plus any
# characters left in the string after the phrase is removed.

# Helper matchers built from the *_ENDS character sets.  The replacements
# escape backslash and ']' so the sets can be embedded in a regex character
# class.  Raw strings are used for the ']' escapes because the previous
# '\]' literals relied on Python preserving invalid escape sequences,
# which is deprecated (and slated to become an error); the produced
# patterns are identical.
_wsp_splitter = re.compile(r'([{}]+)'.format(''.join(WSP))).split
_non_atom_end_matcher = re.compile(r"[^{}]+".format(
    ''.join(ATOM_ENDS).replace('\\', '\\\\').replace(']', r'\]'))).match
_non_printable_finder = re.compile(r"[\x00-\x20\x7F]").findall
_non_token_end_matcher = re.compile(r"[^{}]+".format(
    ''.join(TOKEN_ENDS).replace('\\', '\\\\').replace(']', r'\]'))).match
_non_attribute_end_matcher = re.compile(r"[^{}]+".format(
    ''.join(ATTRIBUTE_ENDS).replace('\\', '\\\\').replace(']', r'\]'))).match
_non_extended_attribute_end_matcher = re.compile(r"[^{}]+".format(
    ''.join(EXTENDED_ATTRIBUTE_ENDS).replace(
        '\\', '\\\\').replace(']', r'\]'))).match
def _validate_xtext(xtext):
    """If input token contains ASCII non-printables, register a defect.

    Mutates xtext.defects in place; returns None.
    """
    non_printables = _non_printable_finder(xtext)
    if non_printables:
        xtext.defects.append(errors.NonPrintableDefect(non_printables))
    if utils._has_surrogates(xtext):
        xtext.defects.append(errors.UndecodableBytesDefect(
            "Non-ASCII characters found in header token"))
def _get_ptext_to_endchars(value, endchars):
    """Scan printables/quoted-pairs until endchars and return unquoted ptext.

    This function turns a run of qcontent, ccontent-without-comments, or
    dtext-with-quoted-printables into a single string by unquoting any
    quoted printables.  It returns the string, the remaining value, and
    a flag that is True iff there were any quoted printables decoded.

    """
    fragment, *remainder = _wsp_splitter(value, 1)
    vchars = []
    escape = False
    had_qp = False
    # Initialize pos so that an empty fragment (e.g. empty input, which
    # makes range() produce nothing) does not leave it unbound when the
    # for/else clause or the final slice below runs.
    pos = 0
    for pos in range(len(fragment)):
        if fragment[pos] == '\\':
            if escape:
                escape = False
                had_qp = True
            else:
                escape = True
                continue
        if escape:
            escape = False
        elif fragment[pos] in endchars:
            # Stop before the end character; it stays in the remainder.
            break
        vchars.append(fragment[pos])
    else:
        # The whole fragment was consumed; advance past its last character.
        pos = pos + 1
    return ''.join(vchars), ''.join([fragment[pos:]] + remainder), had_qp
def get_fws(value):
    """FWS = 1*WSP

    This isn't the RFC definition.  We're using fws to represent tokens
    where folding can be done, but when we are parsing the *un*folding has
    already been done so we don't need to watch out for CRLF.
    """
    rest = value.lstrip()
    ws_len = len(value) - len(rest)
    return WhiteSpaceTerminal(value[:ws_len], 'fws'), rest
def get_encoded_word(value):
    """ encoded-word = "=?" charset "?" encoding "?" encoded-text "?="

    Raises HeaderParseError if value does not start with a well-formed
    encoded word; otherwise returns the EncodedWord token and the rest.
    """
    ew = EncodedWord()
    if not value.startswith('=?'):
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    tok, *remainder = value[2:].split('?=', 1)
    if tok == value[2:]:
        # No closing '?=' was found.
        raise errors.HeaderParseError(
            "expected encoded word but found {}".format(value))
    remstr = ''.join(remainder)
    if len(remstr) > 1 and remstr[0] in hexdigits and remstr[1] in hexdigits:
        # The ? after the CTE was followed by an encoded word escape (=XX).
        rest, *remainder = remstr.split('?=', 1)
        tok = tok + '?=' + rest
    if len(tok.split()) > 1:
        ew.defects.append(errors.InvalidHeaderDefect(
            "whitespace inside encoded word"))
    ew.cte = value
    value = ''.join(remainder)
    try:
        text, charset, lang, defects = _ew.decode('=?' + tok + '?=')
    except ValueError:
        raise errors.HeaderParseError(
            "encoded word format invalid: '{}'".format(ew.cte))
    ew.charset = charset
    ew.lang = lang
    ew.defects.extend(defects)
    # Re-tokenize the decoded text into fws/vtext children.
    while text:
        if text[0] in WSP:
            token, text = get_fws(text)
            ew.append(token)
            continue
        chars, *remainder = _wsp_splitter(text, 1)
        vtext = ValueTerminal(chars, 'vtext')
        _validate_xtext(vtext)
        ew.append(vtext)
        text = ''.join(remainder)
    return ew, value
def get_unstructured(value):
    """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct
       obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS)
       obs-utext = %d0 / obs-NO-WS-CTL / LF / CR

    obs-NO-WS-CTL is control characters except WSP/CR/LF.

    So, basically, we have printable runs, plus control characters or nulls in
    the obsolete syntax, separated by whitespace.  Since RFC 2047 uses the
    obsolete syntax in its specification, but requires whitespace on either
    side of the encoded words, I can see no reason to need to separate the
    non-printable-non-whitespace from the printable runs if they occur, so we
    parse this into xtext tokens separated by WSP tokens.

    Because an 'unstructured' value must by definition constitute the entire
    value, this 'get' routine does not return a remaining value, only the
    parsed TokenList.

    """
    # XXX: but what about bare CR and LF?  They might signal the start or
    # end of an encoded word.  YAGNI for now, since our current parsers
    # will never send us strings with bare CR or LF.
    unstructured = UnstructuredTokenList()
    while value:
        if value[0] in WSP:
            token, value = get_fws(value)
            unstructured.append(token)
            continue
        if value.startswith('=?'):
            try:
                token, value = get_encoded_word(value)
            except errors.HeaderParseError:
                # XXX: Need to figure out how to register defects when
                # appropriate here.
                pass
            else:
                # RFC 2047 requires whitespace around encoded words; note
                # a defect if it is missing, and make the whitespace
                # between two adjacent encoded words invisible.
                have_ws = True
                if len(unstructured) > 0:
                    if unstructured[-1].token_type != 'fws':
                        unstructured.defects.append(errors.InvalidHeaderDefect(
                            "missing whitespace before encoded word"))
                        have_ws = False
                if have_ws and len(unstructured) > 1:
                    if unstructured[-2].token_type == 'encoded-word':
                        unstructured[-1] = EWWhiteSpaceTerminal(
                            unstructured[-1], 'fws')
                unstructured.append(token)
                continue
        tok, *remainder = _wsp_splitter(value, 1)
        vtext = ValueTerminal(tok, 'vtext')
        _validate_xtext(vtext)
        unstructured.append(vtext)
        value = ''.join(remainder)
    return unstructured
def get_qp_ctext(value):
    """ctext = <printable ascii except backslash and parens>

    This is not the RFC ctext, since we are handling nested comments in
    comment and unquoting quoted-pairs here.  We allow anything except the
    '()' characters, but if we find any ASCII other than the RFC defined
    printable ASCII, a NonPrintableDefect is added to the token's defects
    list.  Since quoted pairs are converted to their unquoted values, what
    is returned is a 'ptext' token.  In this case it is a
    WhiteSpaceTerminal, so its value is ' '.

    """
    text, rest, _ = _get_ptext_to_endchars(value, '()')
    token = WhiteSpaceTerminal(text, 'ptext')
    _validate_xtext(token)
    return token, rest
def get_qcontent(value):
    """qcontent = qtext / quoted-pair

    We allow anything except the DQUOTE character, but if we find any ASCII
    other than the RFC defined printable ASCII, a NonPrintableDefect is
    added to the token's defects list.  Any quoted pairs are converted to
    their unquoted values, so what is returned is a 'ptext' token.  In this
    case it is a ValueTerminal.

    """
    text, rest, _ = _get_ptext_to_endchars(value, '"')
    token = ValueTerminal(text, 'ptext')
    _validate_xtext(token)
    return token, rest
def get_atext(value):
    """atext = <matches _atext_matcher>

    We allow any non-ATOM_ENDS in atext, but add an InvalidATextDefect to
    the token's defects list if we find non-atext characters.
    """
    match = _non_atom_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected atext but found '{}'".format(value))
    run = match.group()
    token = ValueTerminal(run, 'atext')
    _validate_xtext(token)
    return token, value[len(run):]
def get_bare_quoted_string(value):
    """bare-quoted-string = DQUOTE *([FWS] qcontent) [FWS] DQUOTE

    A quoted-string without the leading or trailing white space.  Its
    value is the text between the quote marks, with whitespace
    preserved and quoted pairs decoded.
    """
    if value[0] != '"':
        raise errors.HeaderParseError(
            "expected '\"' but found '{}'".format(value))
    bare_quoted_string = BareQuotedString()
    value = value[1:]
    while value and value[0] != '"':
        if value[0] in WSP:
            token, value = get_fws(value)
        elif value[:2] == '=?':
            # An encoded word inside a quoted string is invalid, but we
            # decode it anyway and register the defect.
            try:
                token, value = get_encoded_word(value)
                bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
                    "encoded word inside quoted string"))
            except errors.HeaderParseError:
                token, value = get_qcontent(value)
        else:
            token, value = get_qcontent(value)
        bare_quoted_string.append(token)
    if not value:
        # Input ran out before the closing quote.
        bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
            "end of header inside quoted string"))
        return bare_quoted_string, value
    return bare_quoted_string, value[1:]
def get_comment(value):
    """comment = "(" *([FWS] ccontent) [FWS] ")"
       ccontent = ctext / quoted-pair / comment

    We handle nested comments here, and quoted-pair in our qp-ctext routine.
    """
    if value and value[0] != '(':
        raise errors.HeaderParseError(
            "expected '(' but found '{}'".format(value))
    result = Comment()
    value = value[1:]
    while value:
        if value[0] == ')':
            # Consume the closing paren and finish.
            return result, value[1:]
        if value[0] in WSP:
            token, value = get_fws(value)
        elif value[0] == '(':
            # Recurse for a nested comment.
            token, value = get_comment(value)
        else:
            token, value = get_qp_ctext(value)
        result.append(token)
    # Input ran out before the closing paren.
    result.defects.append(errors.InvalidHeaderDefect(
        "end of header inside comment"))
    return result, value
def get_cfws(value):
    """CFWS = (1*([FWS] comment) [FWS]) / FWS

    Collect any run of folding whitespace and comments.
    """
    cfws = CFWSList()
    while value and value[0] in CFWS_LEADER:
        getter = get_fws if value[0] in WSP else get_comment
        token, value = getter(value)
        cfws.append(token)
    return cfws, value
def get_quoted_string(value):
    """quoted-string = [CFWS] <bare-quoted-string> [CFWS]

    'bare-quoted-string' is an intermediate class defined by this
    parser and not by the RFC grammar.  It is the quoted string
    without any attached CFWS.
    """
    quoted_string = QuotedString()

    def _collect_cfws(rest):
        # Append a CFWS child when one is present and return the remainder.
        if rest and rest[0] in CFWS_LEADER:
            tok, rest = get_cfws(rest)
            quoted_string.append(tok)
        return rest

    value = _collect_cfws(value)
    token, value = get_bare_quoted_string(value)
    quoted_string.append(token)
    value = _collect_cfws(value)
    return quoted_string, value
def get_atom(value):
    """atom = [CFWS] 1*atext [CFWS]

    An atom could be an rfc2047 encoded word.
    """
    atom = Atom()
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        atom.append(token)
    if value and value[0] in ATOM_ENDS:
        raise errors.HeaderParseError(
            "expected atom but found '{}'".format(value))
    if value.startswith('=?'):
        # Prefer an encoded word; fall back to plain atext if malformed.
        try:
            token, value = get_encoded_word(value)
        except errors.HeaderParseError:
            # XXX: need to figure out how to register defects when
            # appropriate here.
            token, value = get_atext(value)
    else:
        token, value = get_atext(value)
    atom.append(token)
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        atom.append(token)
    return atom, value
def get_dot_atom_text(value):
    """ dot-text = 1*atext *("." 1*atext)

    Raises HeaderParseError on a leading or trailing dot.
    """
    dot_atom_text = DotAtomText()
    if not value or value[0] in ATOM_ENDS:
        raise errors.HeaderParseError("expected atom at a start of "
            "dot-atom-text but found '{}'".format(value))
    while value and value[0] not in ATOM_ENDS:
        token, value = get_atext(value)
        dot_atom_text.append(token)
        if value and value[0] == '.':
            dot_atom_text.append(DOT)
            value = value[1:]
    # Identity check against the shared DOT singleton detects a trailing dot.
    if dot_atom_text[-1] is DOT:
        raise errors.HeaderParseError("expected atom at end of dot-atom-text "
            "but found '{}'".format('.'+value))
    return dot_atom_text, value
def get_dot_atom(value):
    """ dot-atom = [CFWS] dot-atom-text [CFWS]

    Any place we can have a dot atom, we could instead have an rfc2047 encoded
    word.
    """
    dot_atom = DotAtom()
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        dot_atom.append(token)
    if value.startswith('=?'):
        # Prefer an encoded word; fall back to dot-atom-text if malformed.
        try:
            token, value = get_encoded_word(value)
        except errors.HeaderParseError:
            # XXX: need to figure out how to register defects when
            # appropriate here.
            token, value = get_dot_atom_text(value)
    else:
        token, value = get_dot_atom_text(value)
    dot_atom.append(token)
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        dot_atom.append(token)
    return dot_atom, value
def get_word(value):
    """word = atom / quoted-string

    Either atom or quoted-string may start with CFWS.  We have to peel off this
    CFWS first to determine which type of word to parse.  Afterward we splice
    the leading CFWS, if any, into the parsed sub-token.

    If neither an atom or a quoted-string is found before the next special, a
    HeaderParseError is raised.

    The token returned is either an Atom or a QuotedString, as appropriate.
    This means the 'word' level of the formal grammar is not represented in the
    parse tree; this is because having that extra layer when manipulating the
    parse tree is more confusing than it is helpful.

    """
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    else:
        leader = None
    if value[0]=='"':
        token, value = get_quoted_string(value)
    elif value[0] in SPECIALS:
        raise errors.HeaderParseError("Expected 'atom' or 'quoted-string' "
                                      "but found '{}'".format(value))
    else:
        token, value = get_atom(value)
    if leader is not None:
        # Splice the peeled-off CFWS back onto the front of the token.
        token[:0] = [leader]
    return token, value
def get_phrase(value):
    """ phrase = 1*word / obs-phrase
        obs-phrase = word *(word / "." / CFWS)

    This means a phrase can be a sequence of words, periods, and CFWS in any
    order as long as it starts with at least one word.  If anything other than
    words is detected, an ObsoleteHeaderDefect is added to the token's defect
    list.  We also accept a phrase that starts with CFWS followed by a dot;
    this is registered as an InvalidHeaderDefect, since it is not supported by
    even the obsolete grammar.

    """
    phrase = Phrase()
    try:
        token, value = get_word(value)
        phrase.append(token)
    except errors.HeaderParseError:
        phrase.defects.append(errors.InvalidHeaderDefect(
            "phrase does not start with word"))
    while value and value[0] not in PHRASE_ENDS:
        if value[0]=='.':
            # Obsolete syntax: a bare period inside the phrase.
            phrase.append(DOT)
            phrase.defects.append(errors.ObsoleteHeaderDefect(
                "period in 'phrase'"))
            value = value[1:]
        else:
            try:
                token, value = get_word(value)
            except errors.HeaderParseError:
                if value[0] in CFWS_LEADER:
                    # Obsolete syntax: CFWS not attached to any word.
                    token, value = get_cfws(value)
                    phrase.defects.append(errors.ObsoleteHeaderDefect(
                        "comment found without atom"))
                else:
                    raise
            phrase.append(token)
    return phrase, value
def get_local_part(value):
    """ local-part = dot-atom / quoted-string / obs-local-part

    Registers defects for obsolete syntax and non-ASCII content.
    """
    local_part = LocalPart()
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected local-part but found '{}'".format(value))
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        try:
            token, value = get_word(value)
        except errors.HeaderParseError:
            if value[0] != '\\' and value[0] in PHRASE_ENDS:
                raise
            # Fall through to the obs-local-part handling below.
            token = TokenList()
    if leader is not None:
        token[:0] = [leader]
    local_part.append(token)
    if value and (value[0]=='\\' or value[0] not in PHRASE_ENDS):
        # More follows; reparse everything as an obsolete local part.
        obs_local_part, value = get_obs_local_part(str(local_part) + value)
        if obs_local_part.token_type == 'invalid-obs-local-part':
            local_part.defects.append(errors.InvalidHeaderDefect(
                "local-part is not dot-atom, quoted-string, or obs-local-part"))
        else:
            local_part.defects.append(errors.ObsoleteHeaderDefect(
                "local-part is not a dot-atom (contains CFWS)"))
        local_part[0] = obs_local_part
    try:
        local_part.value.encode('ascii')
    except UnicodeEncodeError:
        local_part.defects.append(errors.NonASCIILocalPartDefect(
                "local-part contains non-ASCII characters)"))
    return local_part, value
def get_obs_local_part(value):
    """ obs-local-part = word *("." word)

    The token_type is changed to 'invalid-obs-local-part' when any
    defect is registered during parsing.
    """
    obs_local_part = ObsLocalPart()
    last_non_ws_was_dot = False
    while value and (value[0]=='\\' or value[0] not in PHRASE_ENDS):
        if value[0] == '.':
            if last_non_ws_was_dot:
                obs_local_part.defects.append(errors.InvalidHeaderDefect(
                    "invalid repeated '.'"))
            obs_local_part.append(DOT)
            last_non_ws_was_dot = True
            value = value[1:]
            continue
        elif value[0]=='\\':
            # A stray backslash outside quoting; keep it as a special.
            obs_local_part.append(ValueTerminal(value[0],
                                                'misplaced-special'))
            value = value[1:]
            obs_local_part.defects.append(errors.InvalidHeaderDefect(
                "'\\' character outside of quoted-string/ccontent"))
            last_non_ws_was_dot = False
            continue
        if obs_local_part and obs_local_part[-1].token_type != 'dot':
            obs_local_part.defects.append(errors.InvalidHeaderDefect(
                "missing '.' between words"))
        try:
            token, value = get_word(value)
            last_non_ws_was_dot = False
        except errors.HeaderParseError:
            if value[0] not in CFWS_LEADER:
                raise
            token, value = get_cfws(value)
        obs_local_part.append(token)
    # A leading dot (possibly behind CFWS) is invalid.
    if (obs_local_part[0].token_type == 'dot' or
            obs_local_part[0].token_type=='cfws' and
            obs_local_part[1].token_type=='dot'):
        obs_local_part.defects.append(errors.InvalidHeaderDefect(
            "Invalid leading '.' in local part"))
    # As is a trailing dot (possibly before CFWS).
    if (obs_local_part[-1].token_type == 'dot' or
            obs_local_part[-1].token_type=='cfws' and
            obs_local_part[-2].token_type=='dot'):
        obs_local_part.defects.append(errors.InvalidHeaderDefect(
            "Invalid trailing '.' in local part"))
    if obs_local_part.defects:
        obs_local_part.token_type = 'invalid-obs-local-part'
    return obs_local_part, value
def get_dtext(value):
    """dtext = <printable ascii except backslash and brackets> / obs-dtext

    obs-dtext = obs-NO-WS-CTL / quoted-pair

    We allow anything except the excluded characters, but if we find any
    ASCII other than the RFC defined printable ASCII, a NonPrintableDefect
    is added to the token's defects list.  Quoted pairs are converted to
    their unquoted values, so what is returned is a ptext token, in this
    case a ValueTerminal.  If there were quoted-printables, an
    ObsoleteHeaderDefect is added to the returned token's defect list.

    """
    text, rest, had_qp = _get_ptext_to_endchars(value, '[]')
    token = ValueTerminal(text, 'ptext')
    if had_qp:
        token.defects.append(errors.ObsoleteHeaderDefect(
            "quoted printable found in domain-literal"))
    _validate_xtext(token)
    return token, rest
def _check_for_early_dl_end(value, domain_literal):
    """Return True (after recording the problem) if value is exhausted.

    Used by get_domain_literal to detect end-of-input before the closing
    ']' and to synthesize the missing bracket terminal.
    """
    if value:
        return False
    # NOTE(review): the defect object is appended to the token list itself,
    # not to domain_literal.defects -- presumably .defects.append was
    # intended; confirm against TokenList's handling before changing.
    domain_literal.append(errors.InvalidHeaderDefect(
        "end of input inside domain-literal"))
    domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
    return True
def get_domain_literal(value):
    """ domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]

    """
    domain_literal = DomainLiteral()
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        domain_literal.append(token)
    if not value:
        raise errors.HeaderParseError("expected domain-literal")
    if value[0] != '[':
        raise errors.HeaderParseError("expected '[' at start of domain-literal "
                "but found '{}'".format(value))
    value = value[1:]
    # After each consuming step, bail out (recording the problem) if the
    # input ends before the closing ']'.
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    domain_literal.append(ValueTerminal('[', 'domain-literal-start'))
    if value[0] in WSP:
        token, value = get_fws(value)
        domain_literal.append(token)
    token, value = get_dtext(value)
    domain_literal.append(token)
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    if value[0] in WSP:
        token, value = get_fws(value)
        domain_literal.append(token)
    if _check_for_early_dl_end(value, domain_literal):
        return domain_literal, value
    if value[0] != ']':
        raise errors.HeaderParseError("expected ']' at end of domain-literal "
                "but found '{}'".format(value))
    domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
    value = value[1:]
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        domain_literal.append(token)
    return domain_literal, value
def get_domain(value):
    """ domain = dot-atom / domain-literal / obs-domain
        obs-domain = atom *("." atom))

    """
    domain = Domain()
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError(
            "expected domain but found '{}'".format(value))
    if value[0] == '[':
        token, value = get_domain_literal(value)
        if leader is not None:
            token[:0] = [leader]
        domain.append(token)
        return domain, value
    try:
        token, value = get_dot_atom(value)
    except errors.HeaderParseError:
        token, value = get_atom(value)
    if leader is not None:
        token[:0] = [leader]
    domain.append(token)
    if value and value[0] == '.':
        # Obsolete syntax: atoms joined by dots (possibly with CFWS).
        domain.defects.append(errors.ObsoleteHeaderDefect(
            "domain is not a dot-atom (contains CFWS)"))
        if domain[0].token_type == 'dot-atom':
            # Flatten the dot-atom so further atoms can be appended.
            domain[:] = domain[0]
        while value and value[0] == '.':
            domain.append(DOT)
            token, value = get_atom(value[1:])
            domain.append(token)
    return domain, value
def get_addr_spec(value):
    """ addr-spec = local-part "@" domain

    When no '@domain' follows the local part, an InvalidHeaderDefect is
    registered and only the local part is returned.
    """
    addr_spec = AddrSpec()
    token, value = get_local_part(value)
    addr_spec.append(token)
    if not value or value[0] != '@':
        # Defect message previously misspelled the grammar production as
        # 'add-spec'.
        addr_spec.defects.append(errors.InvalidHeaderDefect(
            "addr-spec local part with no domain"))
        return addr_spec, value
    addr_spec.append(ValueTerminal('@', 'address-at-symbol'))
    token, value = get_domain(value[1:])
    addr_spec.append(token)
    return addr_spec, value
def get_obs_route(value):
    """ obs-route = obs-domain-list ":"
        obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain])

        Returns an obs-route token with the appropriate sub-tokens (that is,
        there is no obs-domain-list in the parse tree).
    """
    obs_route = ObsRoute()
    # Skip leading CFWS and empty list elements (bare commas).
    while value and (value[0]==',' or value[0] in CFWS_LEADER):
        if value[0] in CFWS_LEADER:
            token, value = get_cfws(value)
            obs_route.append(token)
        elif value[0] == ',':
            obs_route.append(ListSeparator)
            value = value[1:]
    # The first route element is mandatory and must start with '@'.
    if not value or value[0] != '@':
        raise errors.HeaderParseError(
            "expected obs-route domain but found '{}'".format(value))
    obs_route.append(RouteComponentMarker)
    token, value = get_domain(value[1:])
    obs_route.append(token)
    # Subsequent elements are comma separated and individually optional.
    while value and value[0]==',':
        obs_route.append(ListSeparator)
        value = value[1:]
        if not value:
            break
        if value[0] in CFWS_LEADER:
            token, value = get_cfws(value)
            obs_route.append(token)
        if value[0] == '@':
            obs_route.append(RouteComponentMarker)
            token, value = get_domain(value[1:])
            obs_route.append(token)
    if not value:
        raise errors.HeaderParseError("end of header while parsing obs-route")
    if value[0] != ':':
        raise errors.HeaderParseError( "expected ':' marking end of "
            "obs-route but found '{}'".format(value))
    obs_route.append(ValueTerminal(':', 'end-of-obs-route-marker'))
    return obs_route, value[1:]
def get_angle_addr(value):
    """ angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr
        obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS]

    Returns an AngleAddr token and the remaining value.
    """
    angle_addr = AngleAddr()
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        angle_addr.append(token)
    if not value or value[0] != '<':
        raise errors.HeaderParseError(
            "expected angle-addr but found '{}'".format(value))
    angle_addr.append(ValueTerminal('<', 'angle-addr-start'))
    value = value[1:]
    # Although it is not legal per RFC5322, SMTP uses '<>' in certain
    # circumstances.
    if value[0] == '>':
        angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
        angle_addr.defects.append(errors.InvalidHeaderDefect(
            "null addr-spec in angle-addr"))
        value = value[1:]
        return angle_addr, value
    try:
        token, value = get_addr_spec(value)
    except errors.HeaderParseError:
        # No plain addr-spec; try the obsolete routed form
        # ("<@a,@b:user@example.com>") before giving up.
        try:
            token, value = get_obs_route(value)
            angle_addr.defects.append(errors.ObsoleteHeaderDefect(
                "obsolete route specification in angle-addr"))
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected addr-spec or obs-route but found '{}'".format(value))
        angle_addr.append(token)
        token, value = get_addr_spec(value)
    angle_addr.append(token)
    if value and value[0] == '>':
        value = value[1:]
    else:
        # Missing '>' is tolerated: record a defect and synthesize the
        # terminal so the parse tree is still well formed.
        angle_addr.defects.append(errors.InvalidHeaderDefect(
            "missing trailing '>' on angle-addr"))
    angle_addr.append(ValueTerminal('>', 'angle-addr-end'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        angle_addr.append(token)
    return angle_addr, value
def get_display_name(value):
    """ display-name = phrase

    Because this is simply a name-rule, we don't return a display-name
    token containing a phrase, but rather a display-name token with
    the content of the phrase.
    """
    name = DisplayName()
    phrase, remainder = get_phrase(value)
    # Hoist the phrase's children directly into the display-name token
    # instead of nesting the phrase itself, and copy its defects.
    for sub in phrase:
        name.append(sub)
    name.defects = list(phrase.defects)
    return name, remainder
def get_name_addr(value):
    """ name-addr = [display-name] angle-addr

    Returns a NameAddr token and the remaining value.
    """
    name_addr = NameAddr()
    # Both the optional display name and the angle-addr can start with cfws.
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(leader))
    if value[0] != '<':
        if value[0] in PHRASE_ENDS:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(value))
        token, value = get_display_name(value)
        if not value:
            raise errors.HeaderParseError(
                "expected name-addr but found '{}'".format(token))
        if leader is not None:
            # Attach the leading CFWS to the first word of the display name.
            token[0][:0] = [leader]
            leader = None
        name_addr.append(token)
    token, value = get_angle_addr(value)
    if leader is not None:
        # No display name was present; the CFWS belongs to the angle-addr.
        token[:0] = [leader]
    name_addr.append(token)
    return name_addr, value
def get_mailbox(value):
    """ mailbox = name-addr / addr-spec

    Returns a Mailbox token and the remaining value.  If any sub-token
    carries an InvalidHeaderDefect the mailbox is demoted to
    'invalid-mailbox'.
    """
    # The only way to figure out if we are dealing with a name-addr or an
    # addr-spec is to try parsing each one.
    mailbox = Mailbox()
    try:
        token, value = get_name_addr(value)
    except errors.HeaderParseError:
        try:
            token, value = get_addr_spec(value)
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected mailbox but found '{}'".format(value))
    if any(isinstance(x, errors.InvalidHeaderDefect)
           for x in token.all_defects):
        mailbox.token_type = 'invalid-mailbox'
    mailbox.append(token)
    return mailbox, value
def get_invalid_mailbox(value, endchars):
    """ Read everything up to one of the chars in endchars.

    This is outside the formal grammar.  The InvalidMailbox TokenList that is
    returned acts like a Mailbox, but the data attributes are None.
    """
    bad = InvalidMailbox()
    while value:
        ch = value[0]
        if ch in endchars:
            break
        if ch in PHRASE_ENDS:
            # Specials cannot start a phrase; record them one at a time.
            bad.append(ValueTerminal(ch, 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            bad.append(phrase)
    return bad, value
def get_mailbox_list(value):
    """ mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list
        obs-mbox-list = *([CFWS] ",") mailbox *("," [mailbox / CFWS])

    For this routine we go outside the formal grammar in order to improve error
    handling. We recognize the end of the mailbox list only at the end of the
    value or at a ';' (the group terminator). This is so that we can turn
    invalid mailboxes into InvalidMailbox tokens and continue parsing any
    remaining valid mailboxes. We also allow all mailbox entries to be null,
    and this condition is handled appropriately at a higher level.
    """
    mailbox_list = MailboxList()
    while value and value[0] != ';':
        try:
            token, value = get_mailbox(value)
            mailbox_list.append(token)
        except errors.HeaderParseError:
            # Recovery path: classify the unparseable element as either an
            # empty (obsolete) entry or an invalid mailbox run.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] in ',;':
                    mailbox_list.append(leader)
                    mailbox_list.defects.append(errors.ObsoleteHeaderDefect(
                        "empty element in mailbox-list"))
                else:
                    token, value = get_invalid_mailbox(value, ',;')
                    if leader is not None:
                        token[:0] = [leader]
                    mailbox_list.append(token)
                    mailbox_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid mailbox in mailbox-list"))
            elif value[0] == ',':
                mailbox_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in mailbox-list"))
            else:
                token, value = get_invalid_mailbox(value, ',;')
                if leader is not None:
                    token[:0] = [leader]
                mailbox_list.append(token)
                mailbox_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid mailbox in mailbox-list"))
        if value and value[0] not in ',;':
            # Crap after mailbox; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = mailbox_list[-1]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',;')
            mailbox.extend(token)
            mailbox_list.defects.append(errors.InvalidHeaderDefect(
                "invalid mailbox in mailbox-list"))
        if value and value[0] == ',':
            mailbox_list.append(ListSeparator)
            value = value[1:]
    return mailbox_list, value
def get_group_list(value):
    """ group-list = mailbox-list / CFWS / obs-group-list
        obs-group-list = 1*([CFWS] ",") [CFWS]

    Returns a GroupList token and the remaining value.
    """
    group_list = GroupList()
    if not value:
        group_list.defects.append(errors.InvalidHeaderDefect(
            "end of header before group-list"))
        return group_list, value
    leader = None
    if value and value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
        if not value:
            # This should never happen in email parsing, since CFWS-only is a
            # legal alternative to group-list in a group, which is the only
            # place group-list appears.
            group_list.defects.append(errors.InvalidHeaderDefect(
                "end of header in group-list"))
            group_list.append(leader)
            return group_list, value
        if value[0] == ';':
            # CFWS-only group-list (the ';' belongs to the enclosing group).
            group_list.append(leader)
            return group_list, value
    token, value = get_mailbox_list(value)
    if len(token.all_mailboxes)==0:
        # All entries were null: the obsolete 1*([CFWS] ",") [CFWS] form.
        if leader is not None:
            group_list.append(leader)
        group_list.extend(token)
        group_list.defects.append(errors.ObsoleteHeaderDefect(
            "group-list with empty entries"))
        return group_list, value
    if leader is not None:
        token[:0] = [leader]
    group_list.append(token)
    return group_list, value
def get_group(value):
    """ group = display-name ":" [group-list] ";" [CFWS]

    Parse a group construct such as 'undisclosed-recipients:;'.

    Bug fix: if the header ended immediately after the group-list, the
    original code recorded an "end of header in group" defect and then
    unconditionally indexed value[0], raising IndexError.  The ';' check
    now only runs when input remains (same shape as the upstream CPython
    fix).
    """
    group = Group()
    token, value = get_display_name(value)
    if not value or value[0] != ':':
        raise errors.HeaderParseError("expected ':' at end of group "
            "display name but found '{}'".format(value))
    group.append(token)
    group.append(ValueTerminal(':', 'group-display-name-terminator'))
    value = value[1:]
    if value and value[0] == ';':
        # Empty group: no group-list at all.
        group.append(ValueTerminal(';', 'group-terminator'))
        return group, value[1:]
    token, value = get_group_list(value)
    group.append(token)
    if not value:
        group.defects.append(errors.InvalidHeaderDefect(
            "end of header in group"))
    elif value[0] != ';':
        raise errors.HeaderParseError(
            "expected ';' at end of group but found {}".format(value))
    else:
        group.append(ValueTerminal(';', 'group-terminator'))
        value = value[1:]
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        group.append(token)
    return group, value
def get_address(value):
    """ address = mailbox / group

    Note that counter-intuitively, an address can be either a single address or
    a list of addresses (a group).  This is why the returned Address object has
    a 'mailboxes' attribute which treats a single address as a list of length
    one.  When you need to differentiate between to two cases, extract the single
    element, which is either a mailbox or a group token.
    """
    # The formal grammar isn't very helpful when parsing an address.  mailbox
    # and group, especially when allowing for obsolete forms, start off very
    # similarly.  It is only when you reach one of @, <, or : that you know
    # what you've got.  So, we try each one in turn, starting with the more
    # likely of the two.  We could perhaps make this more efficient by looking
    # for a phrase and then branching based on the next character, but that
    # would be a premature optimization.
    address = Address()
    try:
        token, value = get_group(value)
    except errors.HeaderParseError:
        try:
            token, value = get_mailbox(value)
        except errors.HeaderParseError:
            raise errors.HeaderParseError(
                "expected address but found '{}'".format(value))
    address.append(token)
    return address, value
def get_address_list(value):
    """ address_list = (address *("," address)) / obs-addr-list
        obs-addr-list = *([CFWS] ",") address *("," [address / CFWS])

    We depart from the formal grammar here by continuing to parse until the end
    of the input, assuming the input to be entirely composed of an
    address-list.  This is always true in email parsing, and allows us
    to skip invalid addresses to parse additional valid ones.
    """
    address_list = AddressList()
    while value:
        try:
            token, value = get_address(value)
            address_list.append(token)
        except errors.HeaderParseError as err:
            # Recovery: classify the unparseable element as an empty
            # (obsolete) entry or an invalid mailbox wrapped in an Address.
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] == ',':
                    address_list.append(leader)
                    address_list.defects.append(errors.ObsoleteHeaderDefect(
                        "address-list entry with no content"))
                else:
                    token, value = get_invalid_mailbox(value, ',')
                    if leader is not None:
                        token[:0] = [leader]
                    address_list.append(Address([token]))
                    address_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid address in address-list"))
            elif value[0] == ',':
                address_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in address-list"))
            else:
                token, value = get_invalid_mailbox(value, ',')
                if leader is not None:
                    token[:0] = [leader]
                address_list.append(Address([token]))
                address_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid address in address-list"))
        if value and value[0] != ',':
            # Crap after address; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = address_list[-1][0]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',')
            mailbox.extend(token)
            address_list.defects.append(errors.InvalidHeaderDefect(
                "invalid address in address-list"))
        if value:  # Must be a , at this point.
            address_list.append(ValueTerminal(',', 'list-separator'))
            value = value[1:]
    return address_list, value
#
# XXX: As I begin to add additional header parsers, I'm realizing we probably
# have two levels of parser routines: the get_XXX methods that get a token in
# the grammar, and parse_XXX methods that parse an entire field value. So
# get_address_list above should really be a parse_ method, as probably should
# be get_unstructured.
#
def parse_mime_version(value):
    """ mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS]

    Parse a complete MIME-Version field body.  Always returns a MIMEVersion
    token (never raises); problems are recorded as defects on the token.
    """
    # The [CFWS] is implicit in the RFC 2045 BNF.
    # XXX: This routine is a bit verbose, should factor out a get_int method.
    mime_version = MIMEVersion()
    if not value:
        mime_version.defects.append(errors.HeaderMissingRequiredValue(
            "Missing MIME version number (eg: 1.0)"))
        return mime_version
    if value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
        if not value:
            mime_version.defects.append(errors.HeaderMissingRequiredValue(
                "Expected MIME version number but found only CFWS"))
    # Collect the major version digits (anything up to '.' or CFWS).
    digits = ''
    while value and value[0] != '.' and value[0] not in CFWS_LEADER:
        digits += value[0]
        value = value[1:]
    if not digits.isdigit():
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Expected MIME major version number but found {!r}".format(digits)))
        mime_version.append(ValueTerminal(digits, 'xtext'))
    else:
        mime_version.major = int(digits)
        mime_version.append(ValueTerminal(digits, 'digits'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if not value or value[0] != '.':
        if mime_version.major is not None:
            mime_version.defects.append(errors.InvalidHeaderDefect(
                "Incomplete MIME version; found only major number"))
        if value:
            mime_version.append(ValueTerminal(value, 'xtext'))
        return mime_version
    mime_version.append(ValueTerminal('.', 'version-separator'))
    value = value[1:]
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if not value:
        if mime_version.major is not None:
            mime_version.defects.append(errors.InvalidHeaderDefect(
                "Incomplete MIME version; found only major number"))
        return mime_version
    # Collect the minor version digits (anything up to CFWS).
    digits = ''
    while value and value[0] not in CFWS_LEADER:
        digits += value[0]
        value = value[1:]
    if not digits.isdigit():
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Expected MIME minor version number but found {!r}".format(digits)))
        mime_version.append(ValueTerminal(digits, 'xtext'))
    else:
        mime_version.minor = int(digits)
        mime_version.append(ValueTerminal(digits, 'digits'))
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        mime_version.append(token)
    if value:
        mime_version.defects.append(errors.InvalidHeaderDefect(
            "Excess non-CFWS text after MIME version"))
        mime_version.append(ValueTerminal(value, 'xtext'))
    return mime_version
def get_invalid_parameter(value):
    """ Read everything up to the next ';'.

    This is outside the formal grammar.  The InvalidParameter TokenList that is
    returned acts like a Parameter, but the data attributes are None.
    """
    bad = InvalidParameter()
    while value:
        ch = value[0]
        if ch == ';':
            break
        if ch in PHRASE_ENDS:
            # Specials cannot start a phrase; record them one at a time.
            bad.append(ValueTerminal(ch, 'misplaced-special'))
            value = value[1:]
        else:
            phrase, value = get_phrase(value)
            bad.append(phrase)
    return bad, value
def get_ttext(value):
    """ttext = <matches _ttext_matcher>

    We allow any non-TOKEN_ENDS in ttext, but add defects to the token's
    defects list if we find non-ttext characters.  We also register defects for
    *any* non-printables even though the RFC doesn't exclude all of them,
    because we follow the spirit of RFC 5322.
    """
    match = _non_token_end_matcher(value)
    if match is None:
        raise errors.HeaderParseError(
            "expected ttext but found '{}'".format(value))
    text = match.group()
    remainder = value[len(text):]
    terminal = ValueTerminal(text, 'ttext')
    # Records defects for non-printable / non-ttext characters.
    _validate_xtext(terminal)
    return terminal, remainder
def get_token(value):
    """token = [CFWS] 1*ttext [CFWS]

    The RFC equivalent of ttext is any US-ASCII chars except space, ctls, or
    tspecials.  We also exclude tabs even though the RFC doesn't.
    The RFC implies the CFWS but is not explicit about it in the BNF.
    """
    result = Token()
    # Optional leading comments/whitespace.
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        result.append(cfws)
    if value and value[0] in TOKEN_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    text, value = get_ttext(value)
    result.append(text)
    # Optional trailing comments/whitespace.
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        result.append(cfws)
    return result, value
def get_attrtext(value):
    """attrtext = 1*(any non-ATTRIBUTE_ENDS character)

    We allow any non-ATTRIBUTE_ENDS in attrtext, but add defects to the
    token's defects list if we find non-attrtext characters.  We also register
    defects for *any* non-printables even though the RFC doesn't exclude all of
    them, because we follow the spirit of RFC 5322.
    """
    match = _non_attribute_end_matcher(value)
    if match is None:
        raise errors.HeaderParseError(
            "expected attrtext but found {!r}".format(value))
    text = match.group()
    remainder = value[len(text):]
    terminal = ValueTerminal(text, 'attrtext')
    # Records defects for non-printable / non-attrtext characters.
    _validate_xtext(terminal)
    return terminal, remainder
def get_attribute(value):
    """ [CFWS] 1*attrtext [CFWS]

    This version of the BNF makes the CFWS explicit, and as usual we use a
    value terminal for the actual run of characters.  The RFC equivalent of
    attrtext is the token characters, with the subtraction of '*', "'", and '%'.
    We include tab in the excluded set just as we do for token.
    """
    attribute = Attribute()
    # Optional leading comments/whitespace.
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    if value and value[0] in ATTRIBUTE_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    text, value = get_attrtext(value)
    attribute.append(text)
    # Optional trailing comments/whitespace.
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    return attribute, value
def get_extended_attrtext(value):
    """attrtext = 1*(any non-ATTRIBUTE_ENDS character plus '%')

    This is a special parsing routine so that we get a value that
    includes % escapes as a single string (which we decode as a single
    string later).
    """
    match = _non_extended_attribute_end_matcher(value)
    if match is None:
        raise errors.HeaderParseError(
            "expected extended attrtext but found {!r}".format(value))
    text = match.group()
    remainder = value[len(text):]
    terminal = ValueTerminal(text, 'extended-attrtext')
    # Records defects for non-printable characters.
    _validate_xtext(terminal)
    return terminal, remainder
def get_extended_attribute(value):
    """ [CFWS] 1*extended_attrtext [CFWS]

    This is like the non-extended version except we allow % characters, so that
    we can pick up an encoded value as a single string.
    """
    # XXX: should we have an ExtendedAttribute TokenList?
    attribute = Attribute()
    # Optional leading comments/whitespace.
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    if value and value[0] in EXTENDED_ATTRIBUTE_ENDS:
        raise errors.HeaderParseError(
            "expected token but found '{}'".format(value))
    text, value = get_extended_attrtext(value)
    attribute.append(text)
    # Optional trailing comments/whitespace.
    if value and value[0] in CFWS_LEADER:
        cfws, value = get_cfws(value)
        attribute.append(cfws)
    return attribute, value
def get_section(value):
    """ '*' digits

    The formal BNF is more complicated because leading 0s are not allowed.  We
    check for that and add a defect.  We also assume no CFWS is allowed between
    the '*' and the digits, though the RFC is not crystal clear on that.
    The caller should already have dealt with leading CFWS.
    """
    section = Section()
    if not value or value[0] != '*':
        raise errors.HeaderParseError("Expected section but found {}".format(
            value))
    section.append(ValueTerminal('*', 'section-marker'))
    value = value[1:]
    if not value or not value[0].isdigit():
        raise errors.HeaderParseError("Expected section number but "
                                      "found {}".format(value))
    digits = ''
    while value and value[0].isdigit():
        digits += value[0]
        value = value[1:]
    if digits[0] == '0' and digits != '0':
        # Bug fix: this used errors.InvalidHeaderError, which does not exist
        # in email.errors and raised AttributeError at runtime; use
        # InvalidHeaderDefect (and fix the missing space in the message).
        section.defects.append(errors.InvalidHeaderDefect(
            "section number has an invalid leading 0"))
    section.number = int(digits)
    section.append(ValueTerminal(digits, 'digits'))
    return section, value
def get_value(value):
    """ quoted-string / attribute

    Returns a Value token and the remaining value.  Leading CFWS is folded
    into the parsed sub-token.
    """
    v = Value()
    if not value:
        raise errors.HeaderParseError("Expected value but found end of string")
    leader = None
    if value[0] in CFWS_LEADER:
        leader, value = get_cfws(value)
    if not value:
        raise errors.HeaderParseError("Expected value but found "
                                      "only {}".format(leader))
    # A leading '"' selects the quoted-string form; otherwise we accept the
    # extended attribute form (which permits % escapes).
    if value[0] == '"':
        token, value = get_quoted_string(value)
    else:
        token, value = get_extended_attribute(value)
    if leader is not None:
        token[:0] = [leader]
    v.append(token)
    return v, value
def get_parameter(value):
    """ attribute [section] ["*"] [CFWS] "=" value

    The CFWS is implied by the RFC but not made explicit in the BNF.  This
    simplified form of the BNF from the RFC is made to conform with the RFC BNF
    through some extra checks.  We do it this way because it makes both error
    recovery and working with the resulting parse tree easier.

    Handles both plain parameters and RFC 2231 extended/sectioned parameters
    (name*0*=charset'lang'value...).
    """
    # It is possible CFWS would also be implicitly allowed between the section
    # and the 'extended-attribute' marker (the '*') , but we've never seen that
    # in the wild and we will therefore ignore the possibility.
    param = Parameter()
    token, value = get_attribute(value)
    param.append(token)
    if not value or value[0] == ';':
        param.defects.append(errors.InvalidHeaderDefect("Parameter contains "
            "name ({}) but no value".format(token)))
        return param, value
    if value[0] == '*':
        # Either a section number ('*1') and/or the extended-value marker
        # ('*' right before '=').
        try:
            token, value = get_section(value)
            param.sectioned = True
            param.append(token)
        except errors.HeaderParseError:
            pass
        if not value:
            raise errors.HeaderParseError("Incomplete parameter")
        if value[0] == '*':
            param.append(ValueTerminal('*', 'extended-parameter-marker'))
            value = value[1:]
            param.extended = True
    if value[0] != '=':
        raise errors.HeaderParseError("Parameter not followed by '='")
    param.append(ValueTerminal('=', 'parameter-separator'))
    value = value[1:]
    leader = None
    if value and value[0] in CFWS_LEADER:
        token, value = get_cfws(value)
        param.append(token)
    remainder = None
    appendto = param
    if param.extended and value and value[0] == '"':
        # Now for some serious hackery to handle the common invalid case of
        # double quotes around an extended value.  We also accept (with defect)
        # a value marked as encoded that isn't really.
        qstring, remainder = get_quoted_string(value)
        inner_value = qstring.stripped_value
        semi_valid = False
        if param.section_number == 0:
            if inner_value and inner_value[0] == "'":
                semi_valid = True
            else:
                token, rest = get_attrtext(inner_value)
                if rest and rest[0] == "'":
                    semi_valid = True
        else:
            try:
                token, rest = get_extended_attrtext(inner_value)
            except:
                pass
            else:
                if not rest:
                    semi_valid = True
        if semi_valid:
            # Reparse the quoted string's content as the extended value;
            # 'remainder' holds everything after the closing quote.
            param.defects.append(errors.InvalidHeaderDefect(
                "Quoted string value for extended parameter is invalid"))
            param.append(qstring)
            for t in qstring:
                if t.token_type == 'bare-quoted-string':
                    t[:] = []
                    appendto = t
                    break
            value = inner_value
        else:
            remainder = None
            param.defects.append(errors.InvalidHeaderDefect(
                "Parameter marked as extended but appears to have a "
                "quoted string value that is non-encoded"))
    if value and value[0] == "'":
        # Missing charset before the RFC 2231 delimiter.
        token = None
    else:
        token, value = get_value(value)
    if not param.extended or param.section_number > 0:
        if not value or value[0] != "'":
            appendto.append(token)
            if remainder is not None:
                assert not value, value
                value = remainder
            return param, value
        param.defects.append(errors.InvalidHeaderDefect(
            "Apparent initial-extended-value but attribute "
            "was not marked as extended or was not initial section"))
    if not value:
        # Assume the charset/lang is missing and the token is the value.
        param.defects.append(errors.InvalidHeaderDefect(
            "Missing required charset/lang delimiters"))
        appendto.append(token)
        if remainder is None:
            return param, value
    else:
        if token is not None:
            for t in token:
                if t.token_type == 'extended-attrtext':
                    break
            # NOTE(review): the next line is a no-op comparison; it looks
            # like an assignment (=) was intended.  The same line exists in
            # upstream CPython -- confirm intent before changing behavior.
            t.token_type == 'attrtext'
            appendto.append(t)
            param.charset = t.value
        if value[0] != "'":
            raise errors.HeaderParseError("Expected RFC2231 char/lang encoding "
                                          "delimiter, but found {!r}".format(value))
        appendto.append(ValueTerminal("'", 'RFC2231 delimiter'))
        value = value[1:]
        if value and value[0] != "'":
            token, value = get_attrtext(value)
            appendto.append(token)
            param.lang = token.value
            if not value or value[0] != "'":
                raise errors.HeaderParseError("Expected RFC2231 char/lang encoding "
                                  "delimiter, but found {}".format(value))
        appendto.append(ValueTerminal("'", 'RFC2231 delimiter'))
        value = value[1:]
    if remainder is not None:
        # Treat the rest of value as bare quoted string content.
        v = Value()
        while value:
            if value[0] in WSP:
                token, value = get_fws(value)
            else:
                token, value = get_qcontent(value)
            v.append(token)
        token = v
    else:
        token, value = get_value(value)
    appendto.append(token)
    if remainder is not None:
        assert not value, value
        value = remainder
    return param, value
def parse_mime_parameters(value):
    """ parameter *( ";" parameter )

    That BNF is meant to indicate this routine should only be called after
    finding and handling the leading ';'.  There is no corresponding rule in
    the formal RFC grammar, but it is more convenient for us for the set of
    parameters to be treated as its own TokenList.

    This is a 'parse' routine because it consumes the remaining value, but it
    would never be called to parse a full header.  Instead it is called to
    parse everything after the non-parameter value of a specific MIME header.
    """
    mime_parameters = MimeParameters()
    while value:
        try:
            token, value = get_parameter(value)
            mime_parameters.append(token)
        except errors.HeaderParseError as err:
            # Recovery: classify the unparseable span as an empty entry or
            # an invalid parameter run (everything up to the next ';').
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
            if not value:
                mime_parameters.append(leader)
                return mime_parameters
            if value[0] == ';':
                if leader is not None:
                    mime_parameters.append(leader)
                mime_parameters.defects.append(errors.InvalidHeaderDefect(
                    "parameter entry with no content"))
            else:
                token, value = get_invalid_parameter(value)
                if leader:
                    token[:0] = [leader]
                mime_parameters.append(token)
                mime_parameters.defects.append(errors.InvalidHeaderDefect(
                    "invalid parameter {!r}".format(token)))
        if value and value[0] != ';':
            # Junk after the otherwise valid parameter.  Mark it as
            # invalid, but it will have a value.
            param = mime_parameters[-1]
            param.token_type = 'invalid-parameter'
            token, value = get_invalid_parameter(value)
            param.extend(token)
            mime_parameters.defects.append(errors.InvalidHeaderDefect(
                "parameter with invalid trailing text {!r}".format(token)))
        if value:
            # Must be a ';' at this point.
            mime_parameters.append(ValueTerminal(';', 'parameter-separator'))
            value = value[1:]
    return mime_parameters
def _find_mime_parameters(tokenlist, value):
    """Do our best to find the parameters in an invalid MIME header

    Skips forward to the first ';' (recording the skipped text as phrases
    and misplaced specials), then hands the rest to parse_mime_parameters.
    Appends to *tokenlist* in place; returns None.
    """
    while value and value[0] != ';':
        if value[0] in PHRASE_ENDS:
            tokenlist.append(ValueTerminal(value[0], 'misplaced-special'))
            value = value[1:]
        else:
            token, value = get_phrase(value)
            tokenlist.append(token)
    if not value:
        return
    tokenlist.append(ValueTerminal(';', 'parameter-separator'))
    tokenlist.append(parse_mime_parameters(value[1:]))
def parse_content_type_header(value):
    """ maintype "/" subtype *( ";" parameter )

    The maintype and substype are tokens.  Theoretically they could
    be checked against the official IANA list + x-token, but we
    don't do that.

    Always returns a ContentType token; problems are recorded as defects.
    """
    ctype = ContentType()
    # NOTE(review): 'recover' is assigned but never used.
    recover = False
    if not value:
        ctype.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content type specification"))
        return ctype
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Expected content maintype but found {!r}".format(value)))
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(token)
    # XXX: If we really want to follow the formal grammar we should make
    # mantype and subtype specialized TokenLists here.  Probably not worth it.
    if not value or value[0] != '/':
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Invalid content type"))
        if value:
            _find_mime_parameters(ctype, value)
        return ctype
    ctype.maintype = token.value.strip().lower()
    ctype.append(ValueTerminal('/', 'content-type-separator'))
    value = value[1:]
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Expected content subtype but found {!r}".format(value)))
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(token)
    ctype.subtype = token.value.strip().lower()
    if not value:
        return ctype
    if value[0] != ';':
        ctype.defects.append(errors.InvalidHeaderDefect(
            "Only parameters are valid after content type, but "
            "found {!r}".format(value)))
        # The RFC requires that a syntactically invalid content-type be treated
        # as text/plain.  Perhaps we should postel this, but we should probably
        # only do that if we were checking the subtype value against IANA.
        del ctype.maintype, ctype.subtype
        _find_mime_parameters(ctype, value)
        return ctype
    ctype.append(ValueTerminal(';', 'parameter-separator'))
    ctype.append(parse_mime_parameters(value[1:]))
    return ctype
def parse_content_disposition_header(value):
    """ disposition-type *( ";" parameter )

    Always returns a ContentDisposition token; problems are recorded as
    defects.
    """
    disp_header = ContentDisposition()
    if not value:
        disp_header.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content disposition"))
        return disp_header
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        disp_header.defects.append(errors.InvalidHeaderDefect(
            "Expected content disposition but found {!r}".format(value)))
        _find_mime_parameters(disp_header, value)
        return disp_header
    disp_header.append(token)
    disp_header.content_disposition = token.value.strip().lower()
    if not value:
        return disp_header
    if value[0] != ';':
        disp_header.defects.append(errors.InvalidHeaderDefect(
            "Only parameters are valid after content disposition, but "
            "found {!r}".format(value)))
        _find_mime_parameters(disp_header, value)
        return disp_header
    disp_header.append(ValueTerminal(';', 'parameter-separator'))
    disp_header.append(parse_mime_parameters(value[1:]))
    return disp_header
def parse_content_transfer_encoding_header(value):
    """ mechanism

    Always returns a ContentTransferEncoding token; problems are recorded
    as defects.
    """
    # We should probably validate the values, since the list is fixed.
    cte_header = ContentTransferEncoding()
    if not value:
        cte_header.defects.append(errors.HeaderMissingRequiredValue(
            "Missing content transfer encoding"))
        return cte_header
    try:
        token, value = get_token(value)
    except errors.HeaderParseError:
        cte_header.defects.append(errors.InvalidHeaderDefect(
            "Expected content transfer encoding but found {!r}".format(value)))
    else:
        cte_header.append(token)
        cte_header.cte = token.value.strip().lower()
    if not value:
        return cte_header
    # Anything after the mechanism token is invalid; record it (with a
    # defect per chunk) so the original text is preserved.
    while value:
        cte_header.defects.append(errors.InvalidHeaderDefect(
            "Extra text after content transfer encoding"))
        if value[0] in PHRASE_ENDS:
            cte_header.append(ValueTerminal(value[0], 'misplaced-special'))
            value = value[1:]
        else:
            token, value = get_phrase(value)
            cte_header.append(token)
    return cte_header
|
ruibarreira/linuxtrail
|
usr/lib/python3.4/email/_header_value_parser.py
|
Python
|
gpl-3.0
| 103,875
|
[
"CRYSTAL"
] |
dca7c0c707195c5266980131bc81246ca3cb1015b69ddc47f14209b1ab38e812
|
"""
Trying to build a network with shared connections:
>>> from random import random
>>> n = buildSharedCrossedNetwork()
Check if the parameters are the same:
>>> (n.connections[n['a']][0].params == n.connections[n['a']][1].params).all()
True
>>> (n.connections[n['b']][0].params == n.connections[n['c']][0].params).all()
True
>>> from pybrain.tools.xml.networkwriter import NetworkWriter
The transformation of the first input to the second output is identical to the transformation of the
second towards the first:
>>> r1, r2 = random(), random()
>>> v1, v2 = n.activate([r1, r2])
>>> v3, v4 = n.activate([r2, r1])
>> n['b'].inputbuffer, n['c'].inputbuffer, n['b'].outputbuffer, n['c'].outputbuffer
>>> v1 == v4
True
>>> v2 == v3
True
Check its gradient:
>>> from pybrain.tests import gradientCheck
>>> gradientCheck(n)
Perfect gradient
True
Try writing it to an xml file, reread it and determine if it looks the same:
>>> from pybrain.tests import xmlInvariance
>>> xmlInvariance(n)
Same representation
Same function
Same class
"""
__author__ = 'Tom Schaul, tom@idsia.ch'
import scipy
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain import LinearLayer, SharedFullConnection, MotherConnection
from pybrain.tests import runModuleTestSuite
def buildSharedCrossedNetwork():
    """Build a feed-forward network with shared (mother) connections.

    Two single-neuron hidden modules are linked symmetrically, each
    receiving from a different input neuron than the output neuron it
    feeds, so the two input/output paths cross.  Both input-side
    connections share one parameter container (set to 1) and both
    output-side connections share another (set to 2).
    """
    hidden_size = 1
    net = FeedForwardNetwork('shared-crossed')

    in_layer = LinearLayer(2, name = 'a')
    hidden_one = LinearLayer(hidden_size, name = 'b')
    hidden_two = LinearLayer(hidden_size, name = 'c')
    out_layer = LinearLayer(2, name = 'd')

    net.addInputModule(in_layer)
    net.addModule(hidden_one)
    net.addModule(hidden_two)
    net.addOutputModule(out_layer)

    # One shared parameter container per side of the network.
    shared_in = MotherConnection(hidden_size)
    shared_in.params[:] = scipy.array((1,))
    shared_out = MotherConnection(hidden_size)
    shared_out.params[:] = scipy.array((2,))

    # Crossed wiring: input slice 0 -> b, input slice 1 -> c,
    # b -> output slice 1, c -> output slice 0.
    net.addConnection(SharedFullConnection(shared_in, in_layer, hidden_one, inSliceTo = 1))
    net.addConnection(SharedFullConnection(shared_in, in_layer, hidden_two, inSliceFrom = 1))
    net.addConnection(SharedFullConnection(shared_out, hidden_one, out_layer, outSliceFrom = 1))
    net.addConnection(SharedFullConnection(shared_out, hidden_two, out_layer, outSliceTo = 1))

    net.sortModules()
    return net
if __name__ == "__main__":
    # Run the doctests embedded in this module's docstring as a test suite.
    runModuleTestSuite(__import__('__main__'))
|
iut-ibk/Calimero
|
site-packages/pybrain/tests/unittests/test_shared_connections.py
|
Python
|
gpl-2.0
| 2,444
|
[
"NEURON"
] |
1455933431d20916a0e8e9a322c1308449df2dd8f6882fbf468cb45e97bd1331
|
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
"""
STIX Extension for AIS Data Markings
Unlike the other marking extensions, the AIS marking extension is not loaded
automatically, since AIS markings are not a part of the published STIX 1.x
specifications. They are included in python-stix because they're common enough
that it is not worth creating a separate package.
If you are writing code that needs to parse AIS markings, make sure that your
program imports this module before beginning to parse any STIX documents:
.. code-block:: python
import stix.extensions.marking.ais
"""
from mixbox import fields
from mixbox.namespaces import Namespace
import stix.bindings.extensions.marking.ais as ais_binding
import stix.data_marking
from stix.data_marking import MarkingStructure
def validate_value(instance, value):
    """TypedField preset hook: reject values outside ``_ALLOWED_VALUES``.

    A falsy *value* or an empty/missing allowed collection passes without
    checking; otherwise *value* must be a member of
    ``instance._ALLOWED_VALUES`` or a ValueError is raised.
    """
    allowed = instance._ALLOWED_VALUES
    # Nothing to validate when there is no value, no restriction list,
    # or the value is an allowed member.
    if not value or not allowed or value in allowed:
        return
    raise ValueError(
        "Value must be one of {allowed}. Received '{value}'".format(
            allowed=allowed, value=value))
class AISConsentType(stix.Entity):
    """AIS consent marking; ``consent`` is restricted to ``_ALLOWED_VALUES``."""
    _binding = ais_binding
    _binding_class = _binding.AISConsentType
    _namespace = 'http://www.us-cert.gov/STIXMarkingStructure#AISConsentMarking-2'
    # Permitted consent values; enforced by the validate_value preset hook.
    _ALLOWED_VALUES = ('EVERYONE', 'USG', 'NONE')
    consent = fields.TypedField("consent", preset_hook=validate_value)
    def __init__(self, consent=None):
        super(AISConsentType, self).__init__()
        self.consent = consent
class TLPMarkingType(stix.Entity):
    """TLP color marking; ``color`` is restricted to ``_ALLOWED_VALUES``."""
    _binding = ais_binding
    _binding_class = _binding.TLPMarkingType
    _namespace = 'http://www.us-cert.gov/STIXMarkingStructure#AISConsentMarking-2'
    # Permitted TLP colors; enforced by the validate_value preset hook.
    _ALLOWED_VALUES = ('WHITE', 'GREEN', 'AMBER')
    color = fields.TypedField("color", preset_hook=validate_value)
    def __init__(self, color=None):
        super(TLPMarkingType, self).__init__()
        self.color = color
class NotProprietary(stix.Entity):
    """AIS "Not Proprietary" container holding consent and TLP sub-markings.

    ``cisa_proprietary`` defaults to the string 'false'.
    """
    _binding = ais_binding
    _binding_class = _binding.NotProprietary
    _namespace = 'http://www.us-cert.gov/STIXMarkingStructure#AISConsentMarking-2'
    cisa_proprietary = fields.TypedField("CISA_Proprietary")
    ais_consent = fields.TypedField("AISConsent", AISConsentType, key_name="ais_consent")
    tlp_marking = fields.TypedField("TLPMarking", TLPMarkingType, key_name="tlp_marking")
    def __init__(self, cisa_proprietary='false', ais_consent=None,
                 tlp_marking=None):
        super(NotProprietary, self).__init__()
        self.cisa_proprietary = cisa_proprietary
        self.ais_consent = ais_consent
        self.tlp_marking = tlp_marking
class IsProprietary(stix.Entity):
    """AIS "Is Proprietary" container holding consent and TLP sub-markings.

    ``cisa_proprietary`` defaults to the string 'true'.
    """
    _binding = ais_binding
    _binding_class = _binding.IsProprietary
    _namespace = 'http://www.us-cert.gov/STIXMarkingStructure#AISConsentMarking-2'
    cisa_proprietary = fields.TypedField("CISA_Proprietary")
    ais_consent = fields.TypedField("AISConsent", AISConsentType, key_name="ais_consent")
    tlp_marking = fields.TypedField("TLPMarking", TLPMarkingType, key_name="tlp_marking")
    def __init__(self, cisa_proprietary='true', ais_consent=None,
                 tlp_marking=None):
        super(IsProprietary, self).__init__()
        self.cisa_proprietary = cisa_proprietary
        self.ais_consent = ais_consent
        self.tlp_marking = tlp_marking
@stix.register_extension
class AISMarkingStructure(MarkingStructure):
    """Top-level AIS marking; carries either an IsProprietary or a
    NotProprietary child (callers are expected to set exactly one)."""
    _binding = ais_binding
    _binding_class = _binding.AISMarkingStructure
    _namespace = 'http://www.us-cert.gov/STIXMarkingStructure#AISConsentMarking-2'
    _XSI_TYPE = "AIS:AISMarkingStructure"
    is_proprietary = fields.TypedField("Is_Proprietary", IsProprietary)
    not_proprietary = fields.TypedField("Not_Proprietary", NotProprietary)
    def __init__(self, is_proprietary=None, not_proprietary=None):
        super(AISMarkingStructure, self).__init__()
        self.is_proprietary = is_proprietary
        self.not_proprietary = not_proprietary
# Namespace entry for the AIS marking extension (prefix 'AIS').
NAMESPACES = [
    Namespace('http://www.us-cert.gov/STIXMarkingStructure#AISConsentMarking-2', 'AIS', 'http://www.us-cert.gov/sites/default/files/STIX_Namespace/AIS_Bundle_Marking_1.1.1_v1.0.xsd')
]
def _update_namespaces():
    """Register the AIS namespace with python-stix and mixbox."""
    # Update the python-stix namespace dictionary
    from stix.utils import nsparser
    import mixbox.namespaces
    nsparser.STIX_NAMESPACES.add_namespace(NAMESPACES[0])
    mixbox.namespaces.register_namespace(NAMESPACES[0])
# Registration happens as a side effect of importing this module.
_update_namespaces()
# IndustryType allowed sectors
#: Chemical Sector
CHEMICAL_SECTOR = 'Chemical Sector'
#: Commercial Facilities Sector
COMMERCIAL_FACILITIES_SECTOR = 'Commercial Facilities Sector'
#: Communications Sector
COMMUNICATIONS_SECTOR = 'Communications Sector'
#: Critical Manufacturing Sector
CRITICAL_MANUFACTURING_SECTOR = 'Critical Manufacturing Sector'
#: Dams Sector
DAMS_SECTOR = 'Dams Sector'
#: Defense Industrial Base Sector
DEFENSE_INDUSTRIAL_BASE_SECTOR = 'Defense Industrial Base Sector'
#: Emergency Services Sector
EMERGENCY_SERVICES_SECTOR = 'Emergency Services Sector'
#: Energy Sector
ENERGY_SECTOR = 'Energy Sector'
#: Financial Services Sector
FINANCIAL_SERVICES_SECTOR = 'Financial Services Sector'
#: Food and Agriculture Sector
FOOD_AND_AGRICULTURE_SECTOR = 'Food and Agriculture Sector'
#: Government Facilities Sector
GOVERNMENT_FACILITIES_SECTOR = 'Government Facilities Sector'
#: Healthcare and Public Health Sector
HEALTH_CARE_AND_PUBLIC_HEALTH_SECTOR = 'Healthcare and Public Health Sector'
#: Information Technology Sector
INFORMATION_TECHNOLOGY_SECTOR = 'Information Technology Sector'
#: Nuclear Reactors, Materials, and Waste Sector
NUCLEAR_REACTORS_MATERIALS_AND_WASTE_SECTOR = 'Nuclear Reactors, Materials, and Waste Sector'
#: Other
OTHER = 'Other'
#: Transportation Systems Sector
TRANSPORTATION_SYSTEMS_SECTOR = 'Transportation Systems Sector'
#: Water and Wastewater Systems Sector
WATER_AND_WASTEWATER_SYSTEMS_SECTOR = 'Water and Wastewater Systems Sector'
def _validate_and_create_industry_type(industry_type):
    """Normalize *industry_type* into a pipe-delimited string of sectors.

    Accepts a single sector string, a pipe-delimited string of sectors,
    or a list/tuple of sector strings.  Matching is case-insensitive and
    surrounding whitespace is ignored; the returned string uses the
    canonical capitalization.  Raises ValueError for unknown sectors,
    empty input, or unsupported input types.
    """
    INDUSTRY_SECTORS = (CHEMICAL_SECTOR, COMMERCIAL_FACILITIES_SECTOR,
                        COMMUNICATIONS_SECTOR, CRITICAL_MANUFACTURING_SECTOR,
                        DAMS_SECTOR, DEFENSE_INDUSTRIAL_BASE_SECTOR,
                        EMERGENCY_SERVICES_SECTOR, ENERGY_SECTOR,
                        FINANCIAL_SERVICES_SECTOR, FOOD_AND_AGRICULTURE_SECTOR,
                        GOVERNMENT_FACILITIES_SECTOR,
                        HEALTH_CARE_AND_PUBLIC_HEALTH_SECTOR,
                        INFORMATION_TECHNOLOGY_SECTOR,
                        NUCLEAR_REACTORS_MATERIALS_AND_WASTE_SECTOR,
                        TRANSPORTATION_SYSTEMS_SECTOR, OTHER,
                        WATER_AND_WASTEWATER_SYSTEMS_SECTOR)
    # Map lower-cased sector name -> canonical capitalization.
    canonical = dict((s.lower(), s) for s in INDUSTRY_SECTORS)

    if isinstance(industry_type, str):
        # Single sector or pipe-delimited string.
        requested = [part.lower().strip() for part in industry_type.split("|")]
    elif isinstance(industry_type, (list, tuple)):
        requested = [part.lower().strip() for part in industry_type]
    else:
        # Unsupported type: fall through to the error below.
        requested = []

    matched = [canonical[item] for item in requested if item in canonical]
    # Succeed only when there was at least one item and all items matched.
    if requested and len(matched) == len(requested):
        return "|".join(matched)

    msg = 'IndustryType must be one of the following: {0}. Received \'{1}\'.'
    raise ValueError(msg.format(INDUSTRY_SECTORS, industry_type))
def add_ais_marking(stix_package, proprietary, consent, color, **kwargs):
    """
    This utility function aids in the creation of an AIS marking and appends
    it to the provided STIX package.
    Args:
        stix_package: A stix.core.STIXPackage object.
        proprietary: True if marking uses IsProprietary, False for
            NotProprietary.
        consent: A string with one of the following values: "EVERYONE", "NONE"
            or "USG".
        color: A string that corresponds to TLP values: "WHITE", "GREEN" or
            "AMBER".
        **kwargs: Six required keyword arguments that are used to create a CIQ
            identity object. These are: country_name_code,
            country_name_code_type, admin_area_name_code,
            admin_area_name_code_type, organisation_name, industry_type.
    Raises:
        ValueError: When keyword arguments are missing. User did not supply
            correct values for: proprietary, color and consent.
    Note:
        The following line is required to register the AIS extension::
        >>> import stix.extensions.marking.ais
        Any Markings under STIX Header will be removed. Please follow the
        guidelines for `AIS`_.
        The industry_type keyword argument accepts: a list of string based on
        defined sectors, a pipe-delimited string of sectors, or a single
        sector.
    .. _AIS:
        https://www.us-cert.gov/ais
    """
    # Imported here (not at module top) to avoid circular imports at load time.
    from stix.common import InformationSource
    from stix.extensions.identity.ciq_identity_3_0 import (
        CIQIdentity3_0Instance, STIXCIQIdentity3_0, PartyName, Address,
        Country, NameElement, OrganisationInfo, AdministrativeArea)
    from stix.core.stix_header import STIXHeader
    from stix.data_marking import MarkingSpecification, Marking
    # All six CIQ identity keyword arguments are mandatory.
    args = ('country_name_code', 'country_name_code_type', 'industry_type',
            'admin_area_name_code', 'admin_area_name_code_type',
            'organisation_name')
    diff = set(args) - set(kwargs.keys())
    if diff:
        msg = 'All keyword arguments must be provided. Missing: {0}'
        raise ValueError(msg.format(tuple(diff)))
    # Build the CIQ identity: organisation name, country, admin area, industry.
    party_name = PartyName()
    party_name.add_organisation_name(kwargs['organisation_name'])
    country = Country()
    country_name = NameElement()
    country_name.name_code = kwargs['country_name_code']
    country_name.name_code_type = kwargs['country_name_code_type']
    country.add_name_element(country_name)
    admin_area = AdministrativeArea()
    admin_area_name = NameElement()
    admin_area_name.name_code = kwargs['admin_area_name_code']
    admin_area_name.name_code_type = kwargs['admin_area_name_code_type']
    admin_area.add_name_element(admin_area_name)
    address = Address()
    address.country = country
    address.administrative_area = admin_area
    org_info = OrganisationInfo()
    # Raises ValueError for unknown sectors.
    org_info.industry_type = _validate_and_create_industry_type(kwargs['industry_type'])
    id_spec = STIXCIQIdentity3_0()
    id_spec.party_name = party_name
    id_spec.add_address(address)
    id_spec.organisation_info = org_info
    identity = CIQIdentity3_0Instance()
    identity.specification = id_spec
    if proprietary is True:
        proprietary_obj = IsProprietary()
        # NOTE(review): proprietary submissions override the caller-supplied
        # consent with 'EVERYONE' — confirm this is the intended AIS behavior.
        consent = 'EVERYONE'
    elif proprietary is False:
        proprietary_obj = NotProprietary()
    else:
        # Identity check: only the booleans True/False are accepted.
        raise ValueError('proprietary expected True or False.')
    proprietary_obj.ais_consent = AISConsentType(consent=consent)
    proprietary_obj.tlp_marking = TLPMarkingType(color=color)
    ais_marking = AISMarkingStructure()
    if isinstance(proprietary_obj, IsProprietary):
        ais_marking.is_proprietary = proprietary_obj
    else:
        ais_marking.not_proprietary = proprietary_obj
    marking_spec = MarkingSpecification()
    # XPath selecting every node and attribute: marking applies package-wide.
    marking_spec.controlled_structure = '//node() | //@*'
    marking_spec.marking_structures.append(ais_marking)
    marking_spec.information_source = InformationSource()
    marking_spec.information_source.identity = identity
    if not stix_package.stix_header:
        stix_package.stix_header = STIXHeader()
    # Removes any other Markings if present.
    stix_package.stix_header.handling = Marking()
    stix_package.stix_header.handling.add_marking(marking_spec)
|
STIXProject/python-stix
|
stix/extensions/marking/ais.py
|
Python
|
bsd-3-clause
| 12,265
|
[
"Amber"
] |
c9d092f78e59da8d712065b51291128a0a29a7042424eca431cbe95b5c728dc8
|
# Online haptic_map implementation
import pylab as pyl
import numpy as np
import matplotlib.pyplot as pp
#from enthought.mayavi import mlab
import scipy as scp
import scipy.ndimage as ni
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import rospy
import tf
import os
#import hrl_lib.mayavi2_util as mu
import hrl_lib.viz as hv
import hrl_lib.util as ut
import hrl_lib.transforms as tr
import hrl_lib.matplotlib_util as mpu
import pickle
import unittest
import ghmm
import ghmmwrapper
import random
from hrl_haptic_manipulation_in_clutter_msgs.msg import SkinContact
from hrl_haptic_manipulation_in_clutter_msgs.msg import TaxelArray
from m3skin_ros.msg import TaxelArray as TaxelArray_Meka
from visualization_msgs.msg import Marker
from visualization_msgs.msg import MarkerArray
import sys
sys.path.insert(0, '/home/tapo/svn/robot1_data/usr/tapo/data_code/rapid_categorization/taxel_based/')
from data_variable_length_force_sample import Fmat_original, temp_num_fol, temp_num_trunk
def callback(data, callback_args):
    """Taxel-force subscriber callback.

    Buffers per-taxel force magnitudes into the global `fmags`, transforms
    each taxel's contact point into /torso_lift_link, classifies taxels via
    test_data(), and publishes a marker for every taxel judged in contact.
    """
    rospy.loginfo('Getting data!')
    # Fixing Transforms
    tf_lstnr = callback_args
    sc = SkinContact()
    sc.header.frame_id = '/torso_lift_link' # has to be this and no other coord frame.
    sc.header.stamp = data.header.stamp
    # Transform from the incoming taxel frame into the torso frame.
    t1, q1 = tf_lstnr.lookupTransform(sc.header.frame_id,
                                      data.header.frame_id,
                                      rospy.Time(0))
    t1 = np.matrix(t1).reshape(3,1)
    r1 = tr.quaternion_to_matrix(q1)
    # Gathering Force Data: per-taxel magnitude of the (x, y, z) force vector.
    force_vectors = np.row_stack([data.values_x, data.values_y, data.values_z])
    fmags_instant = ut.norm(force_vectors)
    threshold = 0.01
    # NOTE(review): force_arr is computed but never used; presumably the
    # taxel grid is 16x24 (= 384 taxels) — confirm against the skin driver.
    force_arr = fmags_instant.reshape((16,24))
    # Subtract the noise threshold and clamp negatives to zero.
    fmags_tuned = fmags_instant - threshold
    fmags_tuned[np.where(fmags_tuned<0)]=0
    fmags_instant_tuned = fmags_tuned
    global fmags
    # Accumulate a time series per taxel while it stays in contact;
    # reset the series as soon as the taxel drops below threshold.
    for i in range(len(fmags_instant_tuned)):
        if fmags_instant_tuned[i] > 0.0:
            fmags[i].append(fmags_instant_tuned[i])
        else:
            fmags[i] = []
    # Gathering Contact Data for Haptic Mapping: contact point in torso frame.
    global global_contact_vector
    for i in range(len(fmags_instant_tuned)):
        global_contact_vector[i] = r1*((np.column_stack([data.centers_x[i], data.centers_y[i], data.centers_z[i]])).T) + t1
    # Classify every taxel (updates the global taxel_FLAG).
    test_data()
    global taxel_FLAG
    # Publish a marker for each taxel currently in contact (FLAG >= 0).
    for i in range(len(fmags_instant_tuned)):
        if taxel_FLAG[i] > -1:
            idx = taxel_FLAG[i]
            contact_info = global_contact_vector[i]
            pubdata(idx, contact_info)
def test_data():
    """Classify each of the 384 taxels from its buffered force time series.

    Runs the Viterbi algorithm of both trained HMMs (foliage and trunk) on
    the taxel's series and sets taxel_FLAG[i] based on which model scores
    higher and by how much.
    """
    # Do Stuff For Testing which basically returns which FLAG is true
    global taxel_FLAG # -1 for not in Contact, 0 for Unknown (Red), 1 for Foliage (green), 2 for Trunk (brown)
    # For Testing
    global fmags
    for i in range(384):
        if (len(fmags[i]) > 0):
            ts_obj = fmags[i]
            final_ts_obj = ghmm.EmissionSequence(F,ts_obj)
            # Find Viterbi Path
            global model_ff
            global model_tf
            # viterbi() returns (path, log-likelihood); element [1] is the score.
            path_ff_obj = model_ff.viterbi(final_ts_obj)
            path_tf_obj = model_tf.viterbi(final_ts_obj)
            print path_ff_obj[1], path_tf_obj[1]
            diff = abs(path_ff_obj[1]-path_tf_obj[1])
            obj = max(path_ff_obj[1],path_tf_obj[1])
            obj_min = min(abs(path_ff_obj[1]),abs(path_tf_obj[1]))
            # Foliage wins only when its likelihood leads by a margin > 10;
            # trunk needs a margin > 20; otherwise the taxel stays Unknown.
            # (Commented-out branches are earlier threshold experiments.)
            if ((obj == path_ff_obj[1]) and (diff > 10)):
                #if ((obj == path_ff_obj[1]) and (obj_min > 1800)):
                #if ((obj == path_ff_obj[1])):
                print 'Taxel', i, 'is Foliage !'
                taxel_FLAG[i] = 1
            #elif ((obj == path_tf_obj[1]) and (obj_min > 1800)):
            elif ((obj == path_tf_obj[1]) and (diff > 20)):
                #elif ((obj == path_tf_obj[1])):
                print 'Taxel', i, 'is Trunk !'
                taxel_FLAG[i] = 2
            #elif ((obj == path_tf_obj[1]):
                #print 'Taxel', i, 'is Uncertain'
                #taxel_FLAG[i] = 0
            else:
                taxel_FLAG[i] = 0
                print 'Taxel', i, 'is Unknown'
        else:
            print 'Taxel', i, 'is not in Contact'
            taxel_FLAG[i] = -1
def getdata():
    """Initialize the ROS node, subscribe to the forearm skin taxel topic,
    and spin forever; all work happens in callback()."""
    rospy.loginfo('Initializing the Node !')
    rospy.init_node('Online_Haptic_Map_Builder', anonymous=True)
    tf_lstnr = tf.TransformListener()
    rospy.loginfo('Waiting to Subscribe to the Skin Message...')
    # The TF listener is handed to the callback via callback_args.
    rospy.Subscriber("/skin_patch_forearm_right/taxels/forces", TaxelArray_Meka, callback, callback_args = (tf_lstnr))
    rospy.spin()
def pubdata(idx, contact_info):
    """Append a colored sphere marker at *contact_info* (torso frame) to the
    global markerArray and publish the whole array.

    idx encodes the classification: 1 = foliage (green), 2 = trunk (brown),
    anything else = unknown.
    """
    rospy.loginfo('Publishing data')
    marker = Marker()
    marker.ns = 'Haptic_Map_Markers'
    marker.header.frame_id = '/torso_lift_link'
    marker.type = marker.SPHERE
    marker.action = marker.ADD
    # 2 cm spheres.
    marker.scale.x = 0.02
    marker.scale.y = 0.02
    marker.scale.z = 0.02
    if idx == 1:
        # Green for Foliage
        marker.color.a = 1.0;
        marker.color.r = 0.0;
        marker.color.g = 1.0;
        marker.color.b = 0.0;
    elif idx == 2:
        # Brown for Trunk
        marker.color.a = 1.0;
        marker.color.r = 0.5;
        marker.color.g = 0.25;
        marker.color.b = 0.125;
    else:
        # Red for Unknown
        # NOTE(review): alpha is 0.0 here, so "unknown" markers are fully
        # transparent (invisible) despite the red color — confirm whether
        # hiding unknowns is intentional or a = 1.0 was meant.
        marker.color.a = 0.0;
        marker.color.r = 1.0;
        marker.color.g = 0.0;
        marker.color.b = 0.0;
    marker.pose.orientation.w = 1.0
    marker.pose.position.x = contact_info[0]
    marker.pose.position.y = contact_info[1]
    marker.pose.position.z = contact_info[2]
    markerArray.markers.append(marker)
    # Renumber the marker IDs
    id = 0
    for m in markerArray.markers:
        m.id = id
        id += 1
    # Publish the MarkerArray
    publisher.publish(markerArray)
    #rospy.sleep(0.01)
if __name__ == '__main__':
    # Marker publisher used by pubdata(); markerArray accumulates markers
    # across callbacks.
    topic = 'visualization_marker_array'
    publisher = rospy.Publisher(topic, MarkerArray)
    markerArray = MarkerArray()
    print "Initializing the HMM Models"
    # HMM Implementation
    # Fmat holds the training force time series: foliage trials first,
    # then trunk trials.
    Fmat = Fmat_original
    Foliage_Trials = temp_num_fol
    Trunk_Trials = temp_num_trunk
    # Getting mean / covariance
    i = 0
    number_states = 10
    # Per-state accumulator of samples across trials; state_1 is a scratch slice.
    feature_1_final_data = [0.0]*number_states
    state_1 = [0.0]
    # --- Foliage trials: split each trial into number_states equal chunks
    # and pool the samples of each chunk across trials.
    while (i < Foliage_Trials):
        data_length = len(Fmat[i])
        feature_length = data_length/1
        sample_length = feature_length/number_states
        Feature_1 = Fmat[i][0:feature_length]
        if i == 0:
            # First trial initializes the per-state sample lists.
            j = 0
            while (j < number_states):
                feature_1_final_data[j] = Feature_1[sample_length*j:sample_length*(j+1)]
                j=j+1
        else:
            # Subsequent trials concatenate onto the per-state lists.
            j = 0
            while (j < number_states):
                state_1 = Feature_1[sample_length*j:sample_length*(j+1)]
                #print np.shape(state_1)
                #print np.shape(feature_1_final_data[j])
                feature_1_final_data[j] = feature_1_final_data[j]+state_1
                j=j+1
        i = i+1
    # Per-state mean and standard deviation of the pooled foliage samples.
    j = 0
    mu_ff_force = np.zeros((number_states,1))
    sigma_ff = np.zeros((number_states,1))
    while (j < number_states):
        mu_ff_force[j] = np.mean(feature_1_final_data[j])
        sigma_ff[j] = scp.std(feature_1_final_data[j])
        j = j+1
    # --- Trunk trials: same procedure on the remaining trials.
    i = Foliage_Trials
    feature_1_final_data = [0.0]*number_states
    state_1 = [0.0]
    while (i < (Foliage_Trials + Trunk_Trials)):
        data_length = len(Fmat[i])
        feature_length = data_length/1
        sample_length = feature_length/number_states
        Feature_1 = Fmat[i][0:feature_length]
        if i == Foliage_Trials:
            j = 0
            while (j < number_states):
                feature_1_final_data[j] = Feature_1[sample_length*j:sample_length*(j+1)]
                j=j+1
        else:
            j = 0
            while (j < number_states):
                state_1 = Feature_1[sample_length*j:sample_length*(j+1)]
                feature_1_final_data[j] = feature_1_final_data[j]+state_1
                j=j+1
        i = i+1
    # Per-state mean and standard deviation of the pooled trunk samples.
    j = 0
    mu_tf_force = np.zeros((number_states,1))
    sigma_tf = np.zeros((number_states,1))
    while (j < number_states):
        mu_tf_force[j] = np.mean(feature_1_final_data[j])
        sigma_tf[j] = scp.std(feature_1_final_data[j])
        j = j+1
    # HMM - Implementation:
    # 10 Hidden States
    # Max. Force(For now), Contact Area(Not now), and Contact Motion(Not Now) as Continuous Gaussian Observations from each hidden state
    # Four HMM-Models for Rigid-Fixed, Soft-Fixed, Rigid-Movable, Soft-Movable
    # Transition probabilities obtained as upper diagonal matrix (to be trained using Baum_Welch)
    # For new objects, it is classified according to which model it represents the closest..
    F = ghmm.Float() # emission domain of this model
    # A - Transition Matrix (left-to-right / upper-triangular: a state can
    # only stay or move forward; one matrix per supported state count).
    if number_states == 3:
        A = [[0.2, 0.5, 0.3],
             [0.0, 0.5, 0.5],
             [0.0, 0.0, 1.0]]
    elif number_states == 5:
        A = [[0.2, 0.35, 0.2, 0.15, 0.1],
             [0.0, 0.2, 0.45, 0.25, 0.1],
             [0.0, 0.0, 0.2, 0.55, 0.25],
             [0.0, 0.0, 0.0, 0.2, 0.8],
             [0.0, 0.0, 0.0, 0.0, 1.0]]
    elif number_states == 10:
        A = [[0.1, 0.25, 0.15, 0.15, 0.1, 0.05, 0.05, 0.05, 0.05, 0.05],
             [0.0, 0.1, 0.25, 0.25, 0.2, 0.1, 0.05, 0.05, 0.05, 0.05],
             [0.0, 0.0, 0.1, 0.25, 0.25, 0.2, 0.05, 0.05, 0.05, 0.05],
             [0.0, 0.0, 0.0, 0.1, 0.3, 0.30, 0.20, 0.1, 0.05, 0.05],
             [0.0, 0.0, 0.0, 0.0, 0.1, 0.30, 0.30, 0.20, 0.05, 0.05],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.1, 0.35, 0.30, 0.20, 0.05],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.2, 0.30, 0.30, 0.20],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.2, 0.50, 0.30],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.4, 0.60],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 1.00]]
    elif number_states == 15:
        A = [[0.1, 0.25, 0.15, 0.15, 0.1, 0.05, 0.05, 0.05, 0.04, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.1, 0.25, 0.25, 0.2, 0.1, 0.05, 0.05, 0.04, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.1, 0.25, 0.25, 0.2, 0.05, 0.05, 0.04, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.0, 0.1, 0.3, 0.30, 0.20, 0.1, 0.04, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.0, 0.0, 0.1, 0.30, 0.30, 0.20, 0.04, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.1, 0.35, 0.30, 0.15, 0.05, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.1, 0.30, 0.30, 0.10, 0.05, 0.05, 0.05, 0.03, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.1, 0.30, 0.30, 0.10, 0.05, 0.05, 0.05, 0.05],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.1, 0.30, 0.20, 0.15, 0.10, 0.10, 0.05],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.1, 0.30, 0.20, 0.15, 0.15, 0.10],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.1, 0.30, 0.30, 0.20, 0.10],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.1, 0.40, 0.30, 0.20],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.20, 0.50, 0.30],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 0.40, 0.60],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 1.00]]
    elif number_states == 20:
        A = [[0.1, 0.25, 0.15, 0.15, 0.1, 0.05, 0.05, 0.03, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.1, 0.25, 0.25, 0.2, 0.1, 0.05, 0.03, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.1, 0.25, 0.25, 0.2, 0.05, 0.03, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.0, 0.1, 0.3, 0.30, 0.20, 0.09, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.0, 0.0, 0.1, 0.30, 0.30, 0.15, 0.04, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.1, 0.35, 0.30, 0.10, 0.05, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.1, 0.30, 0.20, 0.10, 0.05, 0.05, 0.05, 0.03, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.1, 0.30, 0.20, 0.10, 0.05, 0.05, 0.05, 0.05, 0.02, 0.02, 0.02, 0.02, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.1, 0.30, 0.20, 0.15, 0.05, 0.05, 0.05, 0.02, 0.02, 0.02, 0.02, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.1, 0.30, 0.20, 0.15, 0.10, 0.05, 0.02, 0.02, 0.02, 0.02, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.1, 0.30, 0.30, 0.10, 0.10, 0.02, 0.02, 0.02, 0.02, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.1, 0.40, 0.30, 0.10, 0.02, 0.02, 0.02, 0.02, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.20, 0.40, 0.20, 0.10, 0.04, 0.02, 0.02, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 0.20, 0.40, 0.20, 0.10, 0.05, 0.03, 0.02],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 0.20, 0.40, 0.20, 0.10, 0.05, 0.05],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 0.00, 0.20, 0.40, 0.20, 0.10, 0.10],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.20, 0.40, 0.20, 0.20],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.30, 0.50, 0.20],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.40, 0.60],
             [0.0, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.0, 0.0, 0.0, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 1.00]]
    # B - Emission Matrix, parameters of emission distributions in pairs of (mu, sigma)
    B_ff = [0.0]*number_states
    B_tf = [0.0]*number_states
    for num_states in range(number_states):
        B_ff[num_states] = [mu_ff_force[num_states][0],sigma_ff[num_states][0]]
        B_tf[num_states] = [mu_tf_force[num_states][0],sigma_tf[num_states][0]]
    # pi - initial probabilities per state (uniform)
    if number_states == 3:
        pi = [1./3.] * 3
    elif number_states == 5:
        pi = [0.2] * 5
    elif number_states == 10:
        pi = [0.1] * 10
    elif number_states == 15:
        pi = [1./15.] * 15
    elif number_states == 20:
        pi = [0.05] * 20
    # generate FF, TF models from parameters
    model_ff = ghmm.HMMFromMatrices(F,ghmm.GaussianDistribution(F), A, B_ff, pi) # Will be Trained
    model_tf = ghmm.HMMFromMatrices(F,ghmm.GaussianDistribution(F), A, B_tf, pi) # Will be Trained
    # Flatten each trial's nested lists into one observation sequence.
    # NOTE(review): total_seq aliases Fmat, so Fmat is mutated in place here.
    total_seq = Fmat
    for i in range((Foliage_Trials + Trunk_Trials)):
        total_seq[i][:] = sum(total_seq[i][:],[])
    total_seq_ff = total_seq[0:Foliage_Trials]
    total_seq_tf = total_seq[Foliage_Trials:Foliage_Trials + Trunk_Trials]
    #print len(total_seq_ff)
    #print len(total_seq_tf)
    print "Training the HMM Models..."
    train_seq_ff = total_seq_ff
    train_seq_tf = total_seq_tf
    # Baum-Welch training of each class model on its own trials.
    final_ts_ff = ghmm.SequenceSet(F,train_seq_ff)
    final_ts_tf = ghmm.SequenceSet(F,train_seq_tf)
    model_ff.baumWelch(final_ts_ff)
    model_tf.baumWelch(final_ts_tf)
    print "Models Trained: Ready to Collect Data !"
    # Gather Data from Robot Online
    # Global per-taxel state consumed by callback()/test_data()/pubdata().
    taxel_FLAG = {}
    for i in range(384):
        taxel_FLAG[i] = -1 # -1 for not in Contact, 0 for Unknown (Red), 1 for Foliage (green), 2 for Trunk (brown)
    fmags = {}
    for i in range(384):
        fmags[i] = []
    global_contact_vector = {}
    for i in range(384):
        global_contact_vector[i] = []
    # NOTE(review): these three flags appear unused by the functions above.
    FLAG_Trunk = False
    FLAG_Foliage = False
    FLAG_Unknown = True
    # Blocks in rospy.spin() until shutdown.
    getdata()
|
tapomayukh/projects_in_python
|
rapid_categorization/haptic_map/online_haptic_map_taxel_based.py
|
Python
|
mit
| 16,270
|
[
"Gaussian",
"Mayavi"
] |
f5db77a2a8ea05410c80edcac957a1221bcc932cea949a659b11ea824af10413
|
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
repository_name = 'filtering_0410'
repository_description = 'Galaxy filtering tool for test 0410'
repository_long_description = 'Long description of Galaxy filtering tool for test 0410'
'''
1. Create a repository in the tool shed owned by test_user_1.
2. Have test_user_2 complete a review of the repository.
3. Have test_user_1 browse the review.
4. Have test_user_3 browse the repository and make sure they are not allowed to browse the review.
5. Have test_user_1 give write permission on the repository to the test_user_3.
6. Have test_user_3 browse the repository again and they should now have the ability to browse the review.
7. Have test_user_3 browse the review.
'''
class TestRepositoryComponentReviews( ShedTwillTestCase ):
'''Test repository component review features.'''
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
"""
Create all the user accounts that are needed for this test script to run independently of other test.
Previously created accounts will not be re-created.
"""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
test_user_1 = self.test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
test_user_1_private_role = self.test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.test_user_2_email, username=common.test_user_2_name )
test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
test_user_2_private_role = self.test_db_util.get_private_role( test_user_2 )
self.logout()
self.login( email=common.test_user_3_email, username=common.test_user_3_name )
test_user_3 = self.test_db_util.get_user( common.test_user_3_email )
assert test_user_3 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_3_email
test_user_3_private_role = self.test_db_util.get_private_role( test_user_3 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
admin_user = self.test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = self.test_db_util.get_private_role( admin_user )
    def test_0005_grant_reviewer_role( self ):
        '''Grant the repository reviewer role to test_user_2.'''
        """
        We now have an admin user (admin_user) and three non-admin users (test_user_1, test_user_2, and test_user_3). Grant the repository
        reviewer role to test_user_2, who will not be the owner of the reviewed repositories, and do not grant any roles to test_user_3 yet.
        """
        # Look up the role and the user, then associate them.
        reviewer_role = self.test_db_util.get_role_by_name( 'Repository Reviewer' )
        test_user_2 = self.test_db_util.get_user( common.test_user_2_email )
        self.grant_role_to_user( test_user_2, reviewer_role )
    def test_0010_verify_repository_review_components( self ):
        '''Ensure that the required review components exist.'''
        """
        Make sure all the components we are to review are recorded in the database.
        """
        # Create the one component that is not built in, then check that the
        # full expected set is listed on the manage-components page.
        self.add_repository_review_component( name='Repository dependencies',
                                              description='Repository dependencies defined in a file named repository_dependencies.xml included in the repository' )
        strings_displayed=[ 'Data types', 'Functional tests', 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
        self.manage_review_components( strings_displayed=strings_displayed )
    def test_0015_create_repository( self ):
        """Create and populate the filtering repository"""
        """
        We are at step 1.
        Log in as test_user_1 and create the filtering repository, then upload a basic set of
        components to be reviewed in subsequent tests.
        """
        category = self.create_category( name='Test 0400 Repository Component Reviews', description='Test 0400 Repository Component Reviews' )
        self.logout()
        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
        # Pages shown after creation should confirm the new repository.
        strings_displayed = [ 'Repository %s' % "'%s'" % repository_name,
                              'Repository %s has been created' % "<b>%s</b>" % repository_name ]
        repository = self.get_or_create_repository( name=repository_name,
                                                    description=repository_description,
                                                    long_description=repository_long_description,
                                                    owner=common.test_user_1_name,
                                                    category_id=self.security.encode_id( category.id ),
                                                    strings_displayed=strings_displayed )
        # Upload the tool tarball, its test data, and a readme so that the
        # Tools, Functional tests, and README review components have content.
        self.upload_file( repository,
                          filename='filtering/filtering_1.1.0.tar',
                          filepath=None,
                          valid_tools_only=True,
                          uncompress_file=True,
                          remove_repo_files_not_in_tar=False,
                          commit_message='Uploaded filtering 1.1.0 tarball.',
                          strings_displayed=[],
                          strings_not_displayed=[] )
        self.upload_file( repository,
                          filename='filtering/filtering_test_data.tar',
                          filepath=None,
                          valid_tools_only=True,
                          uncompress_file=True,
                          remove_repo_files_not_in_tar=False,
                          commit_message='Uploaded filtering test data.',
                          strings_displayed=[],
                          strings_not_displayed=[] )
        self.upload_file( repository,
                          filename='readme.txt',
                          filepath=None,
                          valid_tools_only=True,
                          uncompress_file=True,
                          remove_repo_files_not_in_tar=False,
                          commit_message='Uploaded readme.txt.',
                          strings_displayed=[],
                          strings_not_displayed=[] )
    def test_0020_review_repository( self ):
        '''Complete a review of the filtering repository.'''
        '''
        We are at step 2 - Have test_user_2 complete a review of the repository.
        Review all components of the filtering repository, with the appropriate contents and approved/not approved/not applicable status.
        '''
        self.logout()
        self.login( email=common.test_user_2_email, username=common.test_user_2_name )
        repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
        user = self.test_db_util.get_user( common.test_user_2_email )
        # Empty dicts mean the component is reviewed with no content
        # (recorded as not applicable).
        review_contents_dict = {
            'Data types': dict(),
            'README': dict( rating=5, comment='Clear and concise readme file, a true pleasure to read.', approved='yes', private='no' ),
            'Functional tests': dict( rating=5, comment='A good set of functional tests.', approved='yes', private='no' ),
            'Repository dependencies': dict(),
            'Tool dependencies': dict(),
            'Tools': dict( rating=5, comment='Excellent tool, easy to use.', approved='yes', private='no' ),
            'Workflows': dict()
        }
        self.create_repository_review( repository, review_contents_dict )
def test_0025_verify_repository_review(self):
    '''Verify that the review was completed and displays properly.'''
    '''
    We are at step 3 - Have test_user_1 browse the review.
    Verify that all the review components were submitted, and that the repository owner can see the review.
    '''
    self.logout()
    self.login(email=common.test_user_1_email, username=common.test_user_1_name)
    repository = self.test_db_util.get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
    user = self.test_db_util.get_user(common.test_user_2_email)
    # Strings the completed review itself must show: component names,
    # approval flags, and the reviewer's comments.
    review_strings_displayed = ['Data types', 'Functional tests', 'yes', 'A good set of functional tests.', 'README', 'yes', 'Workflows', 'Tools']
    review_strings_displayed.extend(['Clear and concise readme file, a true pleasure to read.', 'Tool dependencies', 'not_applicable'])
    review_strings_displayed.extend(['Repository dependencies', 'Excellent tool, easy to use.'])
    # BUG FIX: previously this list was immediately overwritten by
    # reassigning strings_displayed before it was ever used, so the review
    # contents were never actually verified. Keep the two checks separate.
    manage_strings_displayed = ['Browse reviews of this repository']
    self.display_manage_repository_page(repository, strings_displayed=manage_strings_displayed)
    self.verify_repository_reviews(repository, reviewer=user, strings_displayed=review_strings_displayed)
def test_0030_browse_with_other_user(self):
    '''Verify that test_user_3 is blocked from browsing the review.'''
    '''
    We are at step 4 - Have test_user_3 browse the repository and make sure they are not allowed to browse the review.
    '''
    self.logout()
    self.login(email=common.test_user_3_email, username=common.test_user_3_name)
    repository = self.test_db_util.get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
    user = self.test_db_util.get_user(common.test_user_2_email)
    # The manage page must not offer the review-browsing link to test_user_3.
    self.display_manage_repository_page(repository, strings_not_displayed=['Browse reviews of this repository'])
    # None of the reviewer's comments may leak into the component review page.
    hidden_comments = [
        'A good set of functional tests.',
        'Clear and concise readme file, a true pleasure to read.',
        'Excellent tool, easy to use.',
    ]
    changeset_revision = self.get_repository_tip(repository)
    review = self.test_db_util.get_repository_review_by_user_id_changeset_revision(user.id, repository.id, changeset_revision)
    self.browse_component_review(review, strings_not_displayed=hidden_comments)
def test_0035_grant_write_access_to_other_user(self):
    '''Grant write access on the filtering_0410 repository to test_user_3.'''
    '''
    We are at step 5 - Have test_user_1 give write permission on the repository to the test_user_3.
    '''
    # Only the owner (test_user_1) can change repository permissions.
    self.logout()
    self.login(email=common.test_user_1_email, username=common.test_user_1_name)
    target_repository = self.test_db_util.get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
    self.grant_write_access(target_repository, usernames=[common.test_user_3_name])
def test_0040_verify_test_user_3_can_browse_reviews(self):
    '''Check that test_user_3 can now browse reviews.'''
    '''
    We are at step 6 - Have test_user_3 browse the repository again and they should now have the ability to browse the review.
    '''
    self.logout()
    self.login(email=common.test_user_3_email, username=common.test_user_3_name)
    target_repository = self.test_db_util.get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
    # The write grant from the previous step should expose the review link.
    self.display_manage_repository_page(target_repository, strings_displayed=['Browse reviews of this repository'])
def test_0045_verify_browse_review_with_write_access(self):
    '''Check that test_user_3 can now display reviews.'''
    '''
    We are at step 7 - Have test_user_3 browse the review.
    '''
    self.logout()
    self.login(email=common.test_user_3_email, username=common.test_user_3_name)
    repository = self.test_db_util.get_repository_by_name_and_owner(repository_name, common.test_user_1_name)
    reviewer = self.test_db_util.get_user(common.test_user_2_email)
    # With write access granted, every review comment should now be visible.
    expected_comments = [
        'A good set of functional tests.',
        'Clear and concise readme file',
        'a true pleasure to read.',
        'Excellent tool, easy to use.',
    ]
    tip_revision = self.get_repository_tip(repository)
    review = self.test_db_util.get_repository_review_by_user_id_changeset_revision(reviewer.id, repository.id, tip_revision)
    self.browse_component_review(review, strings_displayed=expected_comments)
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/test/tool_shed/functional/test_0410_repository_component_review_access_control.py
|
Python
|
gpl-3.0
| 13,119
|
[
"Galaxy"
] |
f5ed46c2749809379e61c12d7c8d79d43ab65b6db367fdb27875c1adeb08c706
|
import matplotlib.pyplot as plt
from matplotlib import cm
import pandas as pd
from sklearn.neighbors import kneighbors_graph
import networkx as nx
import numpy as np
from sklearn import preprocessing
# Compare an answer distribution between professionals and students.
def plot_stud_prof(prof_stack="", stud_stack="", prof="", stud="", column="", title=""):
    """Bar-plot the top-10 professional answer ratios next to the matching
    student ratios.

    Either pass two Series directly (prof / stud), or pass two DataFrames
    (prof_stack / stud_stack) together with a column name to extract.
    """
    if column != "":
        prof = prof_stack[column]
        stud = stud_stack[column]
    # Top 10 professional answers, as a share of all professional answers.
    prof_ratios = prof.value_counts(normalize=True)[:10]
    # Student shares for those same answers, aligned by label.
    stud_ratios = stud.value_counts(normalize=True).loc[prof_ratios.index]
    comparison = pd.DataFrame([prof_ratios, stud_ratios]).T
    comparison.columns = ["Professional", "Student"]
    comparison.plot.bar(figsize=(7, 7))
    plt.title(title + ' distribution for Professionals/Students')
    plt.ylabel("Ratio")
    plt.show()
# Keep only survey rows that meet the study's inclusion criteria.
def row_filter(stack, row):
    """Return True if a survey row should be kept.

    Professionals must be employed (full/part-time or freelance), have a
    salary above the dataset's 5% quantile, and have at least one of the
    two satisfaction answers. Students must have an expected salary above
    the 5% quantile. All other respondents are dropped.
    """
    missing = row.isnull()
    if row.Professional == "Professional developer":
        if row.EmploymentStatus not in ('Employed part-time',
                                        'Employed full-time',
                                        'Independent contractor, freelancer, or self-employed'):
            return False
        # After checking salary values, we decided to remove the first 5%
        # quantile as they were mostly outliers (values inbetween 0 and 100)
        if missing.Salary or row.Salary < stack.Salary.quantile(0.05):
            return False
        if missing.JobSatisfaction and missing.CareerSatisfaction:
            return False
        return True
    if row.Professional == "Student":
        if missing.ExpectedSalary or row.ExpectedSalary < stack.ExpectedSalary.quantile(0.05):
            return False
        return True
    # Neither a student nor a professional developer.
    return False
# Dummies the dataframe: one-hot encode multi-select and categorical columns.
def dummies(df, columns, special_col):
    """One-hot encode the given multi-select columns, then dummy-encode any
    remaining categorical columns.

    :param df: input dataframe (modified columns are dropped from the result)
    :param columns: columns whose cells hold ';'-separated multi-select answers
    :param special_col: one column in `columns` whose tokens get a 'Want_'
        prefix so its dummies don't collide with same-named current answers
    :return: fully numeric, dummy-encoded dataframe
    """
    for sub in columns:
        # Strip spaces so "C; Java" and "C;Java" yield identical tokens.
        df[sub] = df[sub].apply(lambda x: str(x).replace(" ", "").split(";"))
        if sub == special_col:
            df[sub] = df[sub].apply(lambda x: ["Want_" + s for s in x])
        # Explode token lists row-wise, one-hot encode, and collapse back to
        # one indicator row per original record.
        # FIX: Series.sum(level=0) was removed in pandas 2.0;
        # groupby(level=0).sum() is the equivalent, version-stable spelling.
        token_dummies = pd.get_dummies(pd.DataFrame(df[sub].tolist(), index=df.index).stack()).groupby(level=0).sum()
        df = pd.concat([df, token_dummies], axis=1).drop(sub, axis=1)
    df = pd.get_dummies(df)
    return df
# Preprocess a dataframe: drop incomplete rows, dummy-encode, rescale ratings.
def preprocessed(df, columns, special_col, prof):
    """Drop rows with missing values, dummy-encode via `dummies`, and for
    professionals rescale the 0-10 satisfaction ratings to 0-1.

    :return: (encoded dataframe, cleaned original dataframe)
    """
    cleaned = df.dropna()
    encoded = dummies(cleaned.copy(), columns, special_col)
    if prof:
        encoded.JobSatisfaction /= 10
        encoded.CareerSatisfaction /= 10
    return encoded, cleaned
# Compute a k-NN graph using sklearn.
def compute_knn_graph(df):
    """Build a k-nearest-neighbour graph (k = sqrt(n)) and convert the
    distances to Gaussian-kernel similarity weights."""
    n_neighbors = int(np.sqrt(df.shape[0]))
    knn = kneighbors_graph(df, n_neighbors, mode='distance', include_self=True)
    # Bandwidth = mean edge distance of the graph itself.
    sigma = np.mean(knn.data)
    knn.data = np.exp(-knn.data ** 2 / (2. * sigma ** 2))
    return knn
# Draw the corresponding graph using NetworkX.
def draw_graph(graph, title):
    """Render the similarity graph with a spring layout; return (G, pos) so
    later plots can reuse the same node positions.

    NOTE(review): nx.from_scipy_sparse_matrix was removed in NetworkX 3.0;
    this code targets an older NetworkX release.
    """
    similarity_graph = nx.from_scipy_sparse_matrix(graph, edge_attribute='similarity')
    layout = nx.spring_layout(similarity_graph)
    plt.figure(1, figsize=(10, 10))
    nx.draw_networkx_nodes(similarity_graph, layout, node_size=7, node_color='lightblue')
    plt.title(title)
    plt.show()
    return similarity_graph, layout
# Encode string labels to ints.
def encode_label(df, features):
    """Label-encode the given feature columns.

    :return: (list of {original label: encoded int} dicts, parallel to
        `features`; encoded copy of the dataframe)
    """
    encoded = df.copy()
    mappings = []
    for column in features:
        encoder = preprocessing.LabelEncoder()
        encoder.fit(encoded[column])
        encoded[column] = encoder.transform(encoded[column])
        mappings.append(dict(zip(encoder.classes_, encoder.transform(encoder.classes_))))
    return mappings, encoded
#Plot graph by features
def draw_features(important_features, df, mapping, G, pos, type_):
    """Draw the network once per feature, colouring each node by that node's
    (label-encoded) feature value.

    :param important_features: column names of df to visualise
    :param df: label-encoded dataframe (see encode_label)
    :param mapping: per-feature {original label: encoded int} dicts,
        parallel to important_features (as returned by encode_label)
    :param G: networkx graph to draw
    :param pos: node positions (e.g. from draw_graph)
    :param type_: text inserted into the plot title (e.g. the network name)
    """
    for i,features in enumerate(important_features):
        #print(features)
        f = plt.figure(1,figsize=(10,10))
        norm = plt.Normalize()
        cmap = plt.get_cmap('Set2')
        # One colour per node, from its encoded feature value.
        c = cmap(norm(list(df[features])))
        # NOTE(review): a legend is only built for features 0 and 2 —
        # presumably the categorical ones in the callers' feature lists;
        # confirm against how this is invoked.
        if i in [0,2]:
            scalarMap = cm.ScalarMappable(norm=norm, cmap=cmap)
            ax = f.add_subplot(1,1,1)
            for label in mapping[i]:
                # Invisible dummy points give the legend one entry per label.
                ax.plot([0],[0],color=scalarMap.to_rgba(mapping[i][label]),label=label)
        nx.draw_networkx_nodes(G, pos, node_color=c, node_size=20)
        plt.legend()
        plt.title(features + " coloring for " + type_ + " network")
        plt.show()
#Draw the neighbors of a certain node
def draw_neighbors(G, pos, node, title):
    """Highlight one node (black) and its neighbours (red) on the network;
    every other node stays light blue.

    :param G: networkx graph
    :param pos: node positions (e.g. from draw_graph)
    :param node: index of the node whose neighbourhood is highlighted
    :param title: plot title
    """
    # FIX: G.number_of_nodes() is stable across NetworkX versions, unlike
    # the G.node attribute this used before (removed in NetworkX 2.4).
    color = ['lightblue'] * G.number_of_nodes()
    color[node] = 'k'
    for n in G.neighbors(node):
        if n != node:
            color[n] = 'r'
    plt.figure(1, figsize=(10, 10))
    nx.draw_networkx_nodes(G, pos, node_color=color, node_size=20)
    plt.title(title)
    plt.show()
# Country name -> ISO 3166-1 alpha-3 code, including the spelling variants
# that appear in the survey data (e.g. both 'Brunei' and 'Brunei Darussalam').
# Fixes relative to the original table:
#   - North/South Korea codes were swapped: PRK is North Korea (Democratic
#     People's Republic of Korea), KOR is South Korea (Republic of Korea).
#   - Christmas Island is CXR, not the invalid 'CHR'.
MAP_COUNTRIES = {
    'Afghanistan': 'AFG', 'Aland Islands': 'ALA', 'Albania': 'ALB', 'Algeria': 'DZA',
    'American Samoa': 'ASM', 'Andorra': 'AND', 'Angola': 'AGO', 'Anguilla': 'AIA',
    'Antigua and Barbuda': 'ATG', 'Antarctica': 'ATA', 'Argentina': 'ARG', 'Armenia': 'ARM',
    'Aruba': 'ABW', 'Australia': 'AUS', 'Austria': 'AUT', 'Azerbaijan': 'AZE', 'Azerbaidjan': 'AZE',
    'Bahrain': 'BHR', 'Bahamas': 'BHS', 'Bangladesh': 'BGD', 'Barbados': 'BRB', 'Belarus': 'BLR',
    'Belgium': 'BEL', 'Belize': 'BLZ', 'Benin': 'BEN', 'Bermuda': 'BMU', 'Bhutan': 'BTN',
    'Bolivia': 'BOL', 'Bosnia and Herzegovina': 'BIH', 'Bosnia-Herzegovina': 'BIH',
    'Botswana': 'BWA', 'Bouvet Island': 'BVT', 'Brazil': 'BRA', 'British Virgin Islands': 'VGB',
    'British Indian Ocean Territory': 'IOT', 'Brunei': 'BRN', 'Brunei Darussalam': 'BRN',
    'Bulgaria': 'BGR', 'Burkina Faso': 'BFA', 'Burma': 'MMR', 'Burundi': 'BDI',
    'Cabo Verde': 'CPV', 'Cape Verde': 'CPV', 'Cambodia': 'KHM', 'Cameroon': 'CMR',
    'Canada': 'CAN', 'Cayman Islands': 'CYM', 'Central African Republic': 'CAF', 'Chad': 'TCD',
    'Chile': 'CHL', 'Christmas Island': 'CXR', 'China': 'CHN', 'Colombia': 'COL',
    'Comoros': 'COM', 'Congo, Democratic Republic of the': 'COD', 'Congo, Republic of the': 'COG',
    'Cook Islands': 'COK', 'Costa Rica': 'CRI', 'Cote d\'Ivoire': 'CIV',
    "Ivory Coast (Cote D'Ivoire)": 'CIV', 'Croatia': 'HRV', 'Cuba': 'CUB', 'Curacao': 'CUW',
    'Cyprus': 'CYP', 'Czech Republic': 'CZE', 'Denmark': 'DNK', 'Djibouti': 'DJI',
    'Dominica': 'DMA', 'Dominican Republic': 'DOM', 'Ecuador': 'ECU', 'Egypt': 'EGY',
    'El Salvador': 'SLV', 'Equatorial Guinea': 'GNQ', 'Eritrea': 'ERI', 'Estonia': 'EST',
    'Ethiopia': 'ETH', 'Falkland Islands (Islas Malvinas)': 'FLK', 'Falkland Islands': 'FLK',
    'Faroe Islands': 'FRO', 'Fiji': 'FJI', 'Finland': 'FIN', 'France': 'FRA',
    'French Polynesia': 'PYF', 'Gabon': 'GAB', 'Gambia, The': 'GMB', 'Georgia': 'GEO',
    'Germany': 'DEU', 'Ghana': 'GHA', 'Gibraltar': 'GIB', 'Greece': 'GRC', 'Greenland': 'GRL',
    'Grenada': 'GRD', 'Guam': 'GUM', 'Guatemala': 'GTM', 'Guernsey': 'GGY',
    'Guinea-Bissau': 'GNB', 'Guinea': 'GIN', 'Guyana': 'GUY', 'French Guyana': 'GUY',
    'Haiti': 'HTI', 'Honduras': 'HND', 'Heard and McDonald Islands': 'HMD', 'Hong Kong': 'HKG',
    'Hungary': 'HUN', 'Iceland': 'ISL', 'India': 'IND', 'Indonesia': 'IDN', 'Iran': 'IRN',
    'Iraq': 'IRQ', 'Ireland': 'IRL', 'Isle of Man': 'IMN', 'Israel': 'ISR', 'Italy': 'ITA',
    'Jamaica': 'JAM', 'Japan': 'JPN', 'Jersey': 'JEY', 'Jordan': 'JOR', 'Kazakhstan': 'KAZ',
    'Kenya': 'KEN', 'Kiribati': 'KIR',
    'Korea, North': 'PRK', 'Korea, South': 'KOR', 'South Korea': 'KOR', 'North Korea': 'PRK',
    'Kosovo': 'KSV', 'Kuwait': 'KWT', 'Kyrgyzstan': 'KGZ', 'Laos': 'LAO', 'Latvia': 'LVA',
    'Lebanon': 'LBN', 'Lesotho': 'LSO', 'Liberia': 'LBR', 'Libya': 'LBY', 'Liechtenstein': 'LIE',
    'Lithuania': 'LTU', 'Luxembourg': 'LUX', 'Macau': 'MAC', 'Macedonia': 'MKD',
    'Madagascar': 'MDG', 'Malawi': 'MWI', 'Malaysia': 'MYS', 'Maldives': 'MDV', 'Mali': 'MLI',
    'Malta': 'MLT', 'Marshall Islands': 'MHL', 'Martinique (French)': 'MTQ',
    'Mauritania': 'MRT', 'Mauritius': 'MUS', 'Mexico': 'MEX',
    'Micronesia, Federated States of': 'FSM', 'Moldova': 'MDA', 'Moldavia': 'MDA',
    'Monaco': 'MCO', 'Mongolia': 'MNG', 'Montenegro': 'MNE', 'Montserrat': 'MSR',
    'Morocco': 'MAR', 'Mozambique': 'MOZ', 'Myanmar': 'MMR', 'Namibia': 'NAM', 'Nepal': 'NPL',
    'Netherlands': 'NLD', 'Netherlands Antilles': 'ANT', 'New Caledonia': 'NCL',
    'New Caledonia (French)': 'NCL', 'New Zealand': 'NZL', 'Nicaragua': 'NIC',
    'Nigeria': 'NGA', 'Niger': 'NER', 'Niue': 'NIU', 'Northern Mariana Islands': 'MNP',
    'Norway': 'NOR', 'Oman': 'OMN', 'Pakistan': 'PAK', 'Palau': 'PLW', 'Panama': 'PAN',
    'Papua New Guinea': 'PNG', 'Paraguay': 'PRY', 'Peru': 'PER', 'Philippines': 'PHL',
    'Pitcairn Island': 'PCN', 'Poland': 'POL', 'Polynesia (French)': 'PYF', 'Portugal': 'PRT',
    'Puerto Rico': 'PRI', 'Qatar': 'QAT', 'Reunion (French)': 'REU', 'Romania': 'ROU',
    'Russia': 'RUS', 'Russian Federation': 'RUS', 'Rwanda': 'RWA',
    'Saint Kitts and Nevis': 'KNA', 'Saint Lucia': 'LCA', 'Saint Martin': 'MAF',
    'Saint Pierre and Miquelon': 'SPM', 'Saint Vincent and the Grenadines': 'VCT',
    'Saint Vincent & Grenadines': 'VCT', 'S. Georgia & S. Sandwich Isls.': 'SGS',
    'Samoa': 'WSM', 'San Marino': 'SMR', 'Saint Helena': 'SHN', 'Sao Tome and Principe': 'STP',
    'Saudi Arabia': 'SAU', 'Senegal': 'SEN', 'Serbia': 'SRB', 'Seychelles': 'SYC',
    'Sierra Leone': 'SLE', 'Singapore': 'SGP', 'Sint Maarten': 'SXM', 'Slovakia': 'SVK',
    'Slovak Republic': 'SVK', 'Slovenia': 'SVN', 'Solomon Islands': 'SLB', 'Somalia': 'SOM',
    'South Africa': 'ZAF', 'South Sudan': 'SSD', 'Spain': 'ESP', 'Sri Lanka': 'LKA',
    'Sudan': 'SDN', 'Suriname': 'SUR', 'Swaziland': 'SWZ', 'Sweden': 'SWE',
    'Switzerland': 'CHE', 'Syria': 'SYR', 'Taiwan': 'TWN', 'Tajikistan': 'TJK',
    'Tadjikistan': 'TJK', 'Tanzania': 'TZA', 'Thailand': 'THA', 'Timor-Leste': 'TLS',
    'Togo': 'TGO', 'Tonga': 'TON', 'Trinidad and Tobago': 'TTO', 'Tunisia': 'TUN',
    'Turkey': 'TUR', 'Turkmenistan': 'TKM', 'Tuvalu': 'TUV', 'Uganda': 'UGA',
    'Ukraine': 'UKR', 'United Arab Emirates': 'ARE', 'United Kingdom': 'GBR',
    'United States': 'USA', 'U.S. Minor Outlying Islands': 'UMI', 'Uruguay': 'URY',
    'Uzbekistan': 'UZB', 'Vanuatu': 'VUT', 'Vatican City State': 'VAT', 'Venezuela': 'VEN',
    'Vietnam': 'VNM', 'Virgin Islands': 'VGB', 'Virgin Islands (USA)': 'VIR',
    'Virgin Islands (British)': 'VGB', 'West Bank': 'WBG', 'Yemen': 'YEM', 'Zaire': 'ZAR',
    'Zambia': 'ZMB', 'Zimbabwe': 'ZWE'}
|
mdeff/ntds_2017
|
projects/reports/stackoverflow_recommendation/helper_functions.py
|
Python
|
mit
| 9,671
|
[
"BWA"
] |
3cbf94923e33d2d2798d9502de345c0738d4eec2d921d56a843c74a765ab6b5c
|
#
# QAPI parser test harness
#
# Copyright (c) 2013 Red Hat Inc.
#
# Authors:
# Markus Armbruster <armbru@redhat.com>
#
# This work is licensed under the terms of the GNU GPL, version 2 or later.
# See the COPYING file in the top-level directory.
#
from qapi import *
from pprint import pprint
import os
import sys
class QAPISchemaTestVisitor(QAPISchemaVisitor):
    """Visitor that dumps each schema entity in a stable, line-oriented text
    form, so the test output can be diffed against an expected-output file.

    NOTE: this harness is Python 2 (print statements), matching the rest of
    the QAPI test suite it ships with.
    """
    def visit_enum_type(self, name, info, values, prefix):
        # One line per enum, then the optional C name prefix if present.
        print 'enum %s %s' % (name, values)
        if prefix:
            print ' prefix %s' % prefix
    def visit_object_type(self, name, info, base, members, variants):
        # Object line, then base type, members, and any union variants.
        print 'object %s' % name
        if base:
            print ' base %s' % base.name
        for m in members:
            print ' member %s: %s optional=%s' % \
                (m.name, m.type.name, m.optional)
        self._print_variants(variants)
    def visit_alternate_type(self, name, info, variants):
        print 'alternate %s' % name
        self._print_variants(variants)
    def visit_command(self, name, info, arg_type, ret_type,
                      gen, success_response):
        # arg_type/ret_type may be None; `x and x.name` prints None then.
        print 'command %s %s -> %s' % \
            (name, arg_type and arg_type.name, ret_type and ret_type.name)
        print ' gen=%s success_response=%s' % (gen, success_response)
    def visit_event(self, name, info, arg_type):
        print 'event %s %s' % (name, arg_type and arg_type.name)
    @staticmethod
    def _print_variants(variants):
        # Shared helper: dump a variant set (union/alternate branches).
        if variants:
            if variants.tag_name:
                print ' tag %s' % variants.tag_name
            for v in variants.variants:
                print ' case %s: %s' % (v.name, v.type.name)
# Entry point: parse the schema file named on the command line and dump it
# via the test visitor for comparison against the expected output.
schema = QAPISchema(sys.argv[1])
schema.visit(QAPISchemaTestVisitor())
|
coloft/qemu
|
tests/qapi-schema/test-qapi.py
|
Python
|
gpl-2.0
| 1,749
|
[
"VisIt"
] |
6e70fbf39f4c5ad9b077d113c3242ffa39b37bb70a3bae169527492396730bbe
|
"""
Created on 30/01/2013
@author: thom
"""
import random
import logging
import numpy as np
from rdkit.Chem import AllChem as Chem
import xml.etree.cElementTree as ElementTree
from population import Population
from molecule import Molecule # only used to get canonical SMILES representation for a molecule
class MolecularPopulation(Population):
    """Population subclass where items must be SMILES strings; method parameters expect either SMILES or Rdkit Mol values"""

    def __init__(self, xml=None, population=None, reactions=None, size=0):
        """As for Population, except we expect all population items to be in SMILES. We automatically standardise these -
        all hydrogens are made explicit, and the item is converted to canonical SMILES.

        :param xml: XML string with <setvalue> elements describing the initial population (used when not None)
        :param population: existing Population to copy the initial state from (used when xml is None)
        :param reactions: optional list of reactions to replay on top of the initial state
        :param size: attempt to resize to include only this many time-stamps"""
        if xml is not None:
            super(MolecularPopulation, self).__init__()
            self._load_population_from_xml(xml)
        else:
            super(MolecularPopulation, self).__init__(population=population)
        if reactions is not None:
            self._sample_reactions(reactions, size)

    def _load_population_from_xml(self, xml):
        """Populate the t=0 quantities from <setvalue item=... quantity=...> elements.

        :param xml: XML document string
        :raises ValueError: if the document contains no population data"""
        logging.info("Loading population from XML")
        self.set_t(0)
        # NOTE(review): the loop variable deliberately shadows the `xml`
        # parameter; the original string is only needed for fromstring().
        for xml in ElementTree.fromstring(xml).findall('setvalue'):
            item = xml.attrib['item'].encode("utf-8")
            # make items canonical via Molecule! Important! Otherwise N(=O)[O], say, in XML will be a different element from [O]N=O
            self.set_quantity(item=Chem.MolToSmiles(Molecule(item)), quantity=int(xml.attrib['quantity']))
        if len(self._index) == 0 or len(self._t) == 0:
            raise ValueError('No population found in XML')
        else:
            logging.info("Population loaded - {} unique items, {} total population size".format(len(self._index), self.get_population_size()))

    def _apply_reactions(self, reactions):
        """Replay every reaction in order: each reactant loses one unit of
        quantity and each product gains one unit, at the reaction's time."""
        count = 1
        for reaction in reactions:
            self.set_t(reaction['t'])  # use count rather than 'iteration' as collisions don't increment 'iteration' but do appear in reactions list
            for reactant in reaction['reactants']:
                self.set_quantity(reactant['smiles'], self.get_quantity(reactant['smiles']) - 1)
            for product in reaction['products']:
                self.set_quantity(product['smiles'], self.get_quantity(product['smiles']) + 1)
            if count % 1000 == 0:
                logging.debug("Added reactions up to {}".format(count))
            count += 1

    def _sample_reactions(self, const_reactions, size):
        """Apply a set of reactions in form of a list of reactions, where each
        reaction = {'iteration':iteration, 'reactants':[reactants], 'products':[products]}
        Quantities of each reactant and each product are assumed to be 1

        :param size: the maximum number of iterations that will be found in the final population.
        We conduct a sample at regular intervals. If None then no sampling is done.
        """
        # Work on a copy so the caller's reaction list is not consumed.
        reactions = const_reactions[:]
        logging.debug("Calculating unique elements...")
        elements = set(self.get_items())  # COPY initial items
        for reaction in reactions:
            elements.update([product['smiles'] for product in reaction['products']])
        number_of_unique_elements = len(elements)
        logging.debug("Resizing population to {} elements".format(number_of_unique_elements))
        # Grow the quantity matrix up-front so every product has a column.
        incremental_shape = (0, max(0, number_of_unique_elements - self._population.shape[1]))
        self._expand_population(incremental_shape)
        logging.debug("Applying reactions...this may take some time...")
        count_t = len(set([reaction['t'] for reaction in reactions]))
        if size == 0 or count_t < size:
            # Few enough distinct time-stamps: replay everything verbatim.
            self._apply_reactions(reactions)
        else:
            # Down-sample: replay reactions in a scratch population and copy
            # only one snapshot per time block into self.
            final_t = reactions[-1]['t']
            block_size = final_t / (size * 1.0)
            sample_population = MolecularPopulation(population=self.get_last_slice())  # start with an initial population
            self._population = self._population[0:len(self._t), :]  # because using vstack to add on samples start with just the initial population
            logging.debug("Shape is now {}, and sample block_size is {}".format(self._population.shape, block_size))
            # Reverse so pop() yields reactions in chronological order.
            reactions.reverse()
            block_end = block_size
            while len(reactions) > 0:
                reaction = reactions.pop()
                sample_population.set_t(reaction['t'])
                for reactant in reaction['reactants']:
                    sample_population.set_quantity(reactant['smiles'], sample_population.get_quantity(reactant['smiles']) - 1)
                for product in reaction['products']:
                    sample_population.set_quantity(product['smiles'], sample_population.get_quantity(product['smiles']) + 1)
                # print(reaction['t'], block_end)
                if reaction['t'] >= block_end or len(reactions) == 0:
                    block_end += block_size
                    # transfer final state of this block to the real population
                    # at this point we can be sure that the final_population and the full population (in self)
                    # are of the same 'y' dimension (elements) and that any elements in self will be in the same
                    # position in final_population (as it is an incremental copy) therefore we can do the transfer by
                    # 1) updating set_t of self, 2) appending the nparray from final_population to the nparray
                    # of self and 3) replacing the index of self by the updated index of final_population
                    final_population = sample_population.get_last_slice()
                    self._t.append(final_population.get_times()[-1])  # should only be one time in the final_population...
                    self._population = np.vstack((self._population, final_population._population))
                    self._index = final_population._index
                    # logging.info("Full population now of size {}".format(self._population.shape))
                    # reset the sample population, ready for next block of reactions
                    del sample_population
                    sample_population = MolecularPopulation(population=final_population.get_last_slice())
        logging.info("Reactions applied")

    def _get_molecules_matching_pattern(self, pattern):
        """Find all items in the population that contain the provided molecular pattern.

        :param pattern: A Mol descriptor of the pattern we are to attempt to match
        :type pattern: Mol
        :rtype: list of SMILES items in population matching the pattern"""
        matches = [x for x in self._index if Chem.MolFromSmiles(x).HasSubstructMatch(pattern)]
        return matches

    def count_molecules_matching_pattern(self, pattern):
        """Count the number of molecules which contain the given fragment.

        :param pattern: A Mol descriptor of the pattern we're interested in
        :type pattern: Mol
        :rtype: int
        """
        quantities = [self.get_quantity(mol) for mol in self._get_molecules_matching_pattern(pattern)]
        return sum(quantities)

    def choose_molecule(self, pattern):
        """Select one reactant molecule (type Mol) with probability proportional to a molecule's share in the population.

        :param pattern: pattern of interest
        :type pattern: Mol
        :rtype: Mol
        """
        logging.debug("Choosing molecule matching {}".format(Chem.MolToSmiles(pattern)))
        candidates = self._get_molecules_matching_pattern(pattern)
        # Roulette-wheel selection weighted by each candidate's quantity.
        upper_bound = 0
        for mol in candidates:
            upper_bound += self.get_quantity(mol)
        bound = random.random() * upper_bound
        cumulative_sum = 0
        for mol in candidates:
            cumulative_sum += self.get_quantity(mol)
            if cumulative_sum > bound:
                logging.debug("Molecule chosen")
                return Chem.MolFromSmiles(mol)
        # NOTE(review): implicitly returns None when there are no candidates
        # (upper_bound == 0) — callers should handle that case.
|
th0mmeke/toyworld
|
molecular_population.py
|
Python
|
gpl-3.0
| 8,174
|
[
"RDKit"
] |
33b40426f4127f7fad35400db132e1c3dd46ed20909ebe873b02690507272527
|
from collections import OrderedDict
import numpy as np
from robosuite.environments.manipulation.single_arm_env import SingleArmEnv
from robosuite.models.arenas import TableArena
from robosuite.models.objects import BoxObject
from robosuite.models.tasks import ManipulationTask
from robosuite.utils.mjcf_utils import CustomMaterial
from robosuite.utils.observables import Observable, sensor
from robosuite.utils.placement_samplers import UniformRandomSampler
from robosuite.utils.transform_utils import convert_quat
class Stack(SingleArmEnv):
"""
This class corresponds to the stacking task for a single robot arm.
Args:
robots (str or list of str): Specification for specific robot arm(s) to be instantiated within this env
(e.g: "Sawyer" would generate one arm; ["Panda", "Panda", "Sawyer"] would generate three robot arms)
Note: Must be a single single-arm robot!
env_configuration (str): Specifies how to position the robots within the environment (default is "default").
For most single arm environments, this argument has no impact on the robot setup.
controller_configs (str or list of dict): If set, contains relevant controller parameters for creating a
custom controller. Else, uses the default controller for this specific task. Should either be single
dict if same controller is to be used for all robots or else it should be a list of the same length as
"robots" param
gripper_types (str or list of str): type of gripper, used to instantiate
gripper models from gripper factory. Default is "default", which is the default grippers(s) associated
with the robot(s) the 'robots' specification. None removes the gripper, and any other (valid) model
overrides the default gripper. Should either be single str if same gripper type is to be used for all
robots or else it should be a list of the same length as "robots" param
initialization_noise (dict or list of dict): Dict containing the initialization noise parameters.
The expected keys and corresponding value types are specified below:
:`'magnitude'`: The scale factor of uni-variate random noise applied to each of a robot's given initial
joint positions. Setting this value to `None` or 0.0 results in no noise being applied.
If "gaussian" type of noise is applied then this magnitude scales the standard deviation applied,
If "uniform" type of noise is applied then this magnitude sets the bounds of the sampling range
:`'type'`: Type of noise to apply. Can either specify "gaussian" or "uniform"
Should either be single dict if same noise value is to be used for all robots or else it should be a
list of the same length as "robots" param
:Note: Specifying "default" will automatically use the default noise settings.
Specifying None will automatically create the required dict with "magnitude" set to 0.0.
table_full_size (3-tuple): x, y, and z dimensions of the table.
table_friction (3-tuple): the three mujoco friction parameters for
the table.
use_camera_obs (bool): if True, every observation includes rendered image(s)
use_object_obs (bool): if True, include object (cube) information in
the observation.
reward_scale (None or float): Scales the normalized reward function by the amount specified.
If None, environment reward remains unnormalized
reward_shaping (bool): if True, use dense rewards.
placement_initializer (ObjectPositionSampler): if provided, will
be used to place objects on every reset, else a UniformRandomSampler
is used by default.
has_renderer (bool): If true, render the simulation state in
a viewer instead of headless mode.
has_offscreen_renderer (bool): True if using off-screen rendering
render_camera (str): Name of camera to render if `has_renderer` is True. Setting this value to 'None'
will result in the default angle being applied, which is useful as it can be dragged / panned by
the user using the mouse
render_collision_mesh (bool): True if rendering collision meshes in camera. False otherwise.
render_visual_mesh (bool): True if rendering visual meshes in camera. False otherwise.
render_gpu_device_id (int): corresponds to the GPU device id to use for offscreen rendering.
Defaults to -1, in which case the device will be inferred from environment variables
(GPUS or CUDA_VISIBLE_DEVICES).
control_freq (float): how many control signals to receive in every second. This sets the amount of
simulation time that passes between every action input.
horizon (int): Every episode lasts for exactly @horizon timesteps.
ignore_done (bool): True if never terminating the environment (ignore @horizon).
hard_reset (bool): If True, re-loads model, sim, and render object upon a reset call, else,
only calls sim.reset and resets all robosuite-internal variables
camera_names (str or list of str): name of camera to be rendered. Should either be single str if
same name is to be used for all cameras' rendering or else it should be a list of cameras to render.
:Note: At least one camera must be specified if @use_camera_obs is True.
:Note: To render all robots' cameras of a certain type (e.g.: "robotview" or "eye_in_hand"), use the
convention "all-{name}" (e.g.: "all-robotview") to automatically render all camera images from each
robot's camera list).
camera_heights (int or list of int): height of camera frame. Should either be single int if
same height is to be used for all cameras' frames or else it should be a list of the same length as
"camera names" param.
camera_widths (int or list of int): width of camera frame. Should either be single int if
same width is to be used for all cameras' frames or else it should be a list of the same length as
"camera names" param.
camera_depths (bool or list of bool): True if rendering RGB-D, and RGB otherwise. Should either be single
bool if same depth setting is to be used for all cameras or else it should be a list of the same length as
"camera names" param.
camera_segmentations (None or str or list of str or list of list of str): Camera segmentation(s) to use
for each camera. Valid options are:
`None`: no segmentation sensor used
`'instance'`: segmentation at the class-instance level
`'class'`: segmentation at the class level
`'element'`: segmentation at the per-geom level
If not None, multiple types of segmentations can be specified. A [list of str / str or None] specifies
[multiple / a single] segmentation(s) to use for all cameras. A list of list of str specifies per-camera
segmentation setting(s) to use.
Raises:
AssertionError: [Invalid number of robots specified]
"""
def __init__(
    self,
    robots,
    env_configuration="default",
    controller_configs=None,
    gripper_types="default",
    initialization_noise="default",
    table_full_size=(0.8, 0.8, 0.05),
    table_friction=(1.0, 5e-3, 1e-4),
    use_camera_obs=True,
    use_object_obs=True,
    reward_scale=1.0,
    reward_shaping=False,
    placement_initializer=None,
    has_renderer=False,
    has_offscreen_renderer=True,
    render_camera="frontview",
    render_collision_mesh=False,
    render_visual_mesh=True,
    render_gpu_device_id=-1,
    control_freq=20,
    horizon=1000,
    ignore_done=False,
    hard_reset=True,
    camera_names="agentview",
    camera_heights=256,
    camera_widths=256,
    camera_depths=False,
    camera_segmentations=None,  # {None, instance, class, element}
    renderer="mujoco",
    renderer_config=None,
):
    """Initialize the stacking environment.

    See the class docstring for the semantics of every parameter; this
    constructor only stores the task-specific settings and forwards the
    rest to SingleArmEnv.
    """
    # NOTE: task-specific attributes must be assigned *before* calling
    # super().__init__(), which builds the simulation and reads them
    # (e.g. _load_model uses table_full_size / table_friction / table_offset).
    # settings for table top
    self.table_full_size = table_full_size
    self.table_friction = table_friction
    self.table_offset = np.array((0, 0, 0.8))
    # reward configuration
    self.reward_scale = reward_scale
    self.reward_shaping = reward_shaping
    # whether to use ground-truth object states
    self.use_object_obs = use_object_obs
    # object placement initializer
    self.placement_initializer = placement_initializer
    super().__init__(
        robots=robots,
        env_configuration=env_configuration,
        controller_configs=controller_configs,
        mount_types="default",
        gripper_types=gripper_types,
        initialization_noise=initialization_noise,
        use_camera_obs=use_camera_obs,
        has_renderer=has_renderer,
        has_offscreen_renderer=has_offscreen_renderer,
        render_camera=render_camera,
        render_collision_mesh=render_collision_mesh,
        render_visual_mesh=render_visual_mesh,
        render_gpu_device_id=render_gpu_device_id,
        control_freq=control_freq,
        horizon=horizon,
        ignore_done=ignore_done,
        hard_reset=hard_reset,
        camera_names=camera_names,
        camera_heights=camera_heights,
        camera_widths=camera_widths,
        camera_depths=camera_depths,
        camera_segmentations=camera_segmentations,
        renderer=renderer,
        renderer_config=renderer_config,
    )
def reward(self, action):
    """
    Reward function for the task.

    Sparse un-normalized reward: a discrete reward of 2.0 is provided if the
    red block is stacked on the green block.

    With reward shaping, the reward is the max over the staged terms
    (reaching+grasping, lifting+aligning, stacking) from staged_rewards().

    The final value is scaled by reward_scale / 2.0 so a successful stack
    scores exactly reward_scale.

    Args:
        action (np array): [NOT USED]

    Returns:
        float: reward value
    """
    r_reach, r_lift, r_stack = self.staged_rewards()
    # Dense variant: best of the staged terms; sparse: success bonus only.
    if self.reward_shaping:
        total = max(r_reach, r_lift, r_stack)
    else:
        total = 2.0 if r_stack > 0 else 0.0
    # Normalize so the maximum achievable reward equals reward_scale.
    if self.reward_scale is not None:
        total *= self.reward_scale / 2.0
    return total
def staged_rewards(self):
    """
    Compute staged sub-rewards from the current simulator state.

    Returns:
        3-tuple:
            - (float): reach + grasp component, in [0, 0.5]
            - (float): lift + align component, in [0, 1.5]
            - (float): stack component, in {0, 2}
    """
    pos_a = self.sim.data.body_xpos[self.cubeA_body_id]
    pos_b = self.sim.data.body_xpos[self.cubeB_body_id]
    eef_pos = self.sim.data.site_xpos[self.robots[0].eef_site_id]

    # reach: shaped by the gripper-site-to-cubeA distance
    r_reach = 0.25 * (1 - np.tanh(10.0 * np.linalg.norm(eef_pos - pos_a)))

    # grasp bonus when the gripper is holding cubeA
    holding_a = self._check_grasp(gripper=self.robots[0].gripper, object_geoms=self.cubeA)
    if holding_a:
        r_reach += 0.25

    # lift: cubeA clearly above the table surface (4 cm margin)
    lifted = pos_a[2] > self.table_offset[2] + 0.04
    r_lift = 1.0 if lifted else 0.0

    # align: shaped by the horizontal cubeA-to-cubeB gap, only once lifted
    if lifted:
        xy_gap = np.linalg.norm(np.array(pos_a[:2]) - np.array(pos_b[:2]))
        r_lift += 0.5 * (1 - np.tanh(xy_gap))

    # stack: the cubes touch while the gripper has released cubeA
    touching = self.check_contact(self.cubeA, self.cubeB)
    r_stack = 2.0 if (touching and not holding_a and r_lift > 0) else 0

    return r_reach, r_lift, r_stack
def _load_model(self):
    """
    Build the MJCF task model (table arena + robot + the two cubes) and
    store it in ``self.model``.
    """
    super()._load_model()

    # Place the robot base relative to the table length
    base_pos = self.robots[0].robot_model.base_xpos_offset["table"](self.table_full_size[0])
    self.robots[0].robot_model.set_base_xpos(base_pos)

    # Table-top workspace; the arena is always anchored at the world origin
    arena = TableArena(
        table_full_size=self.table_full_size,
        table_friction=self.table_friction,
        table_offset=self.table_offset,
    )
    arena.set_origin([0, 0, 0])

    # Shared texture/material attributes for both cube materials
    tex_attrib = {"type": "cube"}
    mat_attrib = {"texrepeat": "1 1", "specular": "0.4", "shininess": "0.1"}
    redwood = CustomMaterial(
        texture="WoodRed",
        tex_name="redwood",
        mat_name="redwood_mat",
        tex_attrib=tex_attrib,
        mat_attrib=mat_attrib,
    )
    greenwood = CustomMaterial(
        texture="WoodGreen",
        tex_name="greenwood",
        mat_name="greenwood_mat",
        tex_attrib=tex_attrib,
        mat_attrib=mat_attrib,
    )

    # Red cube (the one to be stacked) is slightly smaller than the green base cube
    self.cubeA = BoxObject(
        name="cubeA",
        size_min=[0.02, 0.02, 0.02],
        size_max=[0.02, 0.02, 0.02],
        rgba=[1, 0, 0, 1],
        material=redwood,
    )
    self.cubeB = BoxObject(
        name="cubeB",
        size_min=[0.025, 0.025, 0.025],
        size_max=[0.025, 0.025, 0.025],
        rgba=[0, 1, 0, 1],
        material=greenwood,
    )
    cubes = [self.cubeA, self.cubeB]

    # Reuse a caller-supplied placement sampler; otherwise fall back to a
    # uniform sampler over a small square on the table top
    if self.placement_initializer is None:
        self.placement_initializer = UniformRandomSampler(
            name="ObjectSampler",
            mujoco_objects=cubes,
            x_range=[-0.08, 0.08],
            y_range=[-0.08, 0.08],
            rotation=None,
            ensure_object_boundary_in_range=False,
            ensure_valid_placement=True,
            reference_pos=self.table_offset,
            z_offset=0.01,
        )
    else:
        self.placement_initializer.reset()
        self.placement_initializer.add_objects(cubes)

    # Complete manipulation task: arena + robot model(s) + objects of interest
    self.model = ManipulationTask(
        mujoco_arena=arena,
        mujoco_robots=[robot.robot_model for robot in self.robots],
        mujoco_objects=cubes,
    )
def _setup_references(self):
    """
    Cache MuJoCo body ids for the two cubes, on top of the references
    resolved by the superclass.  These ids index into the flat simulation
    state arrays (e.g. ``sim.data.body_xpos``).
    """
    super()._setup_references()
    # Additional object references from this env
    name2id = self.sim.model.body_name2id
    self.cubeA_body_id = name2id(self.cubeA.root_body)
    self.cubeB_body_id = name2id(self.cubeB.root_body)
def _reset_internal(self):
    """
    Reset internal simulation state, re-sampling object placements unless a
    deterministic reset (direct xml load) was requested.
    """
    super()._reset_internal()
    if self.deterministic_reset:
        return
    # Sample fresh poses for every object and write them into the sim state
    # (each free joint's qpos is [x, y, z, qw, qx, qy, qz])
    for obj_pos, obj_quat, obj in self.placement_initializer.sample().values():
        qpos = np.concatenate([np.array(obj_pos), np.array(obj_quat)])
        self.sim.data.set_joint_qpos(obj.joints[0], qpos)
def _setup_observables(self):
    """
    Sets up observables to be used for this environment. Creates object-based
    observables if enabled.

    Note: each sensor's ``__name__`` becomes the observable's key, so the
    inner function names below are part of the observation-space interface.

    Returns:
        OrderedDict: Dictionary mapping observable names to its corresponding Observable object
    """
    observables = super()._setup_observables()
    # low-level object information
    if self.use_object_obs:
        # Get robot prefix and define observables modality
        pf = self.robots[0].robot_model.naming_prefix
        modality = "object"

        # world-frame position and rotation of the first cube
        @sensor(modality=modality)
        def cubeA_pos(obs_cache):
            return np.array(self.sim.data.body_xpos[self.cubeA_body_id])

        @sensor(modality=modality)
        def cubeA_quat(obs_cache):
            # convert from MuJoCo's (w, x, y, z) ordering to (x, y, z, w)
            return convert_quat(np.array(self.sim.data.body_xquat[self.cubeA_body_id]), to="xyzw")

        @sensor(modality=modality)
        def cubeB_pos(obs_cache):
            return np.array(self.sim.data.body_xpos[self.cubeB_body_id])

        @sensor(modality=modality)
        def cubeB_quat(obs_cache):
            return convert_quat(np.array(self.sim.data.body_xquat[self.cubeB_body_id]), to="xyzw")

        # relative-position sensors read other sensors' values out of
        # obs_cache; they fall back to zeros until those entries exist
        @sensor(modality=modality)
        def gripper_to_cubeA(obs_cache):
            return (
                obs_cache["cubeA_pos"] - obs_cache[f"{pf}eef_pos"]
                if "cubeA_pos" in obs_cache and f"{pf}eef_pos" in obs_cache
                else np.zeros(3)
            )

        @sensor(modality=modality)
        def gripper_to_cubeB(obs_cache):
            return (
                obs_cache["cubeB_pos"] - obs_cache[f"{pf}eef_pos"]
                if "cubeB_pos" in obs_cache and f"{pf}eef_pos" in obs_cache
                else np.zeros(3)
            )

        @sensor(modality=modality)
        def cubeA_to_cubeB(obs_cache):
            return (
                obs_cache["cubeB_pos"] - obs_cache["cubeA_pos"]
                if "cubeA_pos" in obs_cache and "cubeB_pos" in obs_cache
                else np.zeros(3)
            )

        sensors = [cubeA_pos, cubeA_quat, cubeB_pos, cubeB_quat, gripper_to_cubeA, gripper_to_cubeB, cubeA_to_cubeB]
        names = [s.__name__ for s in sensors]

        # Create observables, all sampled at the control frequency
        for name, s in zip(names, sensors):
            observables[name] = Observable(
                name=name,
                sensor=s,
                sampling_rate=self.control_freq,
            )
    return observables
def _check_success(self):
"""
Check if blocks are stacked correctly.
Returns:
bool: True if blocks are correctly stacked
"""
_, _, r_stack = self.staged_rewards()
return r_stack > 0
def visualize(self, vis_settings):
    """
    Run the superclass visualization, then additionally tint the gripper
    site according to its distance to the red cube when gripper
    visualization is enabled.

    Args:
        vis_settings (dict): Visualization keywords mapped to T/F; must
            contain the "grippers" keyword along with any other relevant
            options.
    """
    super().visualize(vis_settings=vis_settings)
    if not vis_settings["grippers"]:
        return
    self._visualize_gripper_to_target(gripper=self.robots[0].gripper, target=self.cubeA)
|
ARISE-Initiative/robosuite
|
robosuite/environments/manipulation/stack.py
|
Python
|
mit
| 20,724
|
[
"Gaussian"
] |
44010f02d0aacb718510f9f548b2483b6d31ee2a67c092905d0452d78bedb896
|
"""
Test a variety of different buttons.
"""
from aloe.testing import FeatureTest
from aloe_webdriver.tests.base import feature
# pylint:disable=line-too-long
class TestSteps(FeatureTest):
    """
    Test steps.

    NOTE: each method's docstring is not documentation — the ``@feature()``
    decorator executes it as a Gherkin scenario against the browser, so the
    scenario text must be kept verbatim.  Scenarios come in pairs: press a
    control by its ``name`` attribute, then by its visible value/text.
    """

    # submit buttons
    @feature()
    def test_press_of_submit_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "submit_button"
        Then I should see "You pressed the submit button"
        """

    @feature()
    def test_press_of_submit_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Submit button"
        Then I should see "You pressed the submit button"
        """

    # reset buttons
    @feature()
    def test_press_of_reset_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "reset_button"
        Then I should see "You pressed the reset button"
        """

    @feature()
    def test_press_of_reset_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Reset button"
        Then I should see "You pressed the reset button"
        """

    # <input type="button"> controls
    @feature()
    def test_press_of_input_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "input_button"
        Then I should see "You pressed the input button"
        """

    @feature()
    def test_press_of_input_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Input button"
        Then I should see "You pressed the input button"
        """

    # image buttons (only addressable by name — no visible text value)
    @feature()
    def test_press_of_image_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "image_button"
        Then I should see "You pressed the image button"
        """

    # <button> elements
    @feature()
    def test_press_of_button_element_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "button_element"
        Then I should see "You pressed the button element"
        """

    @feature()
    def test_press_of_button_element_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Button element"
        Then I should see "You pressed the button element"
        """

    # anchors styled as buttons
    @feature()
    def test_press_of_anchor_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "anchor_button"
        Then I should see "You pressed the anchor button"
        """

    @feature()
    def test_press_of_anchor_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Anchor button"
        Then I should see "You pressed the anchor button"
        """

    # non-form elements acting as buttons: div, span, paragraph
    @feature()
    def test_press_of_div_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "div_button"
        Then I should see "You pressed the div button"
        """

    @feature()
    def test_press_of_div_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Div button"
        Then I should see "You pressed the div button"
        """

    @feature()
    def test_press_of_span_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "span_button"
        Then I should see "You pressed the span button"
        """

    @feature()
    def test_press_of_span_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Span button"
        Then I should see "You pressed the span button"
        """

    @feature()
    def test_press_of_paragraph_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "paragraph_button"
        Then I should see "You pressed the paragraph button"
        """

    @feature()
    def test_press_of_paragraph_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Paragraph button"
        Then I should see "You pressed the paragraph button"
        """
|
aloetesting/aloe_webdriver
|
aloe_webdriver/tests/test_buttons.py
|
Python
|
mit
| 4,147
|
[
"VisIt"
] |
9ea98c5b3e5179419e109e6d6ddf22b271101ed2849651d7efaaebd9a335de5e
|
# Modified: Richar J. Hall, 07/2010 rjhall@berkeley.edu
# Author: Pawel A.Penczek, 09/09/2006 (Pawel.A.Penczek@uth.tmc.edu)
# Copyright (c) 2000-2006 The University of Texas - Houston Medical School
#
# This software is issued under a joint BSD/GNU license. You may use the
# source code in this file under either license. However, note that the
# complete EMAN2 and SPARX software packages have some GPL dependencies,
# so you are responsible for compliance with the licenses of these packages
# if you opt to use BSD licensing. The warranty disclaimer below holds
# in either instance.
#
# This complete copyright notice must be included in any revised version of the
# source code. Additional authorship citations may be added, but existing
# author citations must be preserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
from EMAN2_cppwrap import *
from global_def import *
import sys
import types
def ali3d(stack, ref_vol, outdir, maskfile = None, ir = 1, ou = -1, rs = 1,
	xr = "4 2 2 1", yr = "-1", ts = "1 1 0.5 0.25", delta = "10 6 4 4", an = "-1",
	center = 0, maxit = 5, term = 95, CTF = False, snr = 1.0, ref_a = "S", sym = "c1",
	sort=True, cutoff=999.99, pix_cutoff="0", two_tail=False, model_jump = "1 1 1 1 1", restart=False, save_half=False,
	protos=None, oplane=None, lmask=-1, ilmask=-1, findseam=False, vertstep=None, hpars="-1", hsearch="73.0 170.0",
	full_output = False, compare_repro = False, compare_ref_free = "-1" ,ref_free_cutoff = "-1 -1 -1 -1", debug = False, recon_pad = 4, MPI = False):
	"""
	Front-end for 3D projection-matching refinement (SPARX/EMAN2, Python 2 era).

	When MPI is True, all arguments are forwarded unchanged to ali3d_MPI and
	this function returns immediately afterwards.

	NOTE(review): the serial (MPI=False) path performs no alignment at all —
	it only prints the end-of-run message.  Presumably this build is
	MPI-only; confirm before relying on the serial code path.
	"""
	if MPI:
		# delegate the entire run to the MPI implementation
		ali3d_MPI(stack, ref_vol, outdir, maskfile, ir, ou, rs, xr, yr, ts,
	        	delta, an, center, maxit,term, CTF, snr, ref_a, sym,
			sort, cutoff, pix_cutoff, two_tail, model_jump,restart, save_half,
			protos, oplane, lmask, ilmask, findseam, vertstep, hpars, hsearch,
			full_output, compare_repro, compare_ref_free, ref_free_cutoff, debug, recon_pad)
		return

	print_end_msg("ali3d")
def ali3d_MPI(stack, ref_vol, outdir, maskfile = None, ir = 1, ou = -1, rs = 1,
xr = "4 2 2 1", yr = "-1", ts = "1 1 0.5 0.25", delta = "10 6 4 4", an = "-1",
center = 0, maxit = 5, term = 95, CTF = False, snr = 1.0, ref_a = "S", sym = "c1",
sort=True, cutoff=999.99, pix_cutoff="0", two_tail=False, model_jump="1 1 1 1 1", restart=False, save_half=False,
protos=None, oplane=None, lmask=-1, ilmask=-1, findseam=False, vertstep=None, hpars="-1", hsearch="73.0 170.0",
full_output = False, compare_repro = False, compare_ref_free = "-1", ref_free_cutoff= "-1 -1 -1 -1", debug = False, recon_pad = 4):
from alignment import Numrinit, prepare_refrings
from utilities import model_circle, get_image, drop_image, get_input_from_string
from utilities import bcast_list_to_all, bcast_number_to_all, reduce_EMData_to_root, bcast_EMData_to_all
from utilities import send_attr_dict
from utilities import get_params_proj, file_type
from fundamentals import rot_avg_image
import os
import types
from utilities import print_begin_msg, print_end_msg, print_msg
from mpi import mpi_bcast, mpi_comm_size, mpi_comm_rank, MPI_FLOAT, MPI_COMM_WORLD, mpi_barrier, mpi_reduce
from mpi import mpi_reduce, MPI_INT, MPI_SUM, mpi_finalize
from filter import filt_ctf
from projection import prep_vol, prgs
from statistics import hist_list, varf3d_MPI
from numpy import array, bincount, array2string, ones
number_of_proc = mpi_comm_size(MPI_COMM_WORLD)
myid = mpi_comm_rank(MPI_COMM_WORLD)
main_node = 0
if myid == main_node:
if os.path.exists(outdir): ERROR('Output directory exists, please change the name and restart the program', "ali3d_MPI", 1)
os.mkdir(outdir)
mpi_barrier(MPI_COMM_WORLD)
if debug:
from time import sleep
while not os.path.exists(outdir):
print "Node ",myid," waiting..."
sleep(5)
info_file = os.path.join(outdir, "progress%04d"%myid)
finfo = open(info_file, 'w')
else:
finfo = None
mjump = get_input_from_string(model_jump)
xrng = get_input_from_string(xr)
if yr == "-1": yrng = xrng
else : yrng = get_input_from_string(yr)
step = get_input_from_string(ts)
delta = get_input_from_string(delta)
ref_free_cutoff = get_input_from_string(ref_free_cutoff)
pix_cutoff = get_input_from_string(pix_cutoff)
lstp = min(len(xrng), len(yrng), len(step), len(delta))
if an == "-1":
an = [-1] * lstp
else:
an = get_input_from_string(an)
# make sure pix_cutoff is set for all iterations
if len(pix_cutoff)<lstp:
for i in xrange(len(pix_cutoff),lstp):
pix_cutoff.append(pix_cutoff[-1])
# don't waste time on sub-pixel alignment for low-resolution ang incr
for i in range(len(step)):
if (delta[i] > 3 or delta[i] == -1) and step[i] < 1:
step[i] = 1
first_ring = int(ir)
rstep = int(rs)
last_ring = int(ou)
max_iter = int(maxit)
center = int(center)
nrefs = EMUtil.get_image_count( ref_vol )
nmods = 0
if maskfile:
# read number of masks within each maskfile (mc)
nmods = EMUtil.get_image_count( maskfile )
# open masks within maskfile (mc)
maskF = EMData.read_images(maskfile, xrange(nmods))
vol = EMData.read_images(ref_vol, xrange(nrefs))
nx = vol[0].get_xsize()
# for helical processing:
helicalrecon = False
if protos is not None or hpars != "-1" or findseam is True:
helicalrecon = True
# if no out-of-plane param set, use 5 degrees
if oplane is None:
oplane=5.0
if protos is not None:
proto = get_input_from_string(protos)
if len(proto) != nrefs:
print_msg("Error: insufficient protofilament numbers supplied")
sys.exit()
if hpars != "-1":
hpars = get_input_from_string(hpars)
if len(hpars) != 2*nrefs:
print_msg("Error: insufficient helical parameters supplied")
sys.exit()
## create helical paramter file for helical reconstruction
if helicalrecon is True:
# create initial helical parameter files
for iref in xrange(nrefs):
hpar = os.path.join(outdir,"hpar%02d.spi"%(iref))
params = False
if hpars != "-1":
# if helical parameters explicitly given, set twist & rise
params = [float(hpars[iref*2]),float(hpars[(iref*2)+1])]
if not os.path.isfile(hpar):
create_hpar(hpar,proto[iref],params,vertstep)
# get values for helical search parameters
hsearch = get_input_from_string(hsearch)
if len(hsearch) != 2:
print_msg("Error: specify outer and inner radii for helical search")
sys.exit()
if last_ring < 0 or last_ring > int(nx/2)-2 : last_ring = int(nx/2) - 2
if myid == main_node:
# import user_functions
# user_func = user_functions.factory[user_func_name]
print_begin_msg("ali3d_MPI")
print_msg("Input stack : %s\n"%(stack))
print_msg("Reference volume : %s\n"%(ref_vol))
print_msg("Output directory : %s\n"%(outdir))
if nmods > 0:
print_msg("Maskfile (number of masks) : %s (%i)\n"%(maskfile,nmods))
print_msg("Inner radius : %i\n"%(first_ring))
print_msg("Outer radius : %i\n"%(last_ring))
print_msg("Ring step : %i\n"%(rstep))
print_msg("X search range : %s\n"%(xrng))
print_msg("Y search range : %s\n"%(yrng))
print_msg("Translational step : %s\n"%(step))
print_msg("Angular step : %s\n"%(delta))
print_msg("Angular search range : %s\n"%(an))
print_msg("Maximum iteration : %i\n"%(max_iter))
print_msg("Center type : %i\n"%(center))
print_msg("CTF correction : %s\n"%(CTF))
print_msg("Signal-to-Noise Ratio : %f\n"%(snr))
print_msg("Reference projection method : %s\n"%(ref_a))
print_msg("Symmetry group : %s\n"%(sym))
print_msg("Fourier padding for 3D : %i\n"%(recon_pad))
print_msg("Number of reference models : %i\n"%(nrefs))
print_msg("Sort images between models : %s\n"%(sort))
print_msg("Allow images to jump : %s\n"%(mjump))
print_msg("CC cutoff standard dev : %f\n"%(cutoff))
print_msg("Two tail cutoff : %s\n"%(two_tail))
print_msg("Termination pix error : %f\n"%(term))
print_msg("Pixel error cutoff : %s\n"%(pix_cutoff))
print_msg("Restart : %s\n"%(restart))
print_msg("Full output : %s\n"%(full_output))
print_msg("Compare reprojections : %s\n"%(compare_repro))
print_msg("Compare ref free class avgs : %s\n"%(compare_ref_free))
print_msg("Use cutoff from ref free : %s\n"%(ref_free_cutoff))
if protos:
print_msg("Protofilament numbers : %s\n"%(proto))
print_msg("Using helical search range : %s\n"%hsearch)
if findseam is True:
print_msg("Using seam-based reconstruction\n")
if hpars != "-1":
print_msg("Using hpars : %s\n"%hpars)
if vertstep != None:
print_msg("Using vertical step : %.2f\n"%vertstep)
if save_half is True:
print_msg("Saving even/odd halves\n")
for i in xrange(100) : print_msg("*")
print_msg("\n\n")
if maskfile:
if type(maskfile) is types.StringType: mask3D = get_image(maskfile)
else: mask3D = maskfile
else: mask3D = model_circle(last_ring, nx, nx, nx)
numr = Numrinit(first_ring, last_ring, rstep, "F")
mask2D = model_circle(last_ring,nx,nx) - model_circle(first_ring,nx,nx)
fscmask = model_circle(last_ring,nx,nx,nx)
if CTF:
from reconstruction_rjh import rec3D_MPI
from filter import filt_ctf
else: from reconstruction_rjh import rec3D_MPI_noCTF
if myid == main_node:
active = EMUtil.get_all_attributes(stack, 'active')
list_of_particles = []
for im in xrange(len(active)):
if active[im]: list_of_particles.append(im)
del active
nima = len(list_of_particles)
else:
nima = 0
total_nima = bcast_number_to_all(nima, source_node = main_node)
# nima_per_ref = total_nima / nrefs
if myid != main_node:
list_of_particles = [-1]*total_nima
list_of_particles = bcast_list_to_all(list_of_particles, source_node = main_node)
image_start, image_end = MPI_start_end(total_nima, number_of_proc, myid)
# create a list of images for each node
list_of_particles = list_of_particles[image_start: image_end]
nima = len(list_of_particles)
if debug:
finfo.write("image_start, image_end: %d %d\n" %(image_start, image_end))
finfo.flush()
data = EMData.read_images(stack, list_of_particles)
t_zero = Transform({"type":"spider","phi":0,"theta":0,"psi":0,"tx":0,"ty":0})
transmulti = [[t_zero for i in xrange(nrefs)] for j in xrange(nima)]
for im in xrange( nima ):
transmulti[im][0] = data[im].get_attr("xform.projection")
scoremulti = [[0.0 for i in xrange(nrefs)] for j in xrange(nima)]
pixelmulti = [[0.0 for i in xrange(nrefs)] for j in xrange(nima)]
ref_res = [0.0 for x in xrange(nrefs)]
apix = data[0].get_attr('apix_x')
# for oplane parameter, create cylindrical mask
if oplane is not None and myid == main_node:
mask3D = createCylMask(data,ou,lmask,ilmask)
# if finding seam of helix, create wedge masks
if findseam is True:
wedgemask=[]
for pf in xrange(nrefs):
wedgemask.append(EMData())
for im in xrange(nima):
data[im].set_attr('ID', list_of_particles[im])
data[im].set_attr('pix_score', int(0))
if CTF:
ctf_params = data[im].get_attr("ctf")
st = Util.infomask(data[im], mask2D, False)
data[im] -= st[0]
data[im] = filt_ctf(data[im], ctf_params, sign = -1)
data[im].set_attr('ctf_applied', 1)
if debug:
finfo.write( '%d loaded \n' % nima )
finfo.flush()
if myid == main_node:
# initialize data for the reference preparation function
ref_data = [ mask3D, max(center,0), None, None, None, None ]
# for method -1, switch off centering in user function
from time import time
# this is needed for gathering of pixel errors
disps = []
recvcount = []
disps_score = []
recvcount_score = []
for im in xrange(number_of_proc):
if( im == main_node ):
disps.append(0)
disps_score.append(0)
else:
disps.append(disps[im-1] + recvcount[im-1])
disps_score.append(disps_score[im-1] + recvcount_score[im-1])
ib, ie = MPI_start_end(total_nima, number_of_proc, im)
recvcount.append( ie - ib )
recvcount_score.append((ie-ib)*nrefs)
pixer = [0.0]*nima
cs = [0.0]*3
total_iter = 0
volodd = EMData.read_images(ref_vol, xrange(nrefs))
voleve = EMData.read_images(ref_vol, xrange(nrefs))
if restart:
if CTF: vol[0], fscc, volodd[0], voleve[0] = rec3D_MPI(data, snr, sym, fscmask, os.path.join(outdir, "fsc_000_00"), myid, main_node, index = -1,npad = recon_pad)
else: vol[0], fscc, volodd[0], voleve[0] = rec3D_MPI_noCTF(data, sym, fscmask, os.path.join(outdir, "fsc_000_00"), myid, main_node, index = -1, npad = recon_pad)
if myid == main_node:
vol[0].write_image(os.path.join(outdir, "vol_000_00.hdf"),-1)
if save_half is True:
volodd[0].write_image(os.path.join(outdir, "volodd_000_00.hdf"),-1)
voleve[0].write_image(os.path.join(outdir, "voleve_000_00.hdf"),-1)
ref_data[2] = vol[0]
ref_data[3] = fscc
# call user-supplied function to prepare reference image, i.e., center and filter it
vol[0], cs,fl = ref_ali3d(ref_data)
vol[0].write_image(os.path.join(outdir, "volf_000_00.hdf"),-1)
if (apix == 1):
res_msg = "Models filtered at spatial frequency of:\t"
res = fl
else:
res_msg = "Models filtered at resolution of: \t"
res = apix / fl
ares = array2string(array(res), precision = 2)
print_msg("%s%s\n\n"%(res_msg,ares))
bcast_EMData_to_all(vol[0], myid, main_node)
# write out headers, under MPI writing has to be done sequentially
mpi_barrier(MPI_COMM_WORLD)
# projection matching
for N_step in xrange(lstp):
# if compare_ref_free == "-1":
# ref_free_cutoff[N_step] =-1
# print ref_free_cutoff
terminate = 0
Iter = -1
while(Iter < max_iter-1 and terminate == 0):
Iter += 1
total_iter += 1
itout = "%03g_%02d" %(delta[N_step], Iter)
if myid == main_node:
start_time = time()
print_msg("ITERATION #%3d, inner iteration #%3d\nDelta = %4.1f, an = %5.2f, xrange = %5.2f, yrange = %5.2f, step = %5.2f\n\n"%(N_step, Iter, delta[N_step], an[N_step], xrng[N_step],yrng[N_step],step[N_step]))
for iref in xrange(nrefs):
volft,kb = prep_vol( vol[iref] )
## constrain projections to out of plane parameter
if oplane is not None:
refrings = prepare_refringsHelical( volft, kb, nx, delta[N_step], ref_a, oplane, numr, sym, True)
else:
refrings = prepare_refrings( volft, kb, nx, delta[N_step], ref_a, sym, numr, True)
del volft,kb
#R if myid== main_node:
#R print_msg( "Time to prepare rings: %d\n" % (time()-start_time) )
#R start_time = time()
for im in xrange( nima ):
if an[N_step] == -1:
data[im].set_attr("xform.projection", transmulti[im][iref])
t1, peak, pixer[im] = proj_ali_incore(data[im],refrings,numr,xrng[N_step],yrng[N_step],step[N_step],finfo)
else:
data[im].set_attr("xform.projection", transmulti[im][iref])
t1, peak, pixer[im] = proj_ali_incore_local(data[im],refrings,numr,xrng[N_step],yrng[N_step],step[N_step],an[N_step],finfo)
data[im].set_attr("xform.projection", t1)
scoremulti[im][iref] = peak
from pixel_error import max_3D_pixel_error
# t1 is the current param
#t1 = data[im].get_attr("xform.projection")
t2 = transmulti[im][iref]
pixelmulti[im][iref] = max_3D_pixel_error(t1,t2,numr[-3])
transmulti[im][iref] = t1
scoremultisend = sum(scoremulti,[])
pixelmultisend = sum(pixelmulti,[])
from mpi import mpi_gatherv
tmp = mpi_gatherv(scoremultisend,len(scoremultisend),MPI_FLOAT, recvcount_score, disps_score, MPI_FLOAT, main_node,MPI_COMM_WORLD)
tmp1 = mpi_gatherv(pixelmultisend,len(pixelmultisend),MPI_FLOAT, recvcount_score, disps_score, MPI_FLOAT, main_node,MPI_COMM_WORLD)
tmp = mpi_bcast(tmp,(total_nima * nrefs), MPI_FLOAT,0, MPI_COMM_WORLD)
tmp1 = mpi_bcast(tmp1,(total_nima * nrefs), MPI_FLOAT,0, MPI_COMM_WORLD)
tmp = map(float,tmp)
tmp1 = map(float,tmp1)
score = array(tmp).reshape(-1,nrefs)
pixelerror = array(tmp1).reshape(-1,nrefs)
score_local = array(scoremulti)
mean_score = score.mean(axis=0)
std_score = score.std(axis=0)
cut = mean_score - (cutoff * std_score)
cut2 = mean_score + (cutoff * std_score)
res_max = score_local.argmax(axis=1)
minus_cc = [0.0 for x in xrange(nrefs)]
minus_pix = [0.0 for x in xrange(nrefs)]
minus_ref = [0.0 for x in xrange(nrefs)]
#output pixel errors
if(myid == main_node):
from statistics import hist_list
lhist = 20
pixmin = pixelerror.min(axis=1)
region, histo = hist_list(pixmin, lhist)
if(region[0] < 0.0): region[0] = 0.0
print_msg("Histogram of pixel errors\n ERROR number of particles\n")
for lhx in xrange(lhist):
print_msg(" %10.3f %7d\n"%(region[lhx], histo[lhx]))
# Terminate if 95% within 1 pixel error
im = 0
for lhx in xrange(lhist):
if(region[lhx] > 1.0): break
im += histo[lhx]
print_msg( "Percent of particles with pixel error < 1: %f\n\n"% (im/float(total_nima)*100))
term_cond = float(term)/100
if(im/float(total_nima) > term_cond):
terminate = 1
print_msg("Terminating internal loop\n")
del region, histo
terminate = mpi_bcast(terminate, 1, MPI_INT, 0, MPI_COMM_WORLD)
terminate = int(terminate[0])
for im in xrange(nima):
if(sort==False):
data[im].set_attr('group',999)
elif (mjump[N_step]==1):
data[im].set_attr('group',int(res_max[im]))
pix_run = data[im].get_attr('pix_score')
if (pix_cutoff[N_step]==1 and (terminate==1 or Iter == max_iter-1)):
if (pixelmulti[im][int(res_max[im])] > 1):
data[im].set_attr('pix_score',int(777))
if (score_local[im][int(res_max[im])]<cut[int(res_max[im])]) or (two_tail and score_local[im][int(res_max[im])]>cut2[int(res_max[im])]):
data[im].set_attr('group',int(888))
minus_cc[int(res_max[im])] = minus_cc[int(res_max[im])] + 1
if(pix_run == 777):
data[im].set_attr('group',int(777))
minus_pix[int(res_max[im])] = minus_pix[int(res_max[im])] + 1
if (compare_ref_free != "-1") and (ref_free_cutoff[N_step] != -1) and (total_iter > 1):
id = data[im].get_attr('ID')
if id in rejects:
data[im].set_attr('group',int(666))
minus_ref[int(res_max[im])] = minus_ref[int(res_max[im])] + 1
minus_cc_tot = mpi_reduce(minus_cc,nrefs,MPI_FLOAT,MPI_SUM,0,MPI_COMM_WORLD)
minus_pix_tot = mpi_reduce(minus_pix,nrefs,MPI_FLOAT,MPI_SUM,0,MPI_COMM_WORLD)
minus_ref_tot = mpi_reduce(minus_ref,nrefs,MPI_FLOAT,MPI_SUM,0,MPI_COMM_WORLD)
if (myid == main_node):
if(sort):
tot_max = score.argmax(axis=1)
res = bincount(tot_max)
else:
res = ones(nrefs) * total_nima
print_msg("Particle distribution: \t\t%s\n"%(res*1.0))
afcut1 = res - minus_cc_tot
afcut2 = afcut1 - minus_pix_tot
afcut3 = afcut2 - minus_ref_tot
print_msg("Particle distribution after cc cutoff:\t\t%s\n"%(afcut1))
print_msg("Particle distribution after pix cutoff:\t\t%s\n"%(afcut2))
print_msg("Particle distribution after ref cutoff:\t\t%s\n\n"%(afcut3))
res = [0.0 for i in xrange(nrefs)]
for iref in xrange(nrefs):
if(center == -1):
from utilities import estimate_3D_center_MPI, rotate_3D_shift
dummy=EMData()
cs[0], cs[1], cs[2], dummy, dummy = estimate_3D_center_MPI(data, total_nima, myid, number_of_proc, main_node)
#R if myid == main_node:
#R msg = " Average center x = %10.3f Center y = %10.3f Center z = %10.3f\n"%(cs[0], cs[1], cs[2])
#R print_msg(msg)
cs = mpi_bcast(cs, 3, MPI_FLOAT, main_node, MPI_COMM_WORLD)
cs = [-float(cs[0]), -float(cs[1]), -float(cs[2])]
rotate_3D_shift(data, cs)
if(sort):
group = iref
for im in xrange(nima):
imgroup = data[im].get_attr('group')
if imgroup == iref:
data[im].set_attr('xform.projection',transmulti[im][iref])
else:
group = int(999)
for im in xrange(nima):
data[im].set_attr('xform.projection',transmulti[im][iref])
if(nrefs == 1):
modout = ""
else:
modout = "_model_%02d"%(iref)
## for helical reconstruction set up param dictionary
hparams=None
if helicalrecon is True:
hparams = {}
hpar = os.path.join(outdir,"hpar%02d.spi"%(iref))
hparams['hfile'] = hpar
hparams['apix'] = apix
hparams['lmask'] = lmask
hparams['seam'] = findseam
hparams['vert'] = vertstep
# inner radius for helical search
hparams['isearch'] = hsearch[0]
# outer radius for helical search
hparams['osearch'] = hsearch[1]
hparams['nosymout'] = os.path.join(outdir,"volNoSym_%s.hdf"%(itout))
## save a copy of the last iteration's volume for aligning after himpose
if findseam is not True:
oldvol = vol[iref].copy()
if CTF:
vol[iref], fscc, volodd[iref], voleve[iref] = rec3D_MPI(data, snr, sym, fscmask, os.path.join(outdir, "fsc_%s%s"%(itout,modout)), myid, main_node, index = group, npad = recon_pad, hparams=hparams )
else:
vol[iref], fscc, volodd[iref], voleve[iref] = rec3D_MPI_noCTF(data, sym, fscmask, os.path.join(outdir, "fsc_%s%s"%(itout,modout)), myid, main_node, index = group, npad = recon_pad, hparams=hparams )
#R if myid == main_node:
#R print_msg("3D reconstruction time = %d\n"%(time()-start_time))
#R start_time = time()
if myid == main_node:
if helicalrecon:
if findseam is True:
from reconstruction_rjh import applyHsym_seam
vol[iref].write_image(os.path.join(outdir, "volOverSym_%s.hdf"%(itout)),-1)
# mask out tubulin & apply sym again for seam
# have to make a new wedgemask for each iteration
wedgemask[iref]=createWedgeMask(nx,proto[iref],apix,hpar)
vol[iref] = applyHsym_seam(vol[iref],wedgemask[iref],hpar,apix)
else:
from reconstruction_rjh import align3Dvols
vol[iref] = align3Dvols(oldvol,vol[iref],apix)
volmsk = vol[iref]*mask3D
volmsk = vol[iref]
volmsk.write_image(os.path.join(outdir, "vol_%s.hdf"%(itout)),-1)
if save_half is True:
volh = volodd[iref]*mask3D
volh.write_image(os.path.join(outdir, "volodd_%s.hdf"%(itout)),-1)
volh = voleve[iref]*mask3D
volh.write_image(os.path.join(outdir, "voleve_%s.hdf"%(itout)),-1)
del volh
else:
vol[iref].write_image(os.path.join(outdir, "vol_%s.hdf"%(itout)),-1)
if save_half is True:
volodd[iref].write_image(os.path.join(outdir, "volodd_%s.hdf"%(itout)),-1)
voleve[iref].write_image(os.path.join(outdir, "voleve_%s.hdf"%(itout)),-1)
if nmods > 1:
# Read mask for multiplying
ref_data[0] = maskF[iref]
ref_data[2] = vol[iref]
ref_data[3] = fscc
# call user-supplied function to prepare reference image, i.e., center and filter it
vol[iref], cs,fl = ref_ali3d(ref_data)
vol[iref].write_image(os.path.join(outdir, "volf_%s.hdf"%(itout)),-1)
if (apix == 1):
res_msg = "Models filtered at spatial frequency of:\t"
res[iref] = fl
else:
res_msg = "Models filtered at resolution of: \t"
res[iref] = apix / fl
bcast_EMData_to_all(vol[iref], myid, main_node)
if compare_ref_free != "-1": compare_repro = True
if compare_repro:
outfile_repro = comp_rep(refrings, data, itout, modout, vol[iref], group, nima, nx, myid, main_node, outdir)
mpi_barrier(MPI_COMM_WORLD)
if compare_ref_free != "-1":
ref_free_output = os.path.join(outdir,"ref_free_%s%s"%(itout,modout))
rejects = compare(compare_ref_free, outfile_repro,ref_free_output,yrng[N_step], xrng[N_step], rstep,nx,apix,ref_free_cutoff[N_step], number_of_proc, myid, main_node)
par_str = ['xform.projection','ID','group']
if myid == main_node:
# start_time = time()
from utilities import recv_attr_dict
recv_attr_dict(main_node, stack, data, par_str, image_start, image_end, number_of_proc)
# print_msg("File write time = %d\n"%(time()-start_time))
else: send_attr_dict(main_node, data, par_str, image_start, image_end)
if myid == main_node:
ares = array2string(array(res), precision = 2)
print_msg("%s%s\n\n"%(res_msg,ares))
dummy = EMData()
if full_output:
nimat = EMUtil.get_image_count(stack)
output_file = os.path.join(outdir, "paramout_%s"%itout)
foutput = open(output_file, 'w')
for im in xrange(nimat):
dummy.read_image(stack,im,True)
param3d = dummy.get_attr('xform.projection')
# retrieve alignments in EMAN-format
paramEMAN = param3d.get_params('eman')
g = dummy.get_attr("group")
outstring = "%f\t%f\t%f\t%f\t%f\t%i\n" %(paramEMAN["az"], paramEMAN["alt"], paramEMAN["phi"], paramEMAN["tx"], paramEMAN["ty"], g)
foutput.write(outstring)
foutput.close()
del dummy
mpi_barrier(MPI_COMM_WORLD)
# mpi_finalize()
if myid == main_node: print_end_msg("ali3d_MPI")
def MPI_start_end(nima, nproc, myid):
	"""Split nima items as evenly as possible across nproc MPI ranks.
	Returns the half-open interval [image_start, image_end) owned by rank
	myid; boundaries are proportional (rounded), so the intervals of ranks
	0..nproc-1 exactly tile range(nima).
	"""
	share = float(nima) / nproc
	image_start = int(round(share * myid))
	image_end = int(round(share * (myid + 1)))
	return image_start, image_end
def ref_ali3d( ref_data ):
	"""Prepare the reference volume for the next alignment iteration.
	ref_data layout (by index):
	  0: 3D mask volume (multiplied into the reference)
	  1: center flag; 1 means recenter the volume by its phase center of gravity
	  2: current reference volume
	  3: FSC curve used to fit the tanh low-pass filter
	The volume is normalized under the mask (zero mean, unit variance),
	masked, low-pass filtered with a tanh filter fitted to the FSC, and
	optionally recentered.
	Returns:
		(volf, cs, fl): filtered volume, applied center shift [x, y, z]
		(zeros when no centering was done), and the fitted filter cutoff.
	"""
	from filter import fit_tanh, filt_tanl
	from fundamentals import fshift
	# Fix: removed a dead computation of `fl` via cmp("dot", ...) whose
	# result was immediately overwritten by fit_tanh below, and the unused
	# print_msg/threshold imports.
	cs = [0.0]*3
	stat = Util.infomask(ref_data[2], ref_data[0], False)
	volf = ref_data[2] - stat[0]
	Util.mul_scalar(volf, 1.0/stat[1])
	Util.mul_img(volf, ref_data[0])
	# fit a tanh low-pass filter to the FSC curve and apply it
	fl, aa = fit_tanh(ref_data[3])
	volf = filt_tanl(volf, fl, aa)
	volf.process_inplace("normalize")
	if ref_data[1] == 1:
		# recenter on the phase center of gravity
		cs = volf.phase_cog()
		volf = fshift(volf, -cs[0], -cs[1], -cs[2])
	return volf, cs, fl
def comp_rep(refrings, data, itout, modout, vol, group, nima, nx, myid, main_node, outdir):
	"""Build per-reference class averages and write them beside reprojections.
	For every reference orientation, the member particles assigned to it
	(and belonging to `group`) are 2D-aligned and summed; the sums are
	MPI-reduced to the main node, which writes each average followed by the
	matching projection of `vol` into "compare_repro_<itout><modout>.hdf",
	and the projection alone into "repro_<itout><modout>.hdf".
	Returns the path of the reprojection stack.
	"""
	from fundamentals import rot_shift2D
	from utilities import get_params_proj, params_3D_2D
	from mpi import mpi_reduce, MPI_COMM_WORLD, MPI_FLOAT, MPI_SUM
	# one empty average (and member count) per reference orientation
	avg = [EMData() for i in xrange(len(refrings))]
	avg_csum = [0.0 for i in xrange(len(refrings))]
	for i in xrange(len(refrings)):
		avg[i] = EMData()
		avg[i].set_size(nx,nx)
		phi = refrings[i].get_attr("phi")
		theta = refrings[i].get_attr("theta")
		# tag the average with the reference's projection direction (psi=0)
		t = Transform({"type":"spider","phi":phi,"theta":theta,"psi":0.0})
		avg[i].set_attr("xform.projection",t)
	# accumulate aligned members of this group into their reference's average
	for im in xrange(nima):
		iref = data[im].get_attr("assign")
		gim = data[im].get_attr("group")
		if gim == group:
			[phi, theta, psi, s2x, s2y] = get_params_proj(data[im])
			[alpha, sx,sy,mirror] = params_3D_2D(phi,theta,psi,s2x,s2y)
			temp = rot_shift2D(data[im],alpha, sx, sy, mirror, 1.0)
			avg[iref] = avg[iref] + temp
			avg_csum[iref] = avg_csum[iref] + 1
	from utilities import reduce_EMData_to_root
	for i in xrange(len(refrings)):
		# sum partial averages and member counts across all ranks
		reduce_EMData_to_root(avg[i], myid, main_node)
		avg_sum = mpi_reduce(avg_csum[i],1,MPI_FLOAT,MPI_SUM,0,MPI_COMM_WORLD)
		outfile_repro = os.path.join(outdir, "repro_%s%s.hdf"%(itout,modout))
		if myid ==0:
			outfile = os.path.join(outdir, "compare_repro_%s%s.hdf"%(itout,modout))
			avg[i].write_image(outfile,-1)
			t = avg[i].get_attr("xform.projection")
			# reproject the current volume along the same direction
			proj = vol.project("pawel",t)
			proj.set_attr("xform.projection",t)
			proj.set_attr("Raw_im_count", float(avg_sum))
			proj.write_image(outfile,-1)
			proj.write_image(outfile_repro,-1)
	return outfile_repro
def compare(compare_ref_free, outfile_repro,ref_free_output,yrng, xrng, rstep,nx,apix,ref_free_cutoff, nproc, myid, main_node):
	"""Align reference-free class averages to model reprojections and score them.
	Each image in `compare_ref_free` is aligned (multiref polar alignment)
	to the reprojections in `outfile_repro`; aligned image/reference pairs
	are appended to <ref_free_output>.hdf, per-image FRC curves go to
	<ref_free_output>_frc and tanh-fitted resolutions to <ref_free_output>_res.
	When ref_free_cutoff != -1, classes whose resolution score falls below
	mean - (std/2)*ref_free_cutoff are collected into `rejects`.
	Returns the list of rejected particle indices (built on the main node).
	"""
	from alignment import Numrinit, ringwe, Applyws
	from random import seed, randint
	from utilities import get_params2D, set_params2D, model_circle, inverse_transform2, combine_params2
	from fundamentals import rot_shift2D
	from mpi import MPI_COMM_WORLD, mpi_barrier, mpi_bcast, MPI_INT
	from statistics import fsc_mask
	from filter import fit_tanh
	from numpy import array
	fout = "%s.hdf" % ref_free_output
	frc_out = "%s_frc" % ref_free_output
	res_out = "%s_res" % ref_free_output
	nima = EMUtil.get_image_count(compare_ref_free)
	image_start, image_end = MPI_start_end(nima, nproc, myid)
	ima = EMData()
	ima.read_image(compare_ref_free, image_start)
	last_ring = nx/2-2
	first_ring = 1
	mask = model_circle(last_ring, nx, nx)
	refi = []
	numref = EMUtil.get_image_count(outfile_repro)
	# polar transform center in SPIDER convention
	cnx = nx/2 +1
	cny = cnx
	mode = "F"
	numr = Numrinit(first_ring, last_ring, rstep, mode)
	wr = ringwe(numr, mode)
	ima.to_zero()
	# load the reprojection references
	for j in xrange(numref):
		temp = EMData()
		temp.read_image(outfile_repro, j)
		# even, odd, number of even, number of images. After frc, totav
		refi.append(temp)
	# for each node read its share of data
	data = EMData.read_images(compare_ref_free, range(image_start, image_end))
	for im in xrange(image_start, image_end):
		data[im-image_start].set_attr('ID', im)
		set_params2D(data[im-image_start],[0,0,0,0,1])
	# convert references to weighted polar rings for the aligner
	ringref = []
	for j in xrange(numref):
		refi[j].process_inplace("normalize.mask", {"mask":mask, "no_sigma":1}) # normalize reference images to N(0,1)
		cimage = Util.Polar2Dm(refi[j], cnx, cny, numr, mode)
		Util.Frngs(cimage, numr)
		Applyws(cimage, numr, wr)
		ringref.append(cimage)
	if myid == main_node: seed(1000)
	data_shift = []
	frc = []
	res = []
	for im in xrange(image_start, image_end):
		alpha, sx, sy, mirror, scale = get_params2D(data[im-image_start])
		alphai, sxi, syi, scalei = inverse_transform2(alpha, sx, sy, 1.0)
		# normalize
		data[im-image_start].process_inplace("normalize.mask", {"mask":mask, "no_sigma":1}) # subtract average under the mask
		# align current image to the reference
		[angt, sxst, syst, mirrort, xiref, peakt] = Util.multiref_polar_ali_2d(data[im-image_start], ringref, xrng, yrng, 1, mode, numr, cnx+sxi, cny+syi)
		iref = int(xiref)
		[alphan, sxn, syn, mn] = combine_params2(0.0, -sxi, -syi, 0, angt, sxst, syst, (int)(mirrort))
		set_params2D(data[im-image_start], [alphan, sxn, syn, int(mn), scale])
		temp = rot_shift2D(data[im-image_start], alphan, sxn, syn, mn)
		temp.set_attr('assign',iref)
		# score agreement of the aligned image with its matched reference
		tfrc = fsc_mask(temp,refi[iref],mask = mask)
		temp.set_attr('frc',tfrc[1])
		res = fit_tanh(tfrc)
		temp.set_attr('res',res)
		data_shift.append(temp)
	# serialize writes: each rank appends its (image, reference) pairs in turn
	for node in xrange(nproc):
		if myid == node:
			for image in data_shift:
				image.write_image(fout,-1)
				refindex = image.get_attr('assign')
				refi[refindex].write_image(fout,-1)
		mpi_barrier(MPI_COMM_WORLD)
	rejects = []
	if myid == main_node:
		a = EMData()
		index = 0
		frc = []
		res = []
		temp = []
		classes = []
		# re-read the interleaved stack: images live at even indices
		for im in xrange(nima):
			a.read_image(fout, index)
			frc.append(a.get_attr("frc"))
			if ref_free_cutoff != -1: classes.append(a.get_attr("class_ptcl_idxs"))
			tmp = a.get_attr("res")
			temp.append(tmp[0])
			res.append("%12f" %(apix/tmp[0]))
			res.append("\n")
			index = index + 2
		res_num = array(temp)
		mean_score = res_num.mean(axis=0)
		std_score = res_num.std(axis=0)
		std = std_score / 2
		if ref_free_cutoff !=-1:
			# reject classes scoring below mean - (std/2)*cutoff
			cutoff = mean_score - std * ref_free_cutoff
			reject = res_num < cutoff
			index = 0
			for i in reject:
				if i: rejects.extend(classes[index])
				index = index + 1
			rejects.sort()
		# NOTE(review): these broadcasts execute only inside the main_node
		# branch, so non-main ranks never call mpi_bcast here and return an
		# empty rejects list — confirm callers expect this asymmetry.
		length = mpi_bcast(len(rejects),1,MPI_INT,main_node, MPI_COMM_WORLD)
		rejects = mpi_bcast(rejects,length , MPI_INT, main_node, MPI_COMM_WORLD)
		del a
		fout_frc = open(frc_out,'w')
		fout_res = open(res_out,'w')
		fout_res.write("".join(res))
		# transpose per-image FRC curves so each output row is one frequency
		temp = zip(*frc)
		datstrings = []
		for i in temp:
			for j in i:
				datstrings.append(" %12f" % (j))
			datstrings.append("\n")
		fout_frc.write("".join(datstrings))
		fout_frc.close()
		del refi
		del ringref
	return rejects
def proj_ali_incore(data, refrings, numr, xrng, yrng, step, finfo=None):
	"""Exhaustive projection matching of one image against reference rings.
	Finds the best-matching reference via polar cross-correlation over the
	translational search range, stores its index on the image as "assign",
	and converts the 2D alignment back into 3D projection parameters,
	mirroring the Eulerian angles when the match was mirrored.
	Args:
		data: image carrying "ID" and "xform.projection" attributes;
			"assign" is written.
		refrings: reference ring images with "phi"/"theta"/"psi" attributes.
		numr: ring table; numr[-3] (last ring radius) scales the pixel error.
		xrng, yrng, step: x/y shift search ranges and step size.
		finfo: optional open log file for per-image diagnostics.
	Returns:
		(t2, peak, pixel_error): proposed Transform, correlation peak, and
		maximum 3D pixel error between old and new orientations.
	"""
	from utilities import compose_transform2
	ID = data.get_attr("ID")
	if finfo:
		from utilities import get_params_proj
		phi, theta, psi, s2x, s2y = get_params_proj(data)
		finfo.write("Image id: %6d\n"%(ID))
		finfo.write("Old parameters: %9.4f %9.4f %9.4f %9.4f %9.4f\n"%(phi, theta, psi, s2x, s2y))
		finfo.flush()
	mode = "F"
	# center is in SPIDER convention
	nx = data.get_xsize()
	ny = data.get_ysize()
	cnx = nx//2 + 1
	cny = ny//2 + 1
	#phi, theta, psi, sxo, syo = get_params_proj(data)
	t1 = data.get_attr("xform.projection")
	dp = t1.get_params("spider")
	# get translations from data
	tx = dp["tx"]
	ty = dp["ty"]
	[ang, sxs, sys, mirror, iref, peak] = Util.multiref_polar_ali_2d(data, refrings, xrng, yrng, step, mode, numr, cnx+tx, cny+ty)
	iref = int(iref)
	data.set_attr("assign",iref)
	#[ang,sxs,sys,mirror,peak,numref] = apmq(projdata[imn], ref_proj_rings, xrng, yrng, step, mode, numr, cnx-sxo, cny-syo)
	#ang = (ang+360.0)%360.0
	# The ormqip returns parameters such that the transformation is applied first, the mirror operation second.
	# What that means is that one has to change the the Eulerian angles so they point into mirrored direction: phi+180, 180-theta, 180-psi
	angb, sxb, syb, ct = compose_transform2(0.0, sxs, sys, 1, -ang, 0.0, 0.0, 1)
	if mirror:
		phi = (refrings[iref].get_attr("phi")+540.0)%360.0
		theta = 180.0-refrings[iref].get_attr("theta")
		psi = (540.0-refrings[iref].get_attr("psi")+angb)%360.0
		s2x = sxb - tx
		s2y = syb - ty
	else:
		phi = refrings[iref].get_attr("phi")
		theta = refrings[iref].get_attr("theta")
		psi = (refrings[iref].get_attr("psi")+angb+360.0)%360.0
		s2x = sxb - tx
		s2y = syb - ty
	#set_params_proj(data, [phi, theta, psi, s2x, s2y])
	t2 = Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi})
	t2.set_trans(Vec2f(-s2x, -s2y))
	#data.set_attr("xform.projection", t2)
	from pixel_error import max_3D_pixel_error
	pixel_error = max_3D_pixel_error(t1, t2, numr[-3])
	if finfo:
		finfo.write( "New parameters: %9.4f %9.4f %9.4f %9.4f %9.4f %10.5f %11.3e\n\n" %(phi, theta, psi, s2x, s2y, peak, pixel_error))
		finfo.flush()
	return t2, peak, pixel_error
def proj_ali_incore_local(data, refrings, numr, xrng, yrng, step, an, finfo=None):
	"""Local projection matching restricted to an angular neighborhood.
	Like proj_ali_incore, but only references within `an` degrees of the
	image's current orientation are considered (via the cos(an) threshold
	passed to multiref_polar_ali_2d_local).
	Args:
		an: angular search restriction in degrees.
		(remaining arguments as in proj_ali_incore)
	Returns:
		(t2, peak, pixel_error) on success.
		NOTE(review): on failure (iref <= -1) a 2-tuple (-1.0e23, 0.0) is
		returned instead of a 3-tuple — confirm callers handle/expect the
		differing arity.
	"""
	from utilities import compose_transform2
	#from utilities import set_params_proj, get_params_proj
	from math import cos, sin, pi
	ID = data.get_attr("ID")
	mode = "F"
	nx = data.get_xsize()
	ny = data.get_ysize()
	# center is in SPIDER convention
	cnx = nx//2 + 1
	cny = ny//2 + 1
	# cosine of the allowed angular deviation, used by the local search
	ant = cos(an*pi/180.0)
	#phi, theta, psi, sxo, syo = get_params_proj(data)
	t1 = data.get_attr("xform.projection")
	dp = t1.get_params("spider")
	# get translations from data
	tx = dp["tx"]
	ty = dp["ty"]
	if finfo:
		finfo.write("Image id: %6d\n"%(ID))
		#finfo.write("Old parameters: %9.4f %9.4f %9.4f %9.4f %9.4f\n"%(phi, theta, psi, sxo, syo))
		finfo.write("Old parameters: %9.4f %9.4f %9.4f %9.4f %9.4f\n"%(dp["phi"], dp["theta"], dp["psi"], -tx, -ty))
		finfo.flush()
	#[ang, sxs, sys, mirror, iref, peak] = Util.multiref_polar_ali_2d_local(data, refrings, xrng, yrng, step, ant, mode, numr, cnx-sxo, cny-syo)
	[ang, sxs, sys, mirror, iref, peak] = Util.multiref_polar_ali_2d_local(data, refrings, xrng, yrng, step, ant, mode, numr, cnx+tx, cny+ty)
	iref=int(iref)
	#[ang,sxs,sys,mirror,peak,numref] = apmq_local(projdata[imn], ref_proj_rings, xrng, yrng, step, ant, mode, numr, cnx-sxo, cny-syo)
	#ang = (ang+360.0)%360.0
	data.set_attr("assign",iref)
	if iref > -1:
		# The ormqip returns parameters such that the transformation is applied first, the mirror operation second.
		# What that means is that one has to change the the Eulerian angles so they point into mirrored direction: phi+180, 180-theta, 180-psi
		angb, sxb, syb, ct = compose_transform2(0.0, sxs, sys, 1, -ang, 0.0, 0.0, 1)
		if mirror:
			phi = (refrings[iref].get_attr("phi")+540.0)%360.0
			theta = 180.0-refrings[iref].get_attr("theta")
			psi = (540.0-refrings[iref].get_attr("psi")+angb)%360.0
			s2x = sxb - tx
			s2y = syb - ty
		else:
			phi = refrings[iref].get_attr("phi")
			theta = refrings[iref].get_attr("theta")
			psi = (refrings[iref].get_attr("psi")+angb+360.0)%360.0
			s2x = sxb - tx
			s2y = syb - ty
		#set_params_proj(data, [phi, theta, psi, s2x, s2y])
		t2 = Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi})
		t2.set_trans(Vec2f(-s2x, -s2y))
		#data.set_attr("xform.projection", t2)
		from pixel_error import max_3D_pixel_error
		pixel_error = max_3D_pixel_error(t1, t2, numr[-3])
		if finfo:
			finfo.write( "New parameters: %9.4f %9.4f %9.4f %9.4f %9.4f %10.5f %11.3e\n\n" %(phi, theta, psi, s2x, s2y, peak, pixel_error))
			finfo.flush()
		return t2, peak, pixel_error
	else:
		return -1.0e23, 0.0
#===========================
def createCylMask(data,rmax,lmask,rmin):
	"""
	create a cylindrical mask with gaussian edges
	Builds an nx^3 volume that is 1 inside a cylinder along z and falls off
	with a raised-cosine edge outside rmax, inside rmin, and beyond the
	half-length along z; the mask is also written to 'mask3D_cyl.mrc'.
	Args:
		data: list of images; only data[0] is read for 'apix_x' and box size
		rmax: outer radius in pixels, or -1 to use 240 Angstroms / apix
		lmask: mask length in Angstroms (converted to a half-length in pixels)
		rmin: inner radius in Angstroms (absolute value is used)
	Returns the mask volume (EMData).
	"""
	from itertools import product
	import math
	apix = data[0].get_attr('apix_x')
	nx = data[0].get_xsize()
	## convert mask values to pixels
	lmask = int((lmask/apix)/2)
	rmin = int(abs(rmin)/apix)
	# NOTE(review): cylRadius is computed but never used below
	cylRadius = (nx/2)-2
	if rmax == -1:
		rmax = int(240/apix)
	# raised-cosine falloff widths: 40% of the corresponding extent
	falloff_outer = lmask*0.4
	falloff_inner = rmin*0.4
	## first create cylinder with inner & outer mask
	cyl = EMData(nx,nx,nx)
	for i in range(nx):
		mask=EMData(nx,nx)
		mask.to_one()
		## mask the inner & outer radii
		for x,y in product(range(nx),range(nx)):
			dx = abs(x-nx/2)
			dy = abs(y-nx/2)
			r2 = dx**2+dy**2
			if r2 > rmax*rmax:
				wt1 = 0.5*(1 + math.cos(math.pi*min(1,(math.sqrt(r2)-rmax)/falloff_outer)))
				mask.set(x,y,wt1)
			elif r2 < rmin*rmin:
				wt2 = 0.5*(1 + math.cos(math.pi*min(1,(rmin-math.sqrt(r2))/falloff_inner)))
				mask.set(x,y,wt2)
		## mask along length
		dz = abs(i-nx/2)
		if dz > lmask:
			wt3 = 0.5*(1+math.cos(math.pi*min(1,(dz-lmask)/falloff_outer)))
			mask.mult(wt3)
		cyl.insert_clip(mask,(0,0,i))
	cyl.write_image('mask3D_cyl.mrc')
	return cyl
#===========================
def createWedgeMask(nx,csym,apix,hfile):
	"""
	a hard-edged wedge, by Rui Zhang
	Builds a 3D wedge mask for one protofilament of a csym-start helix:
	a 2D angular wedge is rotated slice-by-slice along z according to the
	helical twist/rise read from hfile, combined with cylinder masks from
	kinesinMask, shifted by half a twist for odd csym, binarized, and also
	written to 'wedge_mask_p<csym>.mrc'.
	Args:
		nx: box size in pixels
		csym: protofilament count (rotational symmetry order)
		apix: pixel size in Angstroms
		hfile: helical parameter file parsed by readHsym (twist, rise, ...)
	Returns the binary wedge volume (EMData).
	"""
	import math
	from reconstruction_rjh import readHsym
	img = EMData(nx,nx)
	img.to_zero()
	#add 3 degrees to overlap with the neighboring density!
	overlap=3*math.pi/180.0
	# half-angle of the wedge: 90deg minus one csym sector, widened by the overlap
	alpha = math.pi/2 - math.pi/csym - overlap
	for x,y in ((x,y) for x in range(0,nx) for y in range(nx/2,nx)):
		dx = abs(x-nx/2)
		dy = abs(y-nx/2)
		# if above the line y = tan(alpha)*x
		if dy >= dx*math.tan(alpha):
			img.set(x,y,1)
	img.process_inplace("mask.sharp",{"outer_radius":nx/2})
	# get hparams from file
	twist,rise,vrot,vrise = readHsym(hfile)
	wedge = EMData(nx,nx,nx)
	# degrees of rotation per Angstrom of rise over a full csym repeat
	alpha = 360+(csym*twist)
	lrise = csym*rise
	rot = alpha/lrise*apix
	# sweep the 2D wedge along z, twisting each slice with the helix
	for z in range(nx):
		finalrot = ((z-nx/2)*rot)/3
		t=Transform()
		t.set_rotation({"type":"2d","alpha":-finalrot})
		newslice=img.process("xform",{"transform":t})
		wedge.insert_clip(newslice,(0,0,z))
	# combine with cylindrical masks (constants in Angstroms, scaled by apix);
	# presumably carves out / adds back the kinesin decoration region — TODO confirm
	wedge *= kinesinMask(nx,int(32/apix),54/apix,143/apix,rot)
	wedge += kinesinMask(nx,int(30/apix),24/apix,164/apix,rot,pos=True)
	# odd-numbered protofilaments are off by 1/2 twist
	if csym%2==1:
		t = Transform({"type":"spider","psi":twist/2})
		wedge.process_inplace("xform",{"transform":t})
	wedge.process_inplace("threshold.binary",{"value":0.00001})
	wedge.write_image('wedge_mask_p%d.mrc'%csym)
	return wedge
#===========================
def kinesinMask(nx,rad,cx,cy,rot,pos=False):
	"""Build a 3D mask by sweeping a hard-edged 2D disc along z.
	A 2D image gets a disc of radius `rad` placed relative to (cx, cy):
	by default zeros are punched into a field of ones; with pos=True ones
	are set on a zero field (with mirrored x placement). Each z-slice of
	the returned nx^3 volume is that image rotated by -((z-nx/2)*rot)/3
	degrees, matching the helical twist used in createWedgeMask.
	NOTE(review): set() targets nx/2-x / nx/2+x and nx/2+y, which can fall
	outside [0, nx) for large x,y — relies on EMData.set bounds handling;
	confirm intended.
	"""
	# hard-edged cylinder mask for kinesin position
	img = EMData(nx,nx)
	img.to_one()
	if pos is True:
		img.to_zero()
	for x,y in ((x,y) for x in range(nx) for y in range(nx)):
		dx = abs(x-cx)
		dy = abs(y-cy)
		r2 = dx**2+dy**2
		if r2 < rad*rad:
			if pos is True:
				img.set(nx/2-x,nx/2+y,1)
			else:
				img.set(nx/2+x,nx/2+y,0)
	#img.write_image('test.mrc')
	cylmask = EMData(nx,nx,nx)
	for z in range(nx):
		finalrot=((z-nx/2)*rot)/3
		t=Transform()
		t.set_rotation({"type":"2d","alpha":-finalrot})
		newslice=img.process("xform",{"transform":t})
		cylmask.insert_clip(newslice,(0,0,z))
	return cylmask
#===========================
def prepare_refringsHelical( volft, kb, nx, delta, ref_a, oplane, numr, sym='c1', MPI=False, phiEqpsi = "Minus"):
	"""
	prepare projections for helical processing
	rotation 180 degrees inplane & specified out-of-plane
	Generates reference angles covering ~180/csym degrees in phi at
	theta = 90 +/- out-of-plane tilts (in steps of delta up to +/- oplane),
	projects volft at each angle, converts the projections to weighted
	polar rings, and — under MPI — splits the projection work across ranks
	and broadcasts each finished ring from its owner. Every ring gets
	direction cosines "n1/n2/n3" and "phi/theta/psi" attributes.
	NOTE(review): ref_a and phiEqpsi are accepted for interface
	compatibility but are not used in this implementation.
	Returns the list of reference ring images.
	"""
	from alignment import ringwe, Applyws
	from projection import prgs
	from math import sin, cos, pi
	from applications import MPI_start_end
	from utilities import bcast_list_to_all, bcast_number_to_all, reduce_EMData_to_root, bcast_EMData_to_all
	import re
	# convert csym to integer:
	sym = int(re.sub("\D", "", sym))
	# generate list of Eulerian angles for reference projections
	# phi, theta, psi
	mode = "F"
	ref_angles = []
	inplane=int((179.99/sym)/delta) + 1
	# first create 0 and positive out-of-plane tilts
	i = 0
	while i < oplane:
		for j in xrange(inplane):
			t = j*delta
			ref_angles.append([t,90.0+i,90.0])
		i+=delta
	# negative out of plane rotation
	i = -(delta)
	while i > -(oplane):
		for j in xrange(inplane):
			t = j*delta
			ref_angles.append([t,90.0+i,90.0])
		i-=delta
	wr_four = ringwe(numr, mode)
	# polar transform center in SPIDER convention
	cnx = nx//2 + 1
	cny = nx//2 + 1
	qv = pi/180.
	num_ref = len(ref_angles)
	if MPI:
		from mpi import mpi_comm_rank, mpi_comm_size, MPI_COMM_WORLD
		myid = mpi_comm_rank( MPI_COMM_WORLD )
		ncpu = mpi_comm_size( MPI_COMM_WORLD )
	else:
		ncpu = 1
		myid = 0
	from applications import MPI_start_end
	ref_start,ref_end = MPI_start_end( num_ref, ncpu, myid )
	refrings = [] # list of (image objects) reference projections in Fourier representation
	sizex = numr[ len(numr)-2 ] + numr[ len(numr)-1 ] - 1
	# pre-allocate placeholders so every rank holds the full list
	for i in xrange(num_ref):
		prjref = EMData()
		prjref.set_size(sizex, 1, 1)
		refrings.append(prjref)
	# each rank computes only its share of the reference rings
	for i in xrange(ref_start, ref_end):
		prjref = prgs(volft, kb, [ref_angles[i][0], ref_angles[i][1], ref_angles[i][2], 0.0, 0.0])
		cimage = Util.Polar2Dm(prjref, cnx, cny, numr, mode) # currently set to quadratic....
		Util.Normalize_ring(cimage, numr)
		Util.Frngs(cimage, numr)
		Applyws(cimage, numr, wr_four)
		refrings[i] = cimage
	if MPI:
		from utilities import bcast_EMData_to_all
		# broadcast each ring from whichever rank computed it
		for i in xrange(num_ref):
			for j in xrange(ncpu):
				ref_start,ref_end = MPI_start_end(num_ref,ncpu,j)
				if i >= ref_start and i < ref_end: rootid = j
			bcast_EMData_to_all( refrings[i], myid, rootid )
	# attach direction cosines and Euler angles to every ring
	for i in xrange(len(ref_angles)):
		n1 = sin(ref_angles[i][1]*qv)*cos(ref_angles[i][0]*qv)
		n2 = sin(ref_angles[i][1]*qv)*sin(ref_angles[i][0]*qv)
		n3 = cos(ref_angles[i][1]*qv)
		refrings[i].set_attr_dict( {"n1":n1, "n2":n2, "n3":n3} )
		refrings[i].set_attr("phi", ref_angles[i][0])
		refrings[i].set_attr("theta", ref_angles[i][1])
		refrings[i].set_attr("psi", ref_angles[i][2])
	return refrings
#===========================
def create_hpar(hpar,pf,params=False,vertstep=None):
	"""
	create a helical symmetry file for Egelman's helical programs
	file is a spider-formatted text file listing the rise & turn in angstroms
	When params is False the twist/rise pair is taken from a table of
	known protofilament counts (11-16), falling back to (-360/pf, 10.0);
	otherwise params[0]/params[1] are written directly. If vertstep is
	given, a fixed -0.01 and the vertical step are appended.
	"""
	# twist (degrees) and rise (Angstroms) per protofilament count
	presets = {
		11: (-32.47, 11.08),
		12: (-29.88, 10.16),
		13: (-27.69, 9.39),
		14: (-25.77, 8.72),
		15: (-23.83, 10.81),
		16: (-22.4, 10.18),
	}
	if params is False:
		ang, rise = presets.get(pf, (-360.0/pf, 10.0))
	else:
		ang, rise = params[0], params[1]
	out = open(hpar, 'w')
	out.write("%.6f\t%.6f" % (ang, rise))
	if vertstep is not None:
		out.write("\t%.6f\t%.6f" % (-0.01, vertstep))
	out.write("\n")
	out.close()
|
mcianfrocco/Cianfrocco_et_al._2013
|
3D_projection_matching_EMAN2/functions.py
|
Python
|
mit
| 43,859
|
[
"Gaussian"
] |
64f8bb82945bedb2c4e104285cf01deea2f644799e5b0bb8290cf2514db15501
|
"""Test generation helpers
Intended to functionalize common tasks when working with the pytest_generate_tests hook.
When running a test, it is quite often the case that multiple parameters need to be passed
to a single test. An example of this would be the need to run a Provider Add test against
multiple providers. We will assume that the providers are stored in the yaml under a common
structure like so:
.. code-block:: yaml
providers:
prov_1:
name: test
ip: 10.0.0.1
test_vm: abc1
prov_2:
name: test2
ip: 10.0.0.2
test_vm: abc2
Our test requires that we have a Provider Object and as an example, the 'test_vm' field of the
object. Let's assume a test prototype like so::
test_provider_add(provider_obj, test_vm):
In this case we require the test to be run twice, once for prov_1 and then again for prov_2.
We are going to use the generate function to help us provide parameters to pass to
``pytest_generate_tests()``. ``pytest_generate_tests()`` requires three pieces of
information, ``argnames``, ``argvalues`` and an ``idlist``. ``argnames`` turns into the
names we use for fixtures. In this case, ``provider_obj`` and ``test_vm``.
``argvalues`` becomes the place where the ``provider_obj`` and ``test_vm``
items are stored. Each element of ``argvalues`` is a list containing a value for both
``provider_obj`` and ``test_vm``. Thus, taking an element from ``argvalues``
gives us the values to unpack to make up one test. An example is below, where we assume
that a provider object is obtained via the ``Provider`` class, and the ``test_vm`` value
is taken from the provider's yaml entry.
===== =============== =================
~ provider_obj test_vm
===== =============== =================
prov1 Provider(prov1) abc1
prov2 Provider(prov2) abc2
===== =============== =================
This is analogous to the following layout:
========= =============== ===============
~ argnames[0] argnames[1]
========= =============== ===============
idlist[0] argvalues[0][0] argvalues[0][1]
idlist[1] argvalues[1][0] argvalues[1][1]
========= =============== ===============
This could be generated like so:
.. code-block:: python
def gen_providers:
argnames = ['provider_obj', 'test_vm']
argvalues = []
idlist = []
for provider in yaml['providers']:
idlist.append(provider)
argvalues.append([
Provider(yaml['providers'][provider]['name']),
yaml['providers'][provider]['test_vm'])
])
return argnames, argvalues, idlist
This is then used with pytest_generate_tests like so::
pytest_generate_tests(gen_providers)
Additionally, py.test joins the values of ``idlist`` with dashes to generate a unique id for this
test, falling back to joining ``argnames`` with dashes if ``idlist`` is not set. This is the value
seen in square brackets in a test report on parametrized tests.
More information on ``parametrize`` can be found in pytest's documentation:
* https://pytest.org/latest/parametrize.html#_pytest.python.Metafunc.parametrize
"""
import pytest
from cfme.common.provider import BaseProvider
from cfme.infrastructure.config_management import get_config_manager_from_config
from cfme.roles import group_data
from cfme.utils.conf import cfme_data, auth_data
from cfme.utils.log import logger
from cfme.utils.providers import ProviderFilter, list_providers
def _param_check(metafunc, argnames, argvalues):
"""Helper function to check if parametrizing is necessary
* If no argnames were specified, parametrization is unnecessary.
* If argvalues were generated, parametrization is necessary.
* If argnames were specified, but no values were generated, the test cannot run successfully,
and will be uncollected using the :py:mod:`markers.uncollect` mark.
See usage in :py:func:`parametrize`
Args:
metafunc: metafunc objects from pytest_generate_tests
argnames: argnames list for use in metafunc.parametrize
argvalues: argvalues list for use in metafunc.parametrize
Returns:
* ``True`` if this test should be parametrized
* ``False`` if it shouldn't be parametrized
* ``None`` if the test will be uncollected
"""
# If no parametrized args were named, don't parametrize
if not argnames:
return False
# If parametrized args were named and values were generated, parametrize
elif any(argvalues):
return True
# If parametrized args were named, but no values were generated, mark this test to be
# removed from the test collection. Otherwise, py.test will try to find values for the
# items in argnames by looking in its fixture pool, which will almost certainly fail.
else:
# module and class are optional, but function isn't
modname = getattr(metafunc.module, '__name__', None)
classname = getattr(metafunc.cls, '__name__', None)
funcname = metafunc.function.__name__
test_name = '.'.join(filter(None, (modname, classname, funcname)))
uncollect_msg = 'Parametrization for {} yielded no values,'\
' marked for uncollection'.format(test_name)
logger.warning(uncollect_msg)
# apply the mark
pytest.mark.uncollect(reason=uncollect_msg)(metafunc.function)
def parametrize(metafunc, argnames, argvalues, *args, **kwargs):
    """parametrize wrapper that calls :py:func:`_param_check` first.

    Only parametrizes when needed; usable anywhere conditional
    parametrization is wanted. When the check fails on a test that expects
    a ``provider`` fixture, the test is marked for uncollection (unless a
    custom uncollect mark was already applied).
    """
    if _param_check(metafunc, argnames, argvalues):
        metafunc.parametrize(argnames, argvalues, *args, **kwargs)
        return
    if 'provider' not in metafunc.fixturenames:
        return
    # param check failed on a provider-parametrized test; keep any custom
    # uncollect message already set by _param_check, otherwise add ours
    if not hasattr(metafunc.function, 'uncollect'):
        pytest.mark.uncollect(
            reason="provider was not parametrized did you forget --use-provider?"
        )(metafunc.function)
def generate(*args, **kwargs):
    """Functional handler for inline pytest_generate_tests definition

    Args:
        gen_func: Test generator function, expected to return argnames, argvalues, and an idlist
            suitable for use with pytest's parametrize method in pytest_generate_tests hooks
        indirect: Optional keyword argument. If seen, it will be removed from the kwargs
            passed to gen_func and used in the wrapped pytest parametrize call
        scope: Optional keyword argument. If seen, it will be removed from the kwargs
            passed to gen_func and used in the wrapped pytest parametrize call
        filter_unused: Optional keyword argument. If True (the default), parametrized tests will
            be inspected, and only argnames matching fixturenames will be used to parametrize the
            test. If seen, it will be removed from the kwargs passed to gen_func.
        *args: Additional positional arguments which will be passed to ``gen_func``
        **kwargs: Additional keyword arguments which will be passed to ``gen_func``

    Usage:

        # Abstract example:
        pytest_generate_tests = testgen.generate(arg1, arg2, kwarg1='a')

        # Concrete example using all infrastructure providers and module scope
        pytest_generate_tests = testgen.generate([InfraProvider], scope="module")

        # Another concrete example using only VMware and SCVMM providers with 'retire' flag
        pf = ProviderFilter(
            classes=[VMwareProvider, SCVMMProvider], required_flags=['retire'])
        pytest_generate_tests = testgen.generate(
            gen_func=testgen.providers, filters=[pf], scope="module")

    Note:
        ``filter_unused`` is helpful, in that you don't have to accept all of the args in argnames
        in every test in the module. However, if all tests don't share one common parametrized
        argname, py.test may not have enough information to properly organize tests beyond the
        'function' scope. Thus, when parametrizing in the module scope, it's a good idea to include
        at least one common argname in every test signature to give pytest a clue in sorting tests.
    """
    # Pull out/default kwargs for this function and parametrize; any args and kwargs that are not
    # pulled out here will be passed into gen_func within pytest_generate_tests below
    scope = kwargs.pop('scope', 'function')
    indirect = kwargs.pop('indirect', False)
    filter_unused = kwargs.pop('filter_unused', True)
    gen_func = kwargs.pop('gen_func', providers_by_class)
    from cfme.utils.pytest_shortcuts import fixture_filter
    # If parametrize doesn't get you what you need, steal this and modify as needed
    def pytest_generate_tests(metafunc):
        # Pass through of args and kwargs
        argnames, argvalues, idlist = gen_func(metafunc, *args, **kwargs)
        # Filter out argnames that aren't requested on the metafunc test item, so not all tests
        # need all fixtures to run, and tests not using gen_func's fixtures aren't parametrized.
        if filter_unused:
            argnames, argvalues = fixture_filter(metafunc, argnames, argvalues)
        # See if we have to parametrize at all after filtering
        parametrize(metafunc, argnames, argvalues, indirect=indirect, ids=idlist, scope=scope)
    return pytest_generate_tests
def providers(metafunc, filters=None):
    """Parametrize over providers matching the given (plus global) filters.

    A ``test_flag`` string in the test's ``meta`` docs metadata is turned
    into an additional ``ProviderFilter(required_flags=...)``.

    Note:
        Using the default 'function' scope, each test runs for every provider
        before the next test starts; parametrize in 'module' scope to group
        all tests of a single provider together.

    Note:
        testgen for providers requires test_flags for collection to work; see
        http://cfme-tests.readthedocs.org/guides/documenting.html#documenting-tests

    Returns:
        (argnames, argvalues, idlist) for metafunc.parametrize.
    """
    filters = filters or []
    argnames = []
    argvalues = []
    idlist = []
    # Translate the test's flag metadata (if any) into a provider filter
    meta = getattr(metafunc.function, 'meta', None)
    test_flag_str = getattr(meta, 'kwargs', {}).get('from_docs', {}).get('test_flag')
    if test_flag_str:
        filters = filters + [ProviderFilter(required_flags=test_flag_str.split(','))]
    for provider in list_providers(filters):
        argvalues.append([provider])
        # Provider keys make for readable parametrized test ids
        idlist.append(provider.key)
        # Add provider to argnames if missing
        if 'provider' in metafunc.fixturenames and 'provider' not in argnames:
            metafunc.function = pytest.mark.uses_testgen()(metafunc.function)
            argnames.append('provider')
        if metafunc.config.getoption('sauce'):
            # one provider is enough under sauce
            break
    return argnames, argvalues, idlist
def providers_by_class(metafunc, classes, required_fields=None):
    """Parametrize over providers that are instances of the given classes.

    Args:
        metafunc: Passed in by pytest
        classes: List of provider classes to fetch
        required_fields: See :py:class:`cfme.utils.provider.ProviderFilter`

    Usage:

        # In the function itself
        def pytest_generate_tests(metafunc):
            argnames, argvalues, idlist = testgen.providers_by_class(
                [GCEProvider, AzureProvider], required_fields=['provisioning']
            )
            metafunc.parametrize(argnames, argvalues, ids=idlist, scope='module')

        # Using the parametrize wrapper
        pytest_generate_tests = testgen.parametrize([GCEProvider], scope='module')
    """
    class_filter = ProviderFilter(classes=classes, required_fields=required_fields)
    return providers(metafunc, filters=[class_filter])
def all_providers(metafunc, **options):
    """Shortcut for :py:func:`providers_by_class` over every BaseProvider."""
    return providers_by_class(metafunc, [BaseProvider], **options)
def auth_groups(metafunc, auth_mode):
    """Parametrize over the groups in ``group_data`` for a configured auth mode.

    Provides two test params based on 'auth_modes' and 'group_roles' in cfme_data:

    ``group_name``:
        expected group name provided by the backend specified in ``auth_mode``
    ``group_data``:
        sorted list of nav destinations visible as a member of ``group_name``

    Args:
        auth_mode: key looked up in ``auth_data.get('auth_providers', {})``

    Returns:
        (argnames, argvalues, idlist); argvalues is empty when the auth mode
        is not configured.
    """
    argnames = ['group_name', 'group_data']
    argvalues = []
    idlist = []
    if auth_mode in auth_data.get('auth_providers', {}):
        # If auth_modes exists, group_roles is assumed to exist as well
        for group_name in group_data:
            idlist.append(group_name)
            argvalues.append([group_name, sorted(group_data[group_name])])
    return argnames, argvalues, idlist
def config_managers(metafunc):
    """Parametrize over the configuration managers defined in cfme_data.

    Returns:
        (argnames, argvalues, idlist) with one config manager object per
        entry under cfme_data['configuration_managers'].
    """
    manager_keys = list(cfme_data.get('configuration_managers', {}))
    argvalues = [[get_config_manager_from_config(key)] for key in manager_keys]
    return ['config_manager_obj'], argvalues, manager_keys
def pxe_servers(metafunc):
    """Parametrize over the PXE servers declared in cfme_data, sorted by name."""
    names = sorted(cfme_data.get('pxe_servers', {}))
    argvalues = [[name] for name in names]
    return ['pxe_name'], argvalues, names
|
akarol/cfme_tests
|
cfme/utils/testgen.py
|
Python
|
gpl-2.0
| 13,742
|
[
"VisIt"
] |
80a310d6e49a67d9dfa65ceb0be32871d70df778a72816bf528a9da56994a1b3
|
"""Wrapper for launching pdbquery"""
"""
This file is part of RAPD
Copyright (C) 2017, Cornell University
All rights reserved.
RAPD is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, version 3.
RAPD is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__created__ = "2017-04-20"
__maintainer__ = "Frank Murphy"
__email__ = "fmurphy@anl.gov"
__status__ = "Development"
# Standard imports
import argparse
import importlib
import multiprocessing
import os
import sys
import uuid

# RAPD imports
import utils.commandline_utils as commandline_utils
import utils.log
import utils.modules as modules
import utils.site
import utils.text as text
def construct_command(commandline_args):
    """Put together the command dict handed to the MR plugin.

    Args:
        commandline_args: argparse.Namespace-like object carrying at least
            clean, data_file, struct_file, json, nproc, progress, run_mode,
            test, adf, dir_up and exchange_dir attributes.

    Returns:
        dict with "command", "process_id", "status", "directories",
        "input_data" and "preferences" keys.
    """
    # The task to be carried out. Use the .hex attribute rather than
    # get_hex(): UUID.get_hex() is Python-2-only (removed in Python 3),
    # while .hex works on both.
    command = {
        "command": "MR",
        "process_id": uuid.uuid1().hex,
        "status": 0,
    }

    # Work directory, named after the data file with its extension stripped
    work_dir = commandline_utils.check_work_dir(
        os.path.join(
            os.path.abspath(os.path.curdir),
            "rapd_mr_%s" % ".".join(
                os.path.basename(commandline_args.data_file).split(".")[:-1])),
        active=True,
        up=commandline_args.dir_up)
    command["directories"] = {
        "work": work_dir,
        "exchange_dir": commandline_args.exchange_dir
    }

    # Information on input
    # NOTE(review): --struct_file may also be a bare PDB code (per its help
    # text); abspath() would then turn the code into a bogus path — confirm
    # how the plugin distinguishes the two.
    command["input_data"] = {
        "data_file": os.path.abspath(commandline_args.data_file),
        "struct_file": os.path.abspath(commandline_args.struct_file),
    }

    # Plugin settings forwarded verbatim from the commandline
    command["preferences"] = {
        "clean": commandline_args.clean,
        "json": commandline_args.json,
        "nproc": commandline_args.nproc,
        "progress": commandline_args.progress,
        "run_mode": commandline_args.run_mode,
        "test": commandline_args.test,
        "adf": commandline_args.adf,
    }

    return command
def get_commandline():
    """Parse and return the commandline arguments for the MR plugin.

    Prints help and exits when invoked with no arguments. After parsing,
    injects attributes downstream code expects but that are not CLI options
    (exchange_dir, db_settings, run_mode, progress).

    Returns:
        argparse.Namespace with parsed and post-processed options.
    """
    # Parse the commandline arguments
    commandline_description = "Launch mr plugin"
    parser = argparse.ArgumentParser(description=commandline_description)

    # Run in test mode
    parser.add_argument("-t", "--test",
                        action="store_true",
                        dest="test",
                        help="Run in test mode")

    # Verbose
    parser.add_argument("-v", "--verbose",
                        action="store_true",
                        dest="verbose",
                        help="More output")

    # Quiet — shares dest with --verbose, so the last flag given wins
    parser.add_argument("-q", "--quiet",
                        action="store_false",
                        dest="verbose",
                        help="Run with less output")

    # The site
    parser.add_argument("-s", "--site",
                        action="store",
                        dest="site",
                        help="Define the site (ie. NECAT)")

    # Clean up intermediate files after the run
    parser.add_argument("--clean",
                        action="store_true",
                        dest="clean",
                        help="Remove intermediate files")

    # Color
    # NOTE(review): with default=False and action="store_false", passing
    # --color leaves no_color at False (a no-op); colored output is already
    # the default — confirm intent.
    parser.add_argument("--color",
                        action="store_false",
                        dest="no_color",
                        default=False,
                        help="Color the terminal output")

    # No color — shares dest with --color
    parser.add_argument("--nocolor",
                        action="store_true",
                        dest="no_color",
                        help="Do not color the terminal output")

    # JSON Output
    parser.add_argument("-j", "--json",
                        action="store_true",
                        dest="json",
                        help="Output JSON format string")

    # Output progress updates?
    parser.add_argument("--progress",
                        action="store_true",
                        dest="progress",
                        help="Output progress updates to the terminal")

    # Set filehandle for progress output
    parser.add_argument("--progress-fd",
                        action="store",
                        dest="progress_fd",
                        default=False,
                        help="Output progress updates to a file descriptor. No need to also use --progress, unless you want JSON on the terminal too.")

    # Multiprocessing: default to all cores but one
    parser.add_argument("--nproc",
                        dest="nproc",
                        type=int,
                        default=max(1, multiprocessing.cpu_count() - 1),
                        help="Number of processors to employ")

    # Calculate ADF map on solutions
    parser.add_argument("--adf",
                        action="store_true",
                        dest="adf",
                        help="Calculate Anomalous Difference Fourier map on solution")

    # Run specific structure file
    parser.add_argument("--struct_file", "--pdb", "--cif",
                        dest="struct_file",
                        required=True,
                        help="PDB/mmCIF file path or a PDB code.")

    # Data file to analyze
    parser.add_argument("--data_file", "--mtz",
                        dest="data_file",
                        required=True,
                        help="Name of data file to be analyzed")

    # Print help message and exit if no arguments were given
    if len(sys.argv[1:])==0:
        parser.print_help()
        parser.exit()

    args = parser.parse_args()

    # Fixes a problem from plugin-called code: these attributes are read by
    # construct_command but have no corresponding CLI options here.
    args.exchange_dir = False
    args.db_settings = False

    # Running in interactive mode if this code is being called directly;
    # --json switches the run mode instead.
    if args.json:
        args.run_mode = "json"
    else:
        args.run_mode = "interactive"

    # Show progress in interactive version (forced on regardless of --progress)
    args.progress = True

    return args
def print_welcome_message(printer):
    """Print a welcome banner to the terminal.

    Args:
        printer: terminal printer callable (see utils.log.get_terminal_printer),
            invoked as printer(message, level, color=...).
    """
    message = """
------------
RAPD MR
------------"""
    # Level 50 so the banner shows at the default (non-verbose) verbosity
    printer(message, 50, color="blue")
def main():
    """The main process: set up logging, build the command, run the MR plugin."""
    # Get the commandline args
    commandline_args = get_commandline()

    # Output log file is always verbose
    log_level = 10

    # Set up logging
    logger = utils.log.get_logger(logfile_dir="./",
                                  logfile_id="rapd_mr",
                                  level=log_level,
                                  console=False)

    # Terminal verbosity: JSON mode silences the terminal printer entirely
    if commandline_args.json:
        terminal_log_level = 100
    elif commandline_args.verbose:
        terminal_log_level = 10
    else:
        terminal_log_level = 50
    tprint = utils.log.get_terminal_printer(verbosity=terminal_log_level,
                                            no_color=commandline_args.no_color,
                                            progress=commandline_args.progress)

    print_welcome_message(tprint)

    # Echo the parsed arguments to log and terminal
    logger.debug("Commandline arguments:")
    tprint(arg="\nCommandline arguments:", level=10, color="blue")
    for pair in commandline_args._get_kwargs():
        logger.debug(" arg:%s val:%s", pair[0], pair[1])
        tprint(arg=" arg:%-20s val:%s" % (pair[0], pair[1]), level=10, color="white")

    # Get the environmental variables.
    # dict.items() / `in` instead of the Python-2-only iteritems()/has_key()
    # so this also runs under Python 3.
    environmental_vars = utils.site.get_environmental_variables()
    logger.debug("" + text.info + "Environmental variables" + text.stop)
    tprint("\nEnvironmental variables", level=10, color="blue")
    for key, val in environmental_vars.items():
        logger.debug(" " + key + " : " + val)
        tprint(arg=" arg:%-20s val:%s" % (key, val), level=10, color="white")

    # Should working directory go up or down?
    commandline_args.dir_up = (
        environmental_vars.get("RAPD_DIR_INCREMENT") in ("up", "UP"))

    # Get site - commandline wins over the environmental variable
    site = False
    site_module = False
    if commandline_args.site:
        site = commandline_args.site
    elif "RAPD_SITE" in environmental_vars:
        site = environmental_vars["RAPD_SITE"]

    # If someone specifies the site or it is found in the environment,
    # import the matching site module
    if site and not site_module:
        site_file = utils.site.determine_site(site_arg=site)
        site_module = importlib.import_module(site_file)

    # Construct the command
    command = construct_command(commandline_args=commandline_args)

    # Load the plugin
    plugin = modules.load_module(seek_module="plugin",
                                 directories=["plugins.mr"],
                                 logger=logger)

    # Print out plugin info
    tprint(arg="\nPlugin information", level=10, color="blue")
    tprint(arg=" Plugin type: %s" % plugin.PLUGIN_TYPE, level=10, color="white")
    tprint(arg=" Plugin subtype: %s" % plugin.PLUGIN_SUBTYPE, level=10, color="white")
    tprint(arg=" Plugin version: %s" % plugin.VERSION, level=10, color="white")
    tprint(arg=" Plugin id: %s" % plugin.ID, level=10, color="white")

    # Instantiate and run the plugin
    plugin_instance = plugin.RapdPlugin(command=command,
                                        site=site_module,
                                        tprint=tprint,
                                        logger=logger)
    plugin_instance.start()

if __name__ == "__main__":
    main()
|
RAPD/RAPD
|
src/plugins/mr/commandline.py
|
Python
|
agpl-3.0
| 10,746
|
[
"ADF"
] |
acfc3a731a3ea09d024c1b4ab054d004082bb0f66fb8205128c55de1b39fb151
|
# Stub models file
from dimagi.ext.couchdbkit import Document
# ensure our signals get loaded at django bootstrap time
from . import signals
from corehq.apps.users.models import CommCareCase
from custom.succeed.reports import VISIT_SCHEDULE, LAST_INTERACTION_LIST, PM3
import fluff
from custom.utils.utils import flat_field
from fluff.filters import CustomFilter
# Stub couchdbkit document — presumably keeps this module valid as a Django
# "models" module at bootstrap time; it is not referenced elsewhere in this
# file. TODO(review): confirm it can't be removed.
class _(Document):
    pass
def get_randomization_date(case):
    """Return the raw ``randomization_date`` value stored on the case."""
    randomization_date = case['randomization_date']
    return randomization_date
def get_next_visit(case):
    """Determine the next scheduled visit for a case.

    Walks VISIT_SCHEDULE in order; each schedule entry whose xmlns matches a
    not-yet-consumed case action counts as a completed visit, advancing the
    candidate "next visit" to the following schedule entry. When the final
    scheduled visit has already happened, returns the sentinel
    ``{'visit_name': 'last', 'days': -1}``.
    """
    # Copy so actions can be consumed without mutating the case
    actions = list(case['actions'])
    next_visit = VISIT_SCHEDULE[0]
    for visit_key, visit in enumerate(VISIT_SCHEDULE):
        for key, action in enumerate(actions):
            if visit['xmlns'] == action['xform_xmlns']:
                try:
                    next_visit = VISIT_SCHEDULE[visit_key + 1]
                    # Consume the matched action so it cannot also satisfy a
                    # later visit; break immediately so the deletion does not
                    # corrupt the ongoing enumerate().
                    del actions[key]
                    break
                except IndexError:
                    # Matched the last scheduled visit: no further visits
                    next_visit = {
                        'visit_name': 'last',
                        'days': -1
                    }
    return next_visit
def visit_name(case):
    """Name of the next scheduled visit for this case."""
    return get_next_visit(case)['visit_name']
def visit_days(case):
    """Day offset of the next scheduled visit for this case."""
    return get_next_visit(case)['days']
def is_active(case):
    """Return 'False' (string) if any case action used the PM3 form, else 'True'."""
    for action in case['actions']:
        if action['xform_xmlns'] == PM3:
            return 'False'
    return 'True'
def last_interaction(case):
    """Return the date of the most recent interaction-type action.

    Scans all case actions and keeps the last one whose xmlns is in
    LAST_INTERACTION_LIST. Returns None when the case has no such action
    (the original indexed ``None['date']`` and raised TypeError).
    """
    last_inter = None
    for action in case['actions']:
        if action['xform_xmlns'] in LAST_INTERACTION_LIST:
            last_inter = action
    return last_inter['date'] if last_inter is not None else None
def get_property(case, property):
    """Return ``case[property]``, or '' when the property is absent.

    The original caught only AttributeError, but dict-style ``__getitem__``
    on a missing key raises KeyError, so missing properties crashed instead
    of falling back to ''. Catch both.
    NOTE: the parameter name shadows the `property` builtin; kept for
    backward compatibility with keyword callers.
    """
    try:
        category = case[property]
    except (AttributeError, KeyError):
        category = ''
    return category
class RandomizationDate(fluff.Calculator):
    """Fluff calculator emitting one unit count per case, dated at randomization."""
    @fluff.date_emitter
    def date(self, case):
        # Single contribution of 1 on the case's randomization date
        yield {
            'date': get_randomization_date(case),
            'value': 1
        }
class UCLAPatientFluff(fluff.IndicatorDocument):
    """Fluff indicator document over 'participant' cases in the 'succeed' domain.

    Flat fields copy per-case attributes straight into the SQL table; the
    single emitter contributes the randomization-date count.
    """
    document_class = CommCareCase
    domains = ('succeed',)
    # Only participant-type cases feed this indicator
    document_filter = CustomFilter(lambda c: c.type == 'participant')
    group_by = ('domain', )
    save_direct_to_sql = True

    # Flat (non-emitted) fields copied from the case document
    name = flat_field(lambda case: case.full_name)
    mrn = flat_field(lambda case: case['mrn'])
    owner_id = flat_field(lambda case: case.owner_id)
    user_id = flat_field(lambda case: case.user_id)
    bp_category = flat_field(lambda case: get_property(case, 'BP_category'))
    care_site = flat_field(lambda case: get_property(case, 'care_site').lower())
    is_active = flat_field(lambda case: is_active(case))
    visit_name = flat_field(lambda case: visit_name(case))
    visit_days = flat_field(lambda case: visit_days(case))
    last_interaction = flat_field(lambda case: last_interaction(case))

    emitter = RandomizationDate()

# Pillow keeping the fluff table in sync with case changes
UCLAPatientFluffPillow = UCLAPatientFluff.pillow()
|
puttarajubr/commcare-hq
|
custom/succeed/models.py
|
Python
|
bsd-3-clause
| 2,931
|
[
"VisIt"
] |
68d317fb0604cca9fff20c940f8c3534cee34eb416856287f2d580e1681fda09
|
from unittest import TestCase
from StringIO import StringIO
from mock import MagicMock
from apetools.commands.ipconfig import Ipconfig
from apetools.connections.localconnection import OutputError
# Captured Windows `ipconfig` output with one connected wireless adapter
# (IPv4 192.168.20.99) plus a wired adapter — happy-path fixture for
# address parsing.
output = """
Windows IP Configuration
Wireless LAN adapter Wireless Network Connection 3:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Wireless LAN adapter Wireless Network Connection 2:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Wireless LAN adapter Wireless Network Connection:
Connection-specific DNS Suffix . : testnetwork.local
Link-local IPv6 Address . . . . . : fe80::3988:30da:5414:8180%12
IPv4 Address. . . . . . . . . . . : 192.168.20.99
Subnet Mask . . . . . . . . . . . : 255.255.255.0
Default Gateway . . . . . . . . . : fe80::226:5aff:feff:4294%12
192.168.20.1
Ethernet adapter Local Area Connection:
Connection-specific DNS Suffix . :
Link-local IPv6 Address . . . . . : fe80::495a:6a04:eded:5daf%11
IPv4 Address. . . . . . . . . . . : 192.168.10.63
Subnet Mask . . . . . . . . . . . : 255.255.255.0
Default Gateway . . . . . . . . . : 192.168.10.1
Tunnel adapter isatap.{9703F5B2-AE1F-493C-8B6A-1231760A6A63}:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Tunnel adapter isatap.testnetwork.local:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Tunnel adapter isatap.{D46350ED-E789-4453-A0B9-8B6CFF700B00}:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Tunnel adapter isatap.{754E07B6-882B-4D20-BB0B-356E6D324B43}:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Tunnel adapter Teredo Tunneling Pseudo-Interface:
Connection-specific DNS Suffix . :
IPv6 Address. . . . . . . . . . . : 2001:0:4137:9e76:1051:d59:3f57:eb9c
Link-local IPv6 Address . . . . . : fe80::1051:d59:3f57:eb9c%15
Default Gateway . . . . . . . . . : ::
"""

# Same host with the wireless adapters absent (disabled) — fixture for the
# "address not available" path.
DISABLED = """
Windows IP Configuration
Ethernet adapter Local Area Connection:
Connection-specific DNS Suffix . :
Link-local IPv6 Address . . . . . : fe80::495a:6a04:eded:5daf%11
IPv4 Address. . . . . . . . . . . : 192.168.10.63
Subnet Mask . . . . . . . . . . . : 255.255.255.0
Default Gateway . . . . . . . . . : 192.168.10.1
Tunnel adapter isatap.{9703F5B2-AE1F-493C-8B6A-1231760A6A63}:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Tunnel adapter isatap.testnetwork.local:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Tunnel adapter isatap.{D46350ED-E789-4453-A0B9-8B6CFF700B00}:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Tunnel adapter isatap.{754E07B6-882B-4D20-BB0B-356E6D324B43}:
Media State . . . . . . . . . . . : Media disconnected
Connection-specific DNS Suffix . :
Tunnel adapter Teredo Tunneling Pseudo-Interface:
Connection-specific DNS Suffix . :
IPv6 Address. . . . . . . . . . . : 2001:0:4137:9e76:83:15ea:3f57:f5c0
Link-local IPv6 Address . . . . . : fe80::83:15ea:3f57:f5c0%15
Default Gateway . . . . . . . . . : ::
"""
class TestIpconfig(TestCase):
    """Tests for the Ipconfig wrapper, which parses Windows `ipconfig` output."""
    def setUp(self):
        # Ipconfig is exercised against a fully mocked connection so no
        # actual command is run.
        self.connection = MagicMock()
        self.command = Ipconfig(self.connection)
        return

    def test_expression(self):
        """The IPv4 regex matches a typical ipconfig address line."""
        line = " IPv4 Address. . . . . . . . . . . : 192.168.20.99"
        # NOTE(review): assertRegexpMatches is the deprecated Python-2-era
        # alias of assertRegex.
        self.assertRegexpMatches(line, self.command.ip_expression)
        return

    def test_ip(self):
        """`address` extracts the first wireless IPv4 address from full output."""
        expected = "192.168.20.99"
        self.connection.ipconfig.return_value = OutputError(StringIO(output), "")
        actual = self.command.address
        self.assertEqual(expected, actual)
        return

    def test_disabled(self):
        """With no wireless adapter present, `address` reports not_available."""
        self.connection.ipconfig.return_value = OutputError(StringIO(DISABLED), "")
        self.assertEqual(self.command.not_available, self.command.address)
        return
# end class TestIpconfig
|
rsnakamura/oldape
|
tests/testunits/testcommands/testipconfig/testipconfig.py
|
Python
|
apache-2.0
| 4,225
|
[
"FEFF"
] |
66ad0a9241634601823ddf15691a7b4a333bd5a77aa01dde885aa132e8ca680d
|
from django.db import models
from edc_base.model_fields.custom_fields import OtherCharField
from edc_base.model_managers import HistoricalRecords
from ..managers import CurrentSiteManager
from edc_base.model_validators import date_not_future
from edc_constants.choices import YES_NO_NA
from edc_visit_tracking.managers import CrfModelManager
from ..choices import FLUCONAZOLE_DOSE, RANKIN_SCORE, YES_NO_ND, YES_NO_ALREADY_ND
from .model_mixins import CrfModelMixin, ClinicalAssessmentModelMixin
class FollowUp(ClinicalAssessmentModelMixin, CrfModelMixin):
    """Follow-up CRF collected at scheduled study visits."""

    # Fluconazole dose taken the day before the visit
    fluconazole_dose = models.CharField(
        verbose_name='Fluconazole dose (day prior to visit)',
        max_length=25,
        choices=FLUCONAZOLE_DOSE)

    fluconazole_dose_other = OtherCharField(
        verbose_name='If other, specify dose:',
        max_length=25)

    rifampicin_started = models.CharField(
        verbose_name='Rifampicin started since last visit?',
        max_length=25,
        choices=YES_NO_ALREADY_ND)

    # Only relevant when rifampicin_started is affirmative, hence nullable
    rifampicin_start_date = models.DateField(
        verbose_name='Date Rifampicin started',
        validators=[date_not_future],
        null=True,
        blank=True,)

    patient_help = models.CharField(
        verbose_name=('Does the patient require help from'
                      ' anybody for everyday activities? '),
        max_length=10,
        choices=YES_NO_ND,
        help_text=('For example eating, drinking, washing,'
                   ' brushing teeth, going to the toilet'))

    patient_problems = models.CharField(
        verbose_name='Has the illness left the patient with any other problems?',
        max_length=10,
        choices=YES_NO_ND)

    # Modified Rankin disability scale (stored as a choice string)
    rankin_score = models.CharField(
        verbose_name='Modified Rankin score',
        choices=RANKIN_SCORE,
        max_length=10,
        null=True)

    other_significant_dx = models.CharField(
        verbose_name='Other significant diagnosis since last visit?',
        max_length=5,
        choices=YES_NO_NA)

    # Managers: site-scoped, default CRF manager, and audit history
    on_site = CurrentSiteManager()
    objects = CrfModelManager()
    history = HistoricalRecords()

    class Meta(CrfModelMixin.Meta):
        verbose_name = 'Follow-up'
        verbose_name_plural = 'Follow-up'
|
botswana-harvard/ambition-subject
|
ambition_subject/models/follow_up.py
|
Python
|
gpl-3.0
| 2,214
|
[
"VisIt"
] |
51f586773dc6ae609129eca957d21140e2679b17b13e5a3a0e769a46359e2c11
|
import pickle
import time
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d, Rbf
from scipy.io import loadmat, savemat
import sys, os
sys.path.append("./")
from Utils.RK4 import RK4
from Utils.boxgrid import boxgrid
from Utils.compare import compare
from EntryGuidance.EntryEquations import Entry, EDL
from EntryGuidance.MCF import mcsplit, mcfilter
from EntryGuidance.EntryPlots import EntryPlots
from EntryGuidance.InitialState import InitialState
from EntryGuidance.Planet import Planet
from EntryGuidance.Simulation import Simulation, Cycle, EntrySim
from EntryGuidance.SRPController import SRPController, SRPControllerTrigger, update_rule_maker
from EntryGuidance.SRPData import SRPData
from EntryGuidance.SRPUtils import range_from_entry, srp_from_entry
from EntryGuidance.Target import Target
# from EntryGuidance.VMC import VMC, velocity_trigger
from EntryGuidance.Triggers import Trigger, VelocityTrigger
from EntryGuidance.Uncertainty import getUncertainty
SRPFILE = os.path.join(os.getcwd(), "data\\FuelOptimal\\srp_7200kg.pkl")
def simulate(x0, InputSample, plot=False):
    """Run a scalar entry simulation with the SRP controller called multiple times.

    Args:
        x0: initial entry state vector (see InitialState).
        InputSample: parametric uncertainty sample passed to Simulation.run.
        plot: when True, show trajectory and controller-history plots.

    Returns:
        (mf, sim.df, mcc.history): fuel consumed at the SRP trim point, the
        simulated trajectory DataFrame, and the controller history dict.
    """
    target = Target(0, 700/3397, 0)
    TC = 2 # time constant
    # NOTE(review): file handle from open() is never closed — minor leak.
    srpdata = pickle.load(open(SRPFILE,'rb'))
    # Controller replans at the listed velocity update points
    mcc = SRPController(N=[6, 2], target=target, srpdata=srpdata, update_function=update_rule_maker([5490, 4500, 3000, 2000, 1000]), debug=False, time_constant=TC)
    Vf = 450 # Anything lower than the optimal trigger point is fine
    states = ['Entry']
    trigger = [SRPControllerTrigger(mcc, -10)] # Go 10 m/s lower than the true trigger point says to
    sim_inputs = {'states': states, 'conditions': trigger}
    sim = Simulation(cycle=Cycle(1), output=False, use_da=False, **sim_inputs)
    sim.run(x0, [mcc], TimeConstant=TC, InputSample=InputSample, StepsPerCycle=10)
    # Fuel at the best SRP ignition point along the realized trajectory
    mf = mcc.srp_trim(sim.history)
    print("Final fuel consumed: {:.1f} kg".format(mf))
    if plot:
        sim.plot() # The trajectory resulting from integrating the controller commands
        mcc.plot_history()
    return mf, sim.df, mcc.history
def monte_carlo():
    """Run a Monte Carlo campaign of entry simulations and persist the results.

    Samples parametric and initial-state uncertainties, runs `simulate` for
    each sample, and appends summary rows to a CSV plus full trajectories to
    a pickle. Re-runs resume: samples already present in the CSV are skipped.
    """
    # TODO: Consider what happens if a sim fails (error) or bad SRP solutions (100k)
    savefile = "first_monte_carlo"
    EFPA = -15.75 # nominal entry flight-path angle (deg)
    x0 = InitialState(vehicle='heavy', fpa=np.radians(EFPA))
    N = 3
    n_parametric = 4
    n_states = 6 # up to 6, but we don't NEED to perturb altitude, velocity shouldn't have a significant impact either
    U = getUncertainty(initial=True)
    Ns = 80
    # Latin-hypercube samples, transposed to (sample, dimension)
    parametric_samples = U['parametric'].sample(Ns, 'L').T
    state_samples = U['initial'].sample(Ns, 'L').T
    print(parametric_samples.shape)
    # Resume support: load prior results when the savefile already exists
    if os.path.isfile(f"./data/FuelOptimal/{savefile}.csv"):
        df_existing = pd.read_csv(f"./data/FuelOptimal/{savefile}.csv")
        data_existing = df_existing.values.T
        # Columns 3:7 hold the parametric sample (cd, cl, rho0, hs)
        sample_existing = data_existing[3:7].T
        traj_data_existing = pickle.load(open(f"./data/FuelOptimal/{savefile}.pkl", 'rb'))
    else: # Use this when starting a new run, so the savefiles dont already exist
        df_existing = None
        sample_existing = []
        traj_data_existing = {}
    data = [] # the summary data to go in a csv
    traj_data = {}
    for n,full_sample in enumerate(zip(parametric_samples,state_samples)):
        sample,dx0 = full_sample
        # Append a zero mass perturbation
        dx0 = np.append(dx0, [0])
        print("\nSample {}: ".format(n+1))
        print(sample)
        print("State delta:")
        print(dx0)
        # Skip samples that already appear in the existing results
        already_run = False
        for previous_sample in sample_existing:
            if tuple(sample) == tuple(previous_sample):
                already_run = True
                break
        if already_run:
            print("Already run this sample\n")
            continue
        try:
            propellant, traj, history = simulate(x0+dx0, sample, False)
            data.append([propellant, *history['params'][-1], *sample, EFPA+np.degrees(dx0[4]), np.degrees(dx0[5]), *history['ignition_state'][-1], *history['entry_state'][-1]])
            traj_data[tuple(sample)] = {'traj': traj, 'history': history}
            traj_data_existing.pop(tuple(sample), None) # ensures we save the correct traj/hist
        except:
            # NOTE(review): bare except hides the actual failure (including
            # KeyboardInterrupt) — prefer `except Exception` + logging.
            print("Simulation failed")
    if data:
        data = np.array(data).T
        data[1] = np.degrees(data[1]) # bank angle rad -> deg
        df = pd.DataFrame(data.T, columns=["fuel", "bank"," vr", 'cd','cl','rho0','hs',"efpa", "eazi", 'x','y','z','vx','vz', 'r', 'lon','lat', 'v', 'fpa','azi','m'])
        if df_existing is not None:
            df = pd.concat([df_existing, df], ignore_index=True)
        # try-except because one time my dumb@$$ had temp.csv open and I lost 2+ hours worth of computations
        try:
            df.to_csv(f"./data/FuelOptimal/{savefile}.csv", index=False)
        except:
            df.to_csv("./data/FuelOptimal/temp1283787391.csv", index=False)
        if df_existing is not None:
            traj_data.update(traj_data_existing)
        pickle.dump(traj_data, open(f"./data/FuelOptimal/{savefile}.pkl", 'wb'))
def plot_monte_carlo_data():
    """Load saved Monte Carlo results and produce scatter/summary plots."""
    savefile = "first_monte_carlo"
    savedir = f"./data/FuelOptimal/images/{savefile}"
    # summary data - solution, ignition state
    df = pd.read_csv(f"./data/FuelOptimal/{savefile}.csv")
    data = df.values.T
    samples = data[3:7].T
    # Propellant mass fraction in percent
    pmf = df['fuel'].values/df['m'].values * 100
    # Failure criteria: excessive fuel or excessive ignition altitude
    bad = df['fuel'].values > 5000
    high = df['z'] > 5000
    bad = np.logical_or(bad, high)
    good = np.invert(bad)
    print("Median: {:.2f}".format(np.percentile(pmf[good], 50)))
    print("99%: {:.2f}".format(np.percentile(pmf[good], 99)))
    plot_ignitions(df[np.invert(bad)], savedir=savedir)
    # Offset further figures past those already open
    try:
        nfigs = plt.gcf().number + 1
    except:
        # NOTE(review): bare except — narrow to the expected matplotlib error.
        nfigs = 0
    plt.show()
# def high_pmf(pmf):
def monte_carlo_filter(df, history=None):
    """Run MCF input-sensitivity filters on the Monte Carlo summary data.

    Splits the uncertain inputs into "behavioral" (B) and "non-behavioral"
    (NB) sets first by propellant mass fraction, then by crossrange, and
    feeds each split to mcfilter to identify the driving inputs.
    """
    inputs = df[['cd','cl','rho0','hs','efpa','eazi']]
    names = ['CD','CL', 'rho0', 'hs','efpa','eazi']
    pmf = df['fuel']/df['m'] * 100
    # Boundary: fixed 5000/7200 kg fraction when failures (>=10t) are present,
    # otherwise the 85th percentile
    if np.any(df['fuel'] >= 10000):
        pmf_boundary = 5000/7200*100
    else:
        pmf_boundary = np.percentile(pmf, 85)
    print(pmf_boundary)
    high = pmf >= pmf_boundary
    low = np.invert(high)
    B = inputs[low].values.T
    NB = inputs[high].values.T
    print("Filter for PMF")
    df_mcf = mcfilter(B, NB, names, p_threshold=1)
    cr = np.abs(df['lat'])
    low = cr <= np.percentile(cr, 80)
    B = inputs[low].values.T
    # NOTE(review): NB still uses `high` from the PMF split while B uses the
    # new crossrange mask — likely should be np.invert(low). Confirm intent.
    NB = inputs[high].values.T
    print("Filter for high crossrange")
    df_mcf = mcfilter(B, NB, names, p_threshold=1)
def plot_trajectories(data, savedir, **kwargs):
    """Overlay all saved trajectories, skipping cases with excessive fuel use.

    Args:
        data: dict keyed by sample tuple with 'traj' and 'history' entries.
        savedir: directory passed to EntryPlots for figure output.
    """
    nbad = 0
    for key in data.keys():
        traj = data[key]['traj']
        hist = data[key]['history']
        # check that the prop cost wasnt ridic, skip if so
        mf = hist['fuel'][-1]
        vf = hist['velocity'][-1]
        if mf > 5000:
            nbad += 1
            continue
        # trim DF for final vf
        v = traj['velocity'].values
        h = traj['altitude'].values
        # NOTE(review): `keep` is computed but never used (only v>=vf is
        # applied below) — confirm whether the altitude cut was intended.
        keep = np.logical_and(v >= vf, h >= 3)
        # NOTE(review): EntryPlots is called twice per trajectory (once
        # without savedir, once with) — looks like a leftover; confirm.
        EntryPlots(traj[v>=vf], plot_kw={'c':'b', 'alpha':0.1}, **kwargs)
        EntryPlots(traj[v>=vf], savedir=savedir, plot_kw={'c':'b', 'alpha':0.1}, **kwargs)
    print("{} bad trajectories out of {} total".format(nbad, len(data.keys())))
def plot_ignitions(data, figsize=(10,6), fontsize=14, ticksize=12, savedir=None, fignum_offset=0, label=None, plot_kw={}, grid=True):
    """Produce the five ignition-condition figures (scatter plots + histograms).

    Figures: downrange/crossrange, altitude/range, velocity components, PMF
    histogram, and FPA vs velocity, all colored by propellant mass fraction.
    NOTE(review): `plot_kw={}` is a mutable default — harmless here since it
    is never mutated, but worth replacing with None.
    """
    import matplotlib as mpl
    mpl.rc('image', cmap='inferno')
    # Filename suffixes, in figure order, used when saving
    figs = ['dr_cr', 'alt_range','vz_vx', 'pmf', 'fpa']
    clabel = {"label": 'PMF (%)'} # positive label pad moves to the right. Units of 1/72 inches
    # Drop outliers above 22% propellant mass fraction, then recompute
    pmf = data['fuel']/data['m'] * 100
    data = data[pmf <= 22]
    pmf = data['fuel']/data['m'] * 100
    if 1: # This really just helps show how many points there are, otherwise too many are on top of one another
        dr_range = (-1 + 2*np.random.random(data['fuel'].shape))
        cr_range = (-0.25 + 0.5*np.random.random(data['fuel'].shape))
        h_range = (-0.25 + 0.5*np.random.random(data['fuel'].shape))
    else:
        dr_range = cr_range = h_range = 0
    # Jittered ignition position components, in km
    cr = np.abs(data['y']/1000 + cr_range)
    dr = data['x']/1000 + dr_range
    d = (dr**2 + cr**2)**0.5
    print("N CR > 1 km = {}".format(np.sum(cr >= 1)))
    h = data['z']/1000
    hmin = 3.25
    h[h>hmin] += h_range[h>hmin]
    fignum = 1 + fignum_offset
    # Figure 1: crossrange vs downrange at ignition
    plt.figure(fignum, figsize=figsize)
    plt.scatter(cr, dr, c=pmf, label=label, **plot_kw)
    cb = plt.colorbar(**clabel)
    cb.ax.tick_params(labelsize=ticksize)
    cb.ax.yaxis.label.set_size(fontsize)
    plt.plot(0,0, 'kx', markersize=8)
    plt.gca().invert_yaxis()
    plt.ylabel('Downrange distance to target at ignition (km)', fontsize=fontsize)
    plt.xlabel('Crossrange distance to target at ignition (km)', fontsize=fontsize)
    plt.tick_params(labelsize=ticksize)
    plt.grid(grid)
    plt.legend()
    fignum +=1
    # Figure 2: altitude vs horizontal distance at ignition
    plt.figure(fignum, figsize=figsize)
    plt.scatter(d, h, c=pmf, label=label, **plot_kw)
    plt.hlines(3, np.min(d)*0.95, np.max(d)*1.05, 'r')
    if 0:
        plt.plot(0,0, 'kx', markersize=8)
        plt.axis('equal')
    plt.gca().invert_xaxis()
    cb = plt.colorbar(**clabel)
    cb.ax.tick_params(labelsize=ticksize)
    cb.ax.yaxis.label.set_size(fontsize)
    plt.ylabel('Altitude above target at ignition (km)', fontsize=fontsize)
    plt.xlabel('Horizontal distance to target at ignition (km)', fontsize=fontsize)
    plt.tick_params(labelsize=ticksize)
    plt.grid(grid)
    plt.legend()
    fignum +=1
    # Figure 3: two-panel velocity figure (components + magnitude vs PMF)
    plt.figure(fignum, figsize=(figsize[0]+4, figsize[1]))
    plt.subplot(1, 2, 2)
    plt.scatter(data['vx'], data['vz'], c=pmf, label=label, **plot_kw)
    cb = plt.colorbar(**clabel)
    cb.ax.tick_params(labelsize=ticksize)
    cb.ax.yaxis.label.set_size(fontsize)
    plt.ylabel('Vertical Ignition Velocity (m/s)', fontsize=fontsize)
    plt.xlabel('Downrange Ignition velocity (m/s)', fontsize=fontsize)
    plt.tick_params(labelsize=ticksize)
    plt.grid(grid)
    plt.legend()
    plt.subplot(1, 2, 1)
    plt.grid(grid)
    plt.scatter(data['v'], pmf)
    plt.xlabel('Ignition Velocity Magnitude (m/s)', fontsize=fontsize)
    plt.ylabel('PMF (%)', fontsize=fontsize)
    plt.tick_params(labelsize=ticksize)
    fignum +=1
    # Figure 4: propellant mass fraction histogram
    plt.figure(fignum, figsize=figsize)
    plt.grid(grid)
    plt.hist(pmf, bins=20)
    plt.xlabel('Propellant Mass Fraction (%)', fontsize=fontsize)
    plt.tick_params(labelsize=ticksize)
    fignum +=1
    # Figure 5: flight-path angle vs velocity magnitude at ignition
    plt.figure(fignum, figsize=figsize)
    plt.grid(grid)
    plt.scatter(data['v'], np.degrees(data['fpa']), c=pmf)
    cb = plt.colorbar(**clabel)
    cb.ax.tick_params(labelsize=ticksize)
    cb.ax.yaxis.label.set_size(fontsize)
    plt.ylabel('Ignition FPA (deg)', fontsize=fontsize)
    plt.xlabel('Ignition Velocity Magnitude (m/s)', fontsize=fontsize)
    plt.tick_params(labelsize=ticksize)
    plt.tick_params(labelsize=ticksize)
    fignum +=1
    # Save every figure when a directory is supplied
    if savedir is not None:
        if not os.path.isdir(savedir):
            os.mkdir(savedir)
        for i in range(len(figs)):
            plt.figure(fignum_offset + i+1)
            plt.savefig(os.path.join(savedir, "ignition_{}".format(figs[i])), bbox_inches='tight')
if __name__ == "__main__":
    # Running as a script: plot previously saved results (the campaign
    # itself is launched by uncommenting monte_carlo()).
    # monte_carlo()
    plot_monte_carlo_data()
|
CDNoyes/EDL-Py
|
EntryGuidance/AAS20.py
|
Python
|
gpl-3.0
| 14,106
|
[
"Gaussian"
] |
77c449bbfab877c4e0efc403869a223a393652d8fc102e9ded530dbb183dfb4a
|
from datetime import date, timedelta
import warnings
from pyluach.dates import GregorianDate
from .core import UnitedStates
from ..registry_tools import iso_register
class HebrewHolidays:
    """Mixin computing Gregorian dates of Hebrew holidays via pyluach."""

    # Cache: {gregorian_year: [(HebrewDate, datetime.date), ...]}
    hebrew_calendars = {}

    @classmethod
    def get_hebrew_calendar(cls, gregorian_year):
        """
        Build and cache the Hebrew calendar for the given Gregorian Year.
        """
        if gregorian_year not in cls.hebrew_calendars:
            # Walk every day of the Gregorian year, pairing it with its
            # Hebrew-calendar equivalent.
            days = []
            day = date(gregorian_year, 1, 1)
            step = timedelta(days=1)
            while day.year == gregorian_year:
                hebrew_date = GregorianDate(
                    year=day.year,
                    month=day.month,
                    day=day.day,
                ).to_heb()
                days.append((hebrew_date, day))
                day += step
            # Store it in the class property
            cls.hebrew_calendars[gregorian_year] = days
        return cls.hebrew_calendars[gregorian_year]

    @classmethod
    def search_hebrew_calendar(cls, gregorian_year, hebrew_month, hebrew_day):
        """
        Search for a specific Hebrew month and day in the Hebrew calendar.

        Returns the first matching Gregorian date, or None if not found.
        """
        for hebrew_date, gregorian_date in cls.get_hebrew_calendar(gregorian_year):
            if hebrew_date.month == hebrew_month and hebrew_date.day == hebrew_day:
                return gregorian_date

    @classmethod
    def get_rosh_hashanah(cls, year):
        """
        Return the gregorian date of the first day of Rosh Hashanah.
        """
        return cls.search_hebrew_calendar(year, 7, 1)

    @classmethod
    def get_yom_kippur(cls, year):
        """
        Return the gregorian date of Yom Kippur.
        """
        return cls.search_hebrew_calendar(year, 7, 10)
@iso_register('US-FL')
class Florida(UnitedStates):
    """Florida"""
    # Florida observes the Friday after Thanksgiving instead of Columbus Day
    # or the federal Presidents Day.
    include_thanksgiving_friday = True
    thanksgiving_friday_label = "Friday after Thanksgiving"
    include_columbus_day = False
    include_federal_presidents_day = False
class FloridaLegal(Florida):
    """Florida Legal Holidays

    Florida law distinguishes paid from legal holidays; this calendar adds
    the legal-only ones on top of the paid Florida calendar (see the
    warning raised in __init__).
    """
    FIXED_HOLIDAYS = Florida.FIXED_HOLIDAYS + (
        (2, 15, 'Susan B. Anthony Day'),
        (4, 2, 'Pascua Florida Day'),
        (6, 14, 'Flag Day'),
    )
    include_fat_tuesday = True
    include_lincoln_birthday = True
    include_federal_presidents_day = True
    include_good_friday = True
    # NOTE(review): flag is include_confederation_day while the override
    # below is get_confederate_day — confirm the base-class hook names match.
    include_confederation_day = True
    include_jefferson_davis_birthday = True
    include_columbus_day = True
    columbus_day_label = "Columbus Day and Farmers' Day"
    include_election_day_every_year = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Warn every instantiation: these are legal, not paid, holidays
        warnings.warn(
            "Florida's laws separate the definitions between paid versus legal"
            " holidays. Be warned that Florida Legal specific Holidays are not"
            " paid holidays."
        )

    def get_confederate_day(self, year):
        """
        Confederation memorial day is on the April 26th for Florida Legal.
        """
        return date(year, 4, 26), "Confederate Memorial Day"

    def get_jefferson_davis_birthday(self, year):
        """
        Jefferson Davis Birthday appears to be a fixed holiday (June 3rd)
        """
        return date(year, 6, 3), "Jefferson Davis Birthday"
class FloridaCircuitCourts(HebrewHolidays, Florida):
    """Florida Circuits Courts"""
    # Courts also observe the federal Presidents day and Good Friday.
    include_federal_presidents_day = True
    include_good_friday = True

    def get_variable_days(self, year):
        """Return Florida's variable days plus the Jewish High Holy Days."""
        days = super().get_variable_days(year)
        days.extend([
            (self.get_rosh_hashanah(year), "Rosh Hashanah"),
            (self.get_yom_kippur(year), "Yom Kippur"),
        ])
        return days
class FloridaMiamiDade(Florida):
    """Miami-Dade, Florida"""
    # Miami-Dade county restores Presidents day and Columbus Day, which the
    # state-level Florida calendar excludes.
    include_federal_presidents_day = True
    include_columbus_day = True
|
novapost/workalendar
|
workalendar/usa/florida.py
|
Python
|
mit
| 4,158
|
[
"COLUMBUS"
] |
a8128a01f7ad57e7d5c4613c90fdb63f4686d93f795fd816959185563cfd0155
|
# Copyright (C) 2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest as ut
import importlib_wrapper
# Import the sample with a reduced number of integration steps so the test
# stays fast.  NOTE(review): "@SAMPLES_DIR@" is a placeholder, presumably
# substituted by the build system before this test runs -- confirm.
sample, skipIfMissingFeatures = importlib_wrapper.configure_and_import(
    "@SAMPLES_DIR@/p3m.py", int_steps=100, int_n_times=5)


@skipIfMissingFeatures
class Sample(ut.TestCase):
    # Importing the sample executes it; exposing its system is enough to
    # assert the script ran without raising.
    system = sample.system


if __name__ == "__main__":
    ut.main()
|
mkuron/espresso
|
testsuite/scripts/samples/test_p3m.py
|
Python
|
gpl-3.0
| 1,006
|
[
"ESPResSo"
] |
2e97c6c9e5844d4b16d6325e49759365e6e3601684fed9484ec25ed73107d551
|
# Bookmark plugin for Exaile media player
# Copyright (C) 2009-2011 Brian Parma
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from __future__ import with_statement
from gi.repository import Gio
from gi.repository import GLib
from gi.repository import Gtk
import os
import logging
logger = logging.getLogger(__name__)
from xl import (
covers,
event,
player,
settings,
trax,
xdg,
providers
)
from xl.nls import gettext as _
from xlgui import guiutil, icons
from xlgui.widgets import dialogs, menu
import bookmarksprefs
# Bookmarks are stored as JSON because it is prettier and safer; older
# releases saved them as a Python repr, so reading falls back to parsing
# that legacy format.  On very old Pythons without json (2.5), fall back to
# repr round-tripping entirely.
try:
    import json
    import ast

    def _try_read(data):
        """Parse bookmark data: JSON first, then the legacy repr format."""
        try:
            return json.loads(data)
        except ValueError:
            # Legacy files contain a repr of a list of (uri, position)
            # tuples, i.e. literals only, so ast.literal_eval parses them
            # safely.  The previous eval(data, {'__builtin__': None}) did
            # NOT actually disable builtins -- the correct globals key is
            # '__builtins__' -- so it could execute arbitrary expressions.
            return ast.literal_eval(data)

    def _write(x):
        """Serialize bookmarks as indented JSON."""
        return json.dumps(x, indent=2)

    _read = _try_read
except ImportError:
    import ast
    _write = str
    _read = ast.literal_eval
# Short aliases for the menu helpers used throughout this plugin.
_smi = menu.simple_menu_item
_sep = menu.simple_separator

# TODO: dict or list of tuples for bookmark storage?  A dict would prevent
# duplicates, a list of tuples preserves insertion order (tuples used for
# now).  Does order matter?
def error(text):
    """Log *text* as a plugin error and show it in an error dialog."""
    logger.error("%s: %s" % ('Bookmarks', text))
    # NOTE(review): `exaile` is not defined at module scope, so this call
    # raises NameError if ever reached -- the dialog parent should probably
    # come from the Bookmarks instance.  TODO: confirm and fix.
    dialogs.error(None, exaile.gui.main, text)
class Bookmarks:
    """
    Holds the list of (track URI, position) bookmarks and the GTK menus
    used to create, resume, delete and clear them.  Bookmarks are persisted
    to ``bookmarklist.dat`` in the first XDG data directory.
    """

    def __init__(self, exaile):
        # bookmarks is a list of (track URI, position-in-seconds) tuples.
        self.bookmarks = []
        # self.auto_db = {}
        self.exaile = exaile
        self.use_covers = settings.get_option('plugin/bookmarks/use_covers', False)
        # Monotonic id used to give each bookmark menu item a unique name.
        self.counter = 0

        # setup menus
        self.menu = menu.Menu(self)
        self.delete_menu = menu.Menu(self)

        # define factory-factory for sensitive-aware menuitems
        def factory_factory(display_name, icon_name, callback=None, submenu=None):
            def factory(menu_, parent, context):
                item = Gtk.ImageMenuItem.new_with_mnemonic(display_name)
                image = Gtk.Image.new_from_icon_name(icon_name,
                    size=Gtk.IconSize.MENU)
                item.set_image(image)

                # insensitive if no bookmarks present
                if len(self.bookmarks) == 0:
                    item.set_sensitive(False)
                else:
                    if callback is not None:
                        item.connect('activate', callback)
                    if submenu is not None:
                        item.set_submenu(submenu)
                return item
            return factory

        items = []
        items.append(_smi('bookmark', [], _('_Bookmark This Track'),
            'bookmark-new', self.add_bookmark))
        items.append(menu.MenuItem('delete', factory_factory(_('_Delete Bookmark'),
            'gtk-close', submenu=self.delete_menu), ['bookmark']))
        items.append(menu.MenuItem('clear', factory_factory(_('_Clear Bookmarks'),
            'gtk-clear', callback=self.clear), ['delete']))
        items.append(_sep('sep', ['clear']))

        for item in items:
            self.menu.add_item(item)

        # TODO: automatic bookmarks, not yet possible
        #   - needs a way to get the time a file is interrupted at
        # set events - not functional yet
        #event.add_callback(self.on_start_track, 'playback_start')
        #event.add_callback(self.on_stop_track, 'playback_end')
        #playback_end, playback_pause, playback_resume, stop_track

    def do_bookmark(self, widget, data):
        """
        This is called to resume a bookmark.
        """
        key, pos = data
        # NOTE(review): local `exaile` is assigned but never used below.
        exaile = self.exaile

        if not (key and pos):
            return

        # check if it's already playing
        track = player.PLAYER.current
        if track and track.get_loc_for_io() == key:
            player.PLAYER.unpause()
            player.PLAYER.seek(pos)
            return
        else:
            # play it using the QUEUE
            track = trax.Track(key)
            if track:  # make sure we got one
                player.QUEUE.play(track)
                player.PLAYER.seek(pos)

    def add_bookmark(self, *args):
        """
        Create bookmark for current track/position.
        """
        # get currently playing track
        track = player.PLAYER.current
        if track is None:
            error('Need a playing track to Bookmark.')
            return

        pos = player.PLAYER.get_time()
        key = track.get_loc_for_io()
        self.bookmarks.append((key,pos))
        self.display_bookmark(key, pos)

    def display_bookmark(self, key, pos):
        """
        Create menu entries for this bookmark.
        """
        pix = None
        # add menu item
        try:
            item = trax.Track(key)
            title = item.get_tag_display('title')
            if self.use_covers:
                image = covers.MANAGER.get_cover(item, set_only=True)
                if image:
                    try:
                        pix = icons.MANAGER.pixbuf_from_data(image, size=(16,16))
                    except GLib.GError:
                        logger.warn('Could not load cover')
                        pix = None
                # no cover
                else:
                    pix = None
        except Exception:
            logger.exception("Cannot open %s", key)
            # delete offending key?
            return

        # NOTE(review): pos/60 relies on integer division -- this is
        # Python 2 era code; under Python 3 pos would need //.
        time = '%d:%02d' % (pos/60, pos%60)
        label = '%s @ %s' % ( title , time )

        counter = self.counter  # closure magic (workaround for factories not having access to item)

        # factory for new bookmarks
        def factory(menu_, parent, context):
            menu_item = Gtk.ImageMenuItem.new_with_mnemonic(label)
            if pix:
                menu_item.set_image(Gtk.image_new_from_pixbuf(pix))

            # Same item appears in both menus: activating it from the main
            # menu resumes the bookmark, from the delete submenu removes it.
            if menu_ is self.menu:
                menu_item.connect('activate', self.do_bookmark, (key,pos))
            else:
                menu_item.connect('activate', self.delete_bookmark, (counter,key,pos))

            return menu_item

        item = menu.MenuItem('bookmark{0}'.format(self.counter), factory, ['sep'])
        self.menu.add_item(item)
        self.delete_menu.add_item(item)
        self.counter += 1

        # save addition
        self.save_db()

    def clear(self, widget):
        """
        Delete all bookmarks.
        """
        # remove from menus
        for item in self.delete_menu._items:
            self.menu.remove_item(item)
            self.delete_menu.remove_item(item)

        self.bookmarks = []
        self.save_db()

    def delete_bookmark(self, widget, targets):
        """
        Delete a bookmark.
        """
        #print targets
        counter, key, pos = targets

        if (key, pos) in self.bookmarks:
            self.bookmarks.remove((key,pos))

        # The menu item was named after the counter at creation time.
        name = 'bookmark{0}'.format(counter)
        for item in self.delete_menu._items:
            if item.name == name:
                self.delete_menu.remove_item(item)
                self.menu.remove_item(item)
                break

        self.save_db()

    def load_db(self):
        """
        Load previously saved bookmarks from a file.
        """
        path = os.path.join(xdg.get_data_dirs()[0],'bookmarklist.dat')
        try:
            # Load Bookmark List from file.
            with open(path,'rb') as f:
                data = f.read()

            try:
                db = _read(data)
                for (key,pos) in db:
                    self.bookmarks.append((key,pos))
                    self.display_bookmark(key, pos)
                logger.debug('loaded {0} bookmarks'.format(len(db)))
            except Exception as s:
                logger.error('BM: bad bookmark file: %s'%s)
                return None
        except IOError as e: # File might not exist
            logger.error('BM: could not open file: %s' % e.strerror)

    def save_db(self):
        """
        Save list of bookmarks to a file.
        """
        # Save List
        path = os.path.join(xdg.get_data_dirs()[0],'bookmarklist.dat')
        # NOTE(review): file is opened 'wb' while _write returns text --
        # fine on Python 2, would need encoding on Python 3.
        with open(path,'wb') as f:
            f.write(_write(self.bookmarks))
        logger.debug('saving {0} bookmarks'.format(len(self.bookmarks)))
def __enb(eventname, exaile, nothing):
    """Schedule the real plugin setup on the GLib main loop."""
    GLib.idle_add(_enable, exaile)
def enable(exaile):
    """
    Dummy initialization function, calls _enable when exaile is fully loaded.
    """
    # If exaile is still starting up, defer setup until the GUI exists.
    if exaile.loading:
        event.add_callback(__enb, 'gui_loaded')
    else:
        __enb(None, exaile, None)
def _enable(exaile):
    """
    Called when plugin is enabled. Set up the menus, create the bookmark class, and
    load any saved bookmarks.
    """
    bm = Bookmarks(exaile)

    # add tools menu items
    providers.register('menubar-tools-menu', _sep('plugin-sep', ['track-properties']))

    item = _smi('bookmarks', ['plugin-sep'], _('_Bookmarks'),
        'user-bookmarks', submenu=bm.menu)
    providers.register('menubar-tools-menu', item)

    # Populate the menus from the persisted bookmark file.
    bm.load_db()
def disable(exaile):
    """
    Called when the plugin is disabled. Destroy menu.
    """
    # Unregister the menu item added by _enable(); the separator with name
    # 'plugin-sep' is not removed here.
    for item in providers.get('menubar-tools-menu'):
        if item.name == 'bookmarks':
            providers.unregister('menubar-tools-menu', item)
            break

# vi: et ts=4 sts=4 sw=4
def get_preferences_pane():
    """Return the module implementing this plugin's preferences pane."""
    return bookmarksprefs
|
Zarokka/exaile
|
plugins/bookmarks/__init__.py
|
Python
|
gpl-2.0
| 10,244
|
[
"Brian"
] |
f52aac99f7959029736b0a75e2fa39b1e4015f4f57a3a86cd881b87620feae7d
|
import sys, os, glob
# Python 2 test script: exercises nesoni's aligners and consensus calling
# over all combinations of aligner / read pairing / monogamy options.

# Create the output directory on first use.
if not os.path.exists('output'):
    os.mkdir('output')

# Path of the nesoni driver script, relative to the test directory.
SCRIPT = '../nesoni_scripts/nesoni'

def run_nesoni(command, same_python=True):
    # Run the nesoni script with *command*; abort on non-zero exit status.
    full_command = '%s %s' % (SCRIPT, command)
    if same_python:
        # Run the script under the same interpreter as this test.
        full_command = sys.executable + ' ' + full_command
    print
    print 'Running:'
    print full_command
    print
    assert 0 == os.system(full_command)

# NOTE(review): `args` is collected but never used below.
args = sys.argv[1:]

outer = [ 'shrimp', 'bowtie' ]

for aligner in outer:
    for prefix2, section in [('unpaired_','reads:'),('paired_','pairs:')]:
        for prefix3, monogamous_option in [('monogamous_',''), ('polygamous_',' --monogamous 0'),('random_',' --monogamous 0 --random 1')]:
            prefix1 = aligner + '_'
            name = 'output/'+prefix1+prefix2+prefix3+'consensus'

            print
            print '='*70
            print name
            print

            # Remove stale result files (but not subdirectories) from
            # previous runs.
            for filename in glob.glob(os.path.join(name, '*')):
                if not os.path.isdir(filename):
                    os.unlink(filename)

            # Align the reads against the reference, then call consensus.
            run_nesoni(r""" \
                %(aligner)s: %(name)s \
                data/NC_001422_modified.gbk \
                %(section)s \
                data/reads_1.txt.gz \
                data/reads_2.txt.gz
            """ % locals())

            run_nesoni('consensus: ' + name + ' ' + monogamous_option)
|
Victorian-Bioinformatics-Consortium/nesoni
|
test/test_nesoni_consensus.py
|
Python
|
gpl-2.0
| 1,422
|
[
"Bowtie"
] |
acda74f0a998dc469351c44db6e173366a14f3f6ab36875af928a13fa07e8d20
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
import pytest
from MDAnalysisTests.datafiles import (
TRZ, TRZ_psf,
waterPSF, waterDCD,
XYZ_mini,
)
from numpy.testing import assert_almost_equal
import numpy as np
from unittest import mock
from importlib import reload
import MDAnalysis as mda
from MDAnalysis.analysis.hydrogenbonds import (HydrogenBondAutoCorrel as HBAC,
find_hydrogen_donors)
class TestHydrogenBondAutocorrel(object):
    """
    Regression tests for `HydrogenBondAutoCorrel` on a small PSF/TRZ test
    system: expected arrays below are previously observed values, pinned to
    detect behaviour changes.
    """

    @pytest.fixture()
    def u(self):
        return mda.Universe(TRZ_psf, TRZ)

    @pytest.fixture()
    def hydrogens(self, u):
        # Amide hydrogens of the test system.
        return u.atoms.select_atoms('name Hn')

    @pytest.fixture()
    def nitrogens(self, u):
        return u.atoms.select_atoms('name N')

    @pytest.fixture()
    def oxygens(self, u):
        return u.atoms.select_atoms('name O')

    # regression tests for different conditions
    def test_continuous(self, u, hydrogens, oxygens, nitrogens):
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='continuous',
                     sample_time=0.06,
                     )
        hbond.run()

        assert_almost_equal(
            hbond.solution['results'],
            np.array([ 1.        ,  0.92668623,  0.83137828,
                       0.74486804,  0.67741936,  0.60263932],
                     dtype=np.float32)
        )

    def test_continuous_excl(self, u, hydrogens, oxygens, nitrogens):
        # Excluding matched H/O pairs must not change the continuous result.
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='continuous',
                     exclusions=(np.arange(len(hydrogens)), np.array(
                         range(len(oxygens)))),
                     sample_time=0.06,
                     )
        hbond.run()

        assert_almost_equal(
            hbond.solution['results'],
            np.array([ 1.        ,  0.92668623,  0.83137828,
                       0.74486804,  0.67741936,  0.60263932],
                     dtype=np.float32)
        )

    def test_intermittent(self, u, hydrogens, oxygens, nitrogens):
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='intermittent',
                     sample_time=0.06,
                     )
        hbond.run()

        assert_almost_equal(
            hbond.solution['results'],
            np.array([ 1.        ,  0.92668623,  0.84310848,
                       0.79325515,  0.76392961,  0.72287393],
                     dtype=np.float32)
        )

    def test_intermittent_timecut(self, u, hydrogens, oxygens, nitrogens):
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='intermittent',
                     time_cut=0.01,  # time cut at traj.dt == continuous
                     sample_time=0.06,
                     )
        hbond.run()

        assert_almost_equal(
            hbond.solution['results'],
            np.array([ 1.        ,  0.92668623,  0.83137828,
                       0.74486804,  0.67741936,  0.60263932],
                     dtype=np.float32)
        )

    def test_intermittent_excl(self, u, hydrogens, oxygens, nitrogens):
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='intermittent',
                     exclusions=(np.arange(len(hydrogens)), np.array(
                         range(len(oxygens)))),
                     sample_time=0.06,
                     )
        hbond.run()

        assert_almost_equal(
            hbond.solution['results'],
            np.array([ 1.        ,  0.92668623,  0.84310848,
                       0.79325515,  0.76392961,  0.72287393],
                     dtype=np.float32)
        )

    # For `solve` the test trajectories aren't long enough
    # So spoof the results and check that solver finds solution
    def test_solve_continuous(self, u, hydrogens, oxygens, nitrogens):
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='continuous',
                     sample_time=0.06,
                     )

        def actual_function_cont(t):
            # Biexponential with known coefficients; solve() must recover
            # (A1, tau1, tau2) = (0.75, 0.5, 0.1).
            A1 = 0.75
            A2 = 0.25
            tau1 = 0.5
            tau2 = 0.1
            return A1 * np.exp(-t/tau1) + A2 * np.exp(-t/tau2)
        hbond.solution['time'] = time = np.arange(0, 0.06, 0.001)
        hbond.solution['results'] = actual_function_cont(time)

        hbond.solve()

        assert_almost_equal(
            hbond.solution['fit'],
            np.array([0.75, 0.5, 0.1]),
        )

    def test_solve_intermittent(self, u, hydrogens, oxygens, nitrogens):
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='intermittent',
                     sample_time=0.06,
                     )

        def actual_function_int(t):
            # Triexponential; solve() must recover (A1, A2, tau1, tau2, tau3).
            A1 = 0.33
            A2 = 0.33
            A3 = 0.34
            tau1 = 5
            tau2 = 1
            tau3 = 0.1
            return A1 * np.exp(-t/tau1) + A2 * np.exp(-t/tau2) + A3 * np.exp(-t/tau3)
        hbond.solution['time'] = time = np.arange(0, 6.0, 0.01)
        hbond.solution['results'] = actual_function_int(time)

        hbond.solve()

        assert_almost_equal(
            hbond.solution['fit'],
            np.array([0.33, 0.33, 5, 1, 0.1]),
        )

    # setup errors
    def test_wronglength_DA(self, u, hydrogens, oxygens, nitrogens):
        # Mismatched hydrogen/acceptor lengths must raise.
        with pytest.raises(ValueError):
            HBAC(u,
                 hydrogens=hydrogens[:-1],
                 acceptors=oxygens,
                 donors=nitrogens,
                 bond_type='intermittent',
                 exclusions=(np.arange(len(hydrogens)), np.array(
                     range(len(oxygens)))),
                 sample_time=0.06,
                 )

    def test_exclusions(self, u, hydrogens, oxygens, nitrogens):
        # Exclusion index arrays of unequal length must raise.
        excl_list = (np.array(range(len(hydrogens))), np.array(
            range(len(oxygens))))
        excl_list2 = excl_list[0], excl_list[1][:-1]

        with pytest.raises(ValueError):
            HBAC(u,
                 hydrogens=hydrogens,
                 acceptors=oxygens,
                 donors=nitrogens,
                 bond_type='intermittent',
                 exclusions=excl_list2,
                 sample_time=0.06,
                 )

    def test_bond_type_VE(self, u, hydrogens, oxygens, nitrogens):
        # Unknown bond_type must raise.
        with pytest.raises(ValueError):
            HBAC(u,
                 hydrogens=hydrogens,
                 acceptors=oxygens,
                 donors=nitrogens,
                 bond_type='marzipan',
                 exclusions=(np.arange(len(hydrogens)), np.array(range(
                     len(oxygens)))),
                 sample_time=0.06,
                 )

    def test_solve_before_run_VE(self, u, hydrogens, oxygens, nitrogens):
        # solve() requires run() to have produced results first.
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='continuous',
                     sample_time=0.06,
                     )
        with pytest.raises(ValueError):
            hbond.solve()

    @mock.patch('MDAnalysis.coordinates.TRZ.TRZReader._read_frame')
    def test_unslicable_traj_VE(self, mock_read, u, hydrogens, oxygens, nitrogens):
        # Simulate a trajectory reader that cannot be sliced.
        mock_read.side_effect = TypeError

        with pytest.raises(ValueError):
            HBAC(
                u,
                hydrogens=hydrogens,
                acceptors=oxygens,
                donors=nitrogens,
                bond_type='continuous',
                sample_time=0.06
            )

    def test_repr(self, u, hydrogens, oxygens, nitrogens):
        hbond = HBAC(u,
                     hydrogens=hydrogens,
                     acceptors=oxygens,
                     donors=nitrogens,
                     bond_type='continuous',
                     sample_time=0.06,
                     )
        assert isinstance(repr(hbond), str)
def test_find_donors():
    """find_hydrogen_donors pairs each hydrogen with its bonded donor atom."""
    u = mda.Universe(waterPSF, waterDCD)
    H = u.select_atoms('name H*')
    D = find_hydrogen_donors(H)
    assert len(H) == len(D)
    # check each O is bonded to the corresponding H
    for h_atom, o_atom in zip(H, D):
        assert o_atom in h_atom.bonded_atoms
def test_donors_nobonds():
    """A universe without bond information must raise NoDataError."""
    u = mda.Universe(XYZ_mini)
    with pytest.raises(mda.NoDataError):
        find_hydrogen_donors(u.atoms)
|
MDAnalysis/mdanalysis
|
testsuite/MDAnalysisTests/analysis/test_hydrogenbondautocorrel.py
|
Python
|
gpl-2.0
| 9,893
|
[
"MDAnalysis"
] |
96c87afe81c758aca78bc5f5fe67cc0a60ceed5a78c829c35f803777de6616ae
|
# Copyright 2020, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
# This modules disables the Pytype analyzer, see
# https://github.com/tensorflow/federated/blob/main/docs/pytype.md for more
# information.
"""Generic aggregator for model updates in federated averaging."""
import math
from tensorflow_federated.python.aggregators import differential_privacy
from tensorflow_federated.python.aggregators import distributed_dp
from tensorflow_federated.python.aggregators import encoded
from tensorflow_federated.python.aggregators import factory
from tensorflow_federated.python.aggregators import mean
from tensorflow_federated.python.aggregators import quantile_estimation
from tensorflow_federated.python.aggregators import robust
from tensorflow_federated.python.aggregators import secure
from tensorflow_federated.python.learning import debug_measurements
def _default_zeroing(
    inner_factory: factory.AggregationFactory,
    secure_estimation: bool = False) -> factory.AggregationFactory:
  """Wraps `inner_factory` with the default adaptive zeroing."""
  # The zeroing norm adapts very quickly to sit somewhat above the highest
  # values observed so far.
  zeroing_norm = quantile_estimation.PrivateQuantileEstimationProcess.no_noise(
      initial_estimate=10.0,
      target_quantile=0.98,
      learning_rate=math.log(10.0),
      multiplier=2.0,
      increment=1.0,
      secure_estimation=secure_estimation)
  if not secure_estimation:
    return robust.zeroing_factory(zeroing_norm, inner_factory)
  # With secure estimation, the count of zeroed values is also summed
  # securely.
  return robust.zeroing_factory(
      zeroing_norm,
      inner_factory,
      zeroed_count_sum_factory=secure.SecureSumFactory(
          upper_bound_threshold=1, lower_bound_threshold=0))
def _default_clipping(
    inner_factory: factory.AggregationFactory,
    secure_estimation: bool = False) -> factory.AggregationFactory:
  """Wraps `inner_factory` with the default adaptive clipping."""
  # The clipping norm adapts relatively quickly to a moderately high norm.
  clipping_norm = quantile_estimation.PrivateQuantileEstimationProcess.no_noise(
      initial_estimate=1.0,
      target_quantile=0.8,
      learning_rate=0.2,
      secure_estimation=secure_estimation)
  if not secure_estimation:
    return robust.clipping_factory(clipping_norm, inner_factory)
  # With secure estimation, the count of clipped values is also summed
  # securely.
  return robust.clipping_factory(
      clipping_norm,
      inner_factory,
      clipped_count_sum_factory=secure.SecureSumFactory(
          upper_bound_threshold=1, lower_bound_threshold=0))
def robust_aggregator(
    *,
    zeroing: bool = True,
    clipping: bool = True,
    weighted: bool = True,
    add_debug_measurements: bool = False,
) -> factory.AggregationFactory:
  """Creates a mean aggregator hardened by adaptive zeroing and clipping.

  Extremely large values are zeroed out for robustness to corrupted client
  data, and updates are clipped in the L2 norm to a moderately high norm
  for robustness to outliers. See `tff.aggregators.clipping_factory`,
  `tff.aggregators.zeroing_factory` and
  `tff.aggregators.PrivateQuantileEstimationProcess` for details of the
  quantile-based adaptive algorithms.

  Args:
    zeroing: Whether to enable adaptive zeroing for data corruption
      mitigation.
    clipping: Whether to enable adaptive L2-norm clipping for robustness.
    weighted: Whether the mean is weighted (vs. unweighted).
    add_debug_measurements: Whether to add measurements suitable for
      debugging learning algorithms; see
      `tff.learning.add_debug_measurements`.

  Returns:
    A `tff.aggregators.AggregationFactory`.
  """
  aggregation = mean.MeanFactory() if weighted else mean.UnweightedMeanFactory()
  if add_debug_measurements:
    aggregation = debug_measurements.add_debug_measurements(aggregation)
  # Zeroing is the outermost wrapper, around clipping, around the mean.
  if clipping:
    aggregation = _default_clipping(aggregation)
  if zeroing:
    aggregation = _default_zeroing(aggregation)
  return aggregation
def dp_aggregator(noise_multiplier: float,
                  clients_per_round: float,
                  zeroing: bool = True) -> factory.UnweightedAggregationFactory:
  """Creates an aggregator with adaptive zeroing and differential privacy.

  Extremely large values are zeroed out for robustness to data corruption
  on clients; updates are then adaptively clipped and Gaussian noise is
  added for differentially private learning. For the DP algorithm see
  McMahan et al. (2017) https://arxiv.org/abs/1710.06963; the adaptive
  clipping uses the geometric method of Thakkar et al. (2019)
  https://arxiv.org/abs/1905.03871.

  Args:
    noise_multiplier: Noise multiplier for the Gaussian mechanism applied
      to model updates. A value of 1.0 or higher may be needed for
      meaningful privacy; see the papers above to compute an
      (epsilon, delta) guarantee.
    clients_per_round: Expected number of clients per round; must be
      positive.
    zeroing: Whether to enable adaptive zeroing for data corruption
      mitigation.

  Returns:
    A `tff.aggregators.UnweightedAggregationFactory`.
  """
  dp_factory = (
      differential_privacy.DifferentiallyPrivateFactory.gaussian_adaptive(
          noise_multiplier, clients_per_round))
  return _default_zeroing(dp_factory) if zeroing else dp_factory
def compression_aggregator(
    *,
    zeroing: bool = True,
    clipping: bool = True,
    weighted: bool = True,
    add_debug_measurements: bool = False,
) -> factory.AggregationFactory:
  """Creates a compressed-mean aggregator with adaptive zeroing and clipping.

  Extremely large values are zeroed out and updates are clipped in the L2
  norm, as in `robust_aggregator`. Values are additionally uniformly
  quantized to shrink the client-to-server model update; see Suresh et al.
  (2017) http://proceedings.mlr.press/v70/suresh17a/suresh17a.pdf. The
  default configuration is chosen so that compression does not degrade
  trained model quality on typical tasks.

  Args:
    zeroing: Whether to enable adaptive zeroing for data corruption
      mitigation.
    clipping: Whether to enable adaptive L2-norm clipping. Note this is
      performed prior to the per-coordinate clipping required for
      quantization.
    weighted: Whether the mean is weighted (vs. unweighted).
    add_debug_measurements: Whether to add measurements suitable for
      debugging learning algorithms; see
      `tff.learning.add_debug_measurements`.

  Returns:
    A `tff.aggregators.AggregationFactory`.
  """
  # Only tensors above the threshold are quantized (to 8 bits).
  compressed_sum = encoded.EncodedSumFactory.quantize_above_threshold(
      quantization_bits=8, threshold=20000)
  if weighted:
    aggregation = mean.MeanFactory(compressed_sum)
  else:
    aggregation = mean.UnweightedMeanFactory(compressed_sum)
  if add_debug_measurements:
    aggregation = debug_measurements.add_debug_measurements(aggregation)
  if clipping:
    aggregation = _default_clipping(aggregation)
  if zeroing:
    aggregation = _default_zeroing(aggregation)
  return aggregation
def secure_aggregator(
    *,
    zeroing: bool = True,
    clipping: bool = True,
    weighted: bool = True,
) -> factory.AggregationFactory:
  """Creates a secure-summation mean aggregator with zeroing and clipping.

  Extremely large values are zeroed out and updates are clipped to a
  moderately high L2 norm. Weighted values are then summed with a
  cryptographic protocol so the server cannot inspect individual updates
  before enough of them have been added together; see Bonawitz et al.
  (2017) https://dl.acm.org/doi/abs/10.1145/3133956.3133982. In TFF this
  is realized via the `tff.federated_secure_sum_bitwidth` operator.

  Args:
    zeroing: Whether to enable adaptive zeroing for data corruption
      mitigation.
    clipping: Whether to enable adaptive L2-norm clipping. Note this is
      performed prior to the per-coordinate clipping required for secure
      aggregation.
    weighted: Whether the mean is weighted (vs. unweighted).

  Returns:
    A `tff.aggregators.AggregationFactory`.
  """
  secure_clip_bound = (
      quantile_estimation.PrivateQuantileEstimationProcess.no_noise(
          initial_estimate=50.0,
          target_quantile=0.95,
          learning_rate=1.0,
          multiplier=2.0,
          secure_estimation=True))
  aggregation = secure.SecureSumFactory(secure_clip_bound)
  if weighted:
    aggregation = mean.MeanFactory(
        value_sum_factory=aggregation,
        # Use a power of 2 minus one to more accurately encode floating
        # dtypes that actually contain integer values. 2 ^ 20 gives us
        # approximately a range of [0, 1 million]. Existing use cases have
        # the weights either all ones, or a variant of number of examples
        # processed locally.
        weight_sum_factory=secure.SecureSumFactory(
            upper_bound_threshold=float(2**20 - 1),
            lower_bound_threshold=0.0))
  else:
    aggregation = mean.UnweightedMeanFactory(
        value_sum_factory=aggregation,
        count_sum_factory=secure.SecureSumFactory(
            upper_bound_threshold=1, lower_bound_threshold=0))
  if clipping:
    aggregation = _default_clipping(aggregation, secure_estimation=True)
  if zeroing:
    aggregation = _default_zeroing(aggregation, secure_estimation=True)
  return aggregation
def ddp_secure_aggregator(
    noise_multiplier: float,
    expected_clients_per_round: int,
    bits: int = 20,
    zeroing: bool = True,
    rotation_type: str = 'hd') -> factory.UnweightedAggregationFactory:
  """Creates an unweighted aggregator with zeroing and distributed DP.

  Extremely large values are zeroed out for robustness to data corruption,
  then distributed DP (compression, discrete noising and SecAgg) is applied
  with adaptive clipping for differentially private learning. For the two
  main distributed DP algorithms see https://arxiv.org/pdf/2102.06387 and
  https://arxiv.org/pdf/2110.04995.pdf; the adaptive clipping uses the
  geometric method of https://arxiv.org/abs/1905.03871.

  Args:
    noise_multiplier: Noise multiplier (with respect to the initial L2
      clipping) for the distributed DP mechanism. A value of 1.0 or higher
      may be needed for meaningful privacy.
    expected_clients_per_round: Expected number of clients per round; must
      be positive.
    bits: Bit-width for the aggregation. This concerns the noisy, quantized
      aggregate at the server and should account for
      `expected_clients_per_round`; it must be in the inclusive range
      [1, 22]. The default of 20 bits dictates the computational and
      communication cost of Secure Aggregation; smaller values usually
      suffice (e.g. 12 bits for ~100 securely aggregated client updates,
      16 bits for ~1000).
    zeroing: Whether to enable adaptive zeroing for data corruption
      mitigation.
    rotation_type: Rotation used by distributed DP: 'hd' (Hadamard
      transform) or 'dft' (discrete Fourier transform).

  Returns:
    A `tff.aggregators.UnweightedAggregationFactory`.
  """
  aggregation = distributed_dp.DistributedDpSumFactory(
      noise_multiplier=noise_multiplier,
      expected_clients_per_round=expected_clients_per_round,
      bits=bits,
      l2_clip=0.1,
      mechanism='distributed_skellam',
      rotation_type=rotation_type,
      auto_l2_clip=True)
  aggregation = mean.UnweightedMeanFactory(
      value_sum_factory=aggregation,
      count_sum_factory=secure.SecureSumFactory(
          upper_bound_threshold=1, lower_bound_threshold=0))
  return (_default_zeroing(aggregation, secure_estimation=True)
          if zeroing else aggregation)
|
tensorflow/federated
|
tensorflow_federated/python/learning/model_update_aggregator.py
|
Python
|
apache-2.0
| 12,865
|
[
"Gaussian"
] |
1aba37ca2aa09d01f9f90bc58e3bf7b0289c46d29fbb2f7126533d88bd449a9b
|
from math import fsum, sqrt
from copy import copy
from pyelectro import analysis
from traceHandler import sizeError
import efel
import numpy as np
import scipy
import inspyred
from inspyred import ec
from inspyred.ec import emo
from inspyred.ec import variators
from inspyred.ec import observers
import modelHandler
import time
import random
import threading
import matplotlib.pyplot as plt
# Historically this was a Python 2/3 compatibility fallback (``import
# copy_reg`` on Python 2); after the 2-to-3 conversion both the try and the
# except branch imported the same module, so a plain import is equivalent
# and avoids the bare ``except``.
import copyreg
from types import MethodType
import os
try:
import cPickle as pickle
except ImportError:
import pickle
def _pickle_method(method):
    """
    Pickle support for bound methods: reduce *method* to a call of
    ``_unpickle_method`` with the function name, instance and class.
    """
    func_name = method.__func__.__name__
    obj = method.__self__
    cls = method.__self__.__class__
    return _unpickle_method, (func_name, obj, cls)
def _unpickle_method(func_name, obj, cls):
    """
    Inverse of ``_pickle_method``: look up *func_name* along the MRO of
    *cls* and re-bind the found function to *obj*.
    """
    # NOTE(review): if the name is found in no class of the MRO, `func` is
    # never assigned and the final line raises UnboundLocalError.
    for cls in cls.mro():
        try:
            func = cls.__dict__[func_name]
        except KeyError:
            pass
        else:
            break
    return func.__get__(obj, cls)
# Register the bound-method (un)pickling helpers with copyreg.  The
# historical try/except executed the identical call in both branches (a
# 2-to-3 conversion leftover), so a single registration is equivalent.
copyreg.pickle(MethodType, _pickle_method, _unpickle_method)
def frange(start, stop, step):
    """
    Yield real values from ``start`` up to (but excluding) ``stop``.

    :param start: beginning of range
    :param stop: end of range (exclusive)
    :param step: step size between values

    The value is advanced by repeated addition, so floating point rounding
    accumulates exactly as in a manual ``while`` loop.
    """
    value = start
    while True:
        if value >= stop:
            return
        yield value
        value += step
class spike_frame():
    """
    Lightweight record describing the crucial points of a detected spike.

    Attributes:
        start_pos: index where the trace first crosses the detection threshold
        start_val: trace value at ``start_pos``
        peak: index of the spike maximum
        peak_val: trace value at the peak
        stop_pos: index where the trace drops back below the threshold
        stop_val: trace value at ``stop_pos``
    """
    def __init__(self, start, start_val, peak, peak_val, stop, stop_val):
        # spike boundaries (indices into the trace)
        self.start_pos = start
        self.stop_pos = stop
        # trace values at the boundaries
        self.start_val = start_val
        self.stop_val = stop_val
        # location and height of the maximum
        self.peak = peak
        self.peak_val = peak_val
class spike(spike_frame):
    """
    Extension of :class:`spike_frame` that additionally stores every sample
    of the spike, not just the crucial points.
    """
    def __init__(self, start, start_val, peak, peak_val, stop, stop_val, spike):
        # BUG FIX: the constructor was previously named ``__init`` (missing
        # the trailing underscores), so it was never invoked and ``self.s``
        # was never assigned.
        spike_frame.__init__(self, start, start_val, peak, peak_val, stop, stop_val)
        self.s = spike  # full vector of samples belonging to the spike
class fF(object):
"""
Class encapsulating the implemented error functions.
:param reader_object: a ``traceReader`` object containing the input trace(s)
:param model_object: either ``modelHandlerNeuron`` or ``externalHandler`` object, this performs the model related tasks
:param option_object: an ``optionHandler`` object with the stored settings
Main attributes:
:attr: thres: the spike detection threshold
:attr: calc_dict: contains references to the existing fitness functions, using its names as keys
:attr: user_fun_name: the name of the function defined by the user (optional)
"""
    def __init__(self, reader_object, option_object):
        """
        Store the input/option handlers and build the fitness-function registry.

        :param reader_object: ``traceReader`` object holding the target trace(s)
        :param option_object: ``optionHandler`` object with the stored settings
        """
        # fitness values computed for the most recent batch of candidates
        self.fitnes = []
        # model voltage traces recorded for the most recent batch of candidates
        self.model_trace = []
        # spike detection threshold, taken from the user settings
        self.thres = option_object.spike_thres
        #self.d_spike=[]
        #self.m_spike=[]
        # the model handler is created lazily by combineFeatures / MooFeatures
        self.model=None
        self.option = option_object
        self.reader = reader_object
        #self.current_pop=0
        # top-level evaluation strategies, keyed by their display names
        self.fun_dict = {"Combinations": self.combineFeatures,
                 "Multiobj": self.MooFeatures,
                 "Deapwrapper": self.DEAP_wrapper}
        # individual fitness functions, keyed by their display names
        self.calc_dict = {"MSE": self.calc_ase,
                 "MSE (excl. spikes)": self.calc_spike_ase,
                 "Spike count": self.calc_spike,
                 "Spike count (stim.)": self.spike_rate,
                 "ISI differences": self.isi_differ,
                 "Latency to 1st spike": self.first_spike,
                 "AP amplitude": self.AP_overshoot,
                 "AHP depth": self.AHP_depth,
                 "AP width": self.AP_width,
                 "Derivative difference" : self.calc_grad_dif}
        #"PPTD" : self.pyelectro_pptd
    def setParameters(self, section, params):
        """
        Sets the specified parameters to the given values. If there is a function defined by the user
        it calls that instead.

        :param section: ``list`` of strings specifying precisely
            ("section","channel","channel parameter" or "section" "morphological parameter") the parameter to be set
        :param params: ``list`` of real values to be assigned to the parameters

        .. note::
            The parameters and the values must be in appropriate order and the user must guarantee that
            the parameters are in their valid ranges.
        """
        if self.option.GetUFunString() == "":
            for sec in section:
                # a 4-token specifier addresses a channel parameter, a
                # 2-token one a morphological parameter
                if len(str.split(sec, " ")) == 4:
                    self.model.SetChannelParameters(str.strip(str.split(sec, " ")[0]), str.strip(str.split(sec, " ")[1]), str.strip(str.split(sec, " ")[2]), str.strip(str.split(sec, " ")[3]),
                            params[section.index(sec)])
                else:
                    self.model.SetMorphParameters(str.strip(str.split(sec, " ")[0]), str.strip(str.split(sec, " ")[1]), params[section.index(sec)])
        else:
            # call the user-defined function instead; ``usr_fun`` is a module
            # global injected by combineFeatures/MooFeatures when they exec
            # the user's code (NameError if that has not happened yet)
            usr_fun(self, params)
    def modelRunner(self, candidates, act_trace_idx):
        """
        Prepares the model for the simulation, runs the simulation and records the appropriate variable.

        If an external simulator is used, the parameters are written to a
        uniquely named "params<ID>.param" file, the command stored in the
        ``model`` member is executed, and the model's output is read back
        from "trace<ID>.dat" (plus, optionally, spike timings from
        "spike<ID>.dat"). The unique ID is built from the process id and the
        current time so that parallel evaluations do not clobber each other's
        files.

        :param candidates: the new parameter set generated by the optimization algorithm as a ``list`` of real values
        :param act_trace_idx: used by the external simulator to select current stimulation protocol
        :return: the external simulator's exit status (0 on success); always 0
            for the built-in Neuron path
        """
        error=0
        from modelHandler import externalHandler
        if isinstance(self.model, externalHandler):
            # fold the candidate values into the timestamp so two evaluations
            # started in the same clock tick still get distinct IDs
            param_sum=sum(candidates)
            pid = str(os.getpid())
            current_time = str(time.time() + param_sum).replace('.','')
            unique_ID = pid + current_time
            self.model.record[0] = []
            print('PID ', pid, ' ************')
            # one parameter value per line, followed by the trace index
            with open(self.option.base_dir + "/params" + unique_ID + ".param" , "w") as out_handler:
                print("CANDIDATES")
                print(candidates)
                for c in candidates:
                    out_handler.write(str(c) + "\n")
                out_handler.write(str(act_trace_idx))
            os.chdir(self.option.base_dir)
            from subprocess import call
            error=call(self.model.GetExec(unique_ID))
            params_file = 'params' + unique_ID + '.param'
            os.chdir(self.option.base_dir)
            # best-effort cleanup of the parameter file
            try:
                if(params_file != 'params.param'):
                    os.remove(params_file)
            except OSError:
                pass
            # last whitespace-separated column of each line is the voltage
            with open(self.option.base_dir + '/trace' + unique_ID + '.dat', "r") as in_handler:
                for line in in_handler:
                    self.model.record[0].append(float(line.split()[-1]))
            # the spike file is optional; convert spike times (ms) to sample
            # indices using the sampling frequency
            try:
                with open(self.option.base_dir + '/spike' + unique_ID + '.dat', "r") as in_handler:
                    self.model.spike_times = []
                    for line in in_handler:
                        self.model.spike_times.append(int(float(line) / (1000.0 / self.option.input_freq)))
                    #print self.model.spike_times[1:10]
            except OSError:
                pass
            try:
                os.remove(self.option.base_dir + '/trace' + unique_ID + '.dat')
            except OSError:
                pass
            try:
                os.remove(self.option.base_dir + '/spike' + unique_ID + '.dat')
            except OSError:
                pass
        else:
            settings = self.option.GetModelRun()#1. is the integrating step dt
            # feature-based fitting has no recorded data trace to take the
            # sampling step from, so a fixed 0.05 ms step is used
            if self.option.type[-1]!= 'features':
                settings.append(self.reader.data.step)
            else:
                settings.append(0.05)
            self.model.RunControll(settings)
        return error
def ReNormalize(self, l):
"""
Performs a re-normalization based on the parameter bounds specified in the ``option`` object.
:param l: a ``list`` of real values to be re-normalized
:return: the re-normalized values in a ``list``
"""
tmp = []
for i in range(len(l)):
tmp.append(l[i] * (self.option.boundaries[1][i] - self.option.boundaries[0][i]) + self.option.boundaries[0][i])
return tmp
# spike detection
def detectSpike(self, vect):
"""
Detects spikes in the input using the spike detection threshold ``thres`` and generates ``spike_frames``.
A spike is detected when the input value exceeds the threshold, after some increase, reaches a maximum,
then drops under the threshold. These events (crossing the threshold while rising, maximum, crossing the threshold while droping)
are used in the creation of the ``spike_frame`` instance which will represent the detected spike.
:param vect: the trace as ``list`` of real values
:return: a ``list`` of ``spike_frame`` instances
"""
start_pos = 0
stop_pos = 0
start = 0
temp1 = []
for n in range(len(vect)):
if vect[n] > self.thres and start == 0:
start_pos = n
start = 1
elif vect[n] < self.thres and start == 1:
stop_pos = n
start = 0
s = spike_frame(start_pos, vect[start_pos], start_pos+vect[start_pos:stop_pos].index(max(vect[start_pos:stop_pos])), max(vect[start_pos:stop_pos]), stop_pos, vect[stop_pos])
temp1.append(s)
return temp1
#calculates the gradient at the given time
def calc_grad_dif(self, mod_t, exp_t, args):
"""
Calculates the normalized average squared differences of derivatives of the given traces.
The gradient is calculated as follows:
::
grad_a=((mod_t[i+1]-mod_t[i-1])/(2*dt))
where dt is the step between to points in the trace
:param mod_t: the trace obtained from the model as ``list``
:param exp_t: the input trace as ``list``
:param args: optional arguments as ``dictionary``
:return: the normalized average squared differences of derivatives where the normalization is done
by the squared range of the input trace
"""
dt = self.reader.data.step
grad_a = 0
grad_b = 0
grad_b_list=[]
tmp = []
for i in range(1, min(len(mod_t), len(exp_t)) - 1):
grad_a = ((mod_t[i + 1] - mod_t[i - 1]) / (2 * dt))
grad_b = ((exp_t[i + 1] - exp_t[i - 1]) / (2 * dt))
tmp.append((grad_a - grad_b) ** 2)
grad_b_list.append(grad_b)
try:
if self.option.output_level == "1":
print("grad dif")
print(fsum(tmp) / len(tmp) / (pow(max(grad_b_list) - min(grad_b_list), 2)))
except OverflowError:
return 1
return fsum(tmp) / len(tmp) / (pow(max(grad_b_list) - min(grad_b_list), 2))
#compares the number of spikes in the traces
#counting only traces which are during the stimulus
    def spike_rate(self, mod_t, exp_t, args):
        """
        Calculates the normalized absolute difference in number of spikes that occur during the time of the stimulus.

        :param mod_t: the trace obtained from the model as ``list``
        :param exp_t: the input trace as ``list``
        :param args: optional arguments as ``dictionary``; ``add_data`` may
            hold pre-computed spike frames for the input trace
        :return: ``|mod - exp| / (mod + exp + 1)`` where ``mod`` and ``exp``
            are the spike counts inside the stimulus window
        """
        temp_fit = 0
        window = int(self.option.spike_window)
        stim_dur = self.option.stim_dur
        # a huge or zero stimulus duration means "whole trace"
        if stim_dur >= 1e9 or stim_dur==0:
            stim_dur = self.option.input_length
        add_data = args.get("add_data", None)
        spikes = [[], []]
        if (self.model.spike_times == None):
            # restrict detection to the stimulus window (indices derived from
            # the stimulus delay/duration in ms and the sampling frequency)
            spikes[0] = self.detectSpike(mod_t[int(self.option.stim_del * self.option.input_freq / 1000):int(self.option.stim_del * self.option.input_freq / 1000 + stim_dur * self.option.input_freq / 1000)])
        elif len(self.model.spike_times)!=0:
            # the external simulator reported spike times directly; rebuild
            # spike frames around them (clamped to the trace boundaries)
            for sp_t in self.model.spike_times:
                start_pos=sp_t-window
                if start_pos<0:
                    start_pos=0
                start_val=mod_t[start_pos]
                peak_pos=sp_t
                peak_val=mod_t[sp_t]
                end_pos=sp_t+window
                if end_pos>=len(mod_t):
                    end_pos=len(mod_t)-1
                end_val=mod_t[end_pos]
                spikes[0].append(spike_frame(start_pos,start_val,peak_pos,peak_val,end_pos,end_val))
        if add_data != None:
            spikes[1] = add_data
        else:
            spikes[1] = self.detectSpike(exp_t[int(self.option.stim_del * self.option.input_freq / 1000):int(self.option.stim_del * self.option.input_freq / 1000 + stim_dur * self.option.input_freq / 1000)])
        mod_spike = len(spikes[0])
        exp_spike = len(spikes[1])
        # the +1 in the denominator keeps the score finite when neither
        # trace contains spikes
        temp_fit += float(abs(mod_spike - exp_spike)) / float(exp_spike + mod_spike + 1)
        if self.option.output_level == "1":
            print("spike rate:")
            print("mod: ", len(spikes[0]))
            print("exp: ", len(spikes[1]))
            print(temp_fit)
        return temp_fit
#compares the two traces based on the
#differences in the interspike intervals (isi)
#normalized
#returns 2 if model trace has no spikes
#The value of k was either
#four ISIs or one-fifth of the total number of ISIs, whichever was the smaller
#of the two
    def isi_differ(self, mod_t, exp_t, args):
        """
        Calculates the normalized average absolute ISI difference in the two traces.

        Corresponding inter-spike intervals (peak-to-peak distances) are
        compared pairwise; if one trace has more spikes than the other, the
        distance from the last common spike to the final spike is added as an
        extra penalty term.

        :param mod_t: the trace obtained from the model as ``list``
        :param exp_t: the input trace as ``list``
        :param args: optional arguments as ``dictionary``; ``add_data`` may
            hold pre-computed spike frames for the input trace

        .. note::
            If neither trace contains at least two spikes, the function returns zero.
            If exactly one of them does, the function returns one.

        :return: the summed absolute ISI differences normalized by the length
            of ``exp_t``
        """
        add_data = args.get("add_data", None)
        window = int(self.option.spike_window)
        spikes = [[], []]
        if (self.model.spike_times == None):
            spikes[0] = self.detectSpike(mod_t)
        elif len(self.model.spike_times)!=0:
            # the external simulator reported spike times directly; rebuild
            # spike frames around them (clamped to the trace boundaries)
            for sp_t in self.model.spike_times:
                start_pos=sp_t-window
                if start_pos<0:
                    start_pos=0
                start_val=mod_t[start_pos]
                peak_pos=sp_t
                peak_val=mod_t[sp_t]
                end_pos=sp_t+window
                if end_pos>=len(mod_t):
                    end_pos=len(mod_t)-1
                end_val=mod_t[end_pos]
                spikes[0].append(spike_frame(start_pos,start_val,peak_pos,peak_val,end_pos,end_val))
        if add_data != None:
            spikes[1] = add_data
        else:
            spikes[1] = self.detectSpike(exp_t)
        tmp = []
        #tmp.append(abs(len(spikes[0])-len(spikes[1]))/max( float(len(spikes[0])),float(len(spikes[1])-1) ))
        # fewer than two spikes means no ISI exists in that trace
        if (len(spikes[0]) < 2) and (len(spikes[1]) < 2):
            return 0
        if (len(spikes[0]) < 2) != (len(spikes[1]) < 2):
            return 1
        for s in range(min(len(spikes[0]), len(spikes[1])) - 1):
            tmp.append(abs((spikes[0][s + 1].peak - spikes[0][s].peak)
                    - (spikes[1][s + 1].peak - spikes[1][s].peak)))
        # penalty for surplus spikes in either trace: distance from the last
        # common spike to that trace's final spike
        if len(spikes[0]) > len(spikes[1]):
            tmp.append((spikes[0][-1].peak - spikes[0][len(spikes[1])-1].peak))
        elif len(spikes[0]) < len(spikes[1]):
            tmp.append((spikes[1][-1].peak - spikes[1][len(spikes[0])-1].peak))
        if self.option.output_level == "1":
            print("isi difference:")
            print("mod: ", len(spikes[0]))
            print("exp: ", len(spikes[1]))
            print(fsum(tmp), " / ", len(exp_t), " = ", fsum(tmp) / len(exp_t))
        return fsum(tmp) / len(exp_t)
#compares the two traces based on the latency of the first spikes
    def first_spike(self, mod_t, exp_t, args):
        """
        Calculates the normalized squared latency differences of the first spikes.

        :param mod_t: the trace obtained from the model as ``list``
        :param exp_t: the input trace as ``list``
        :param args: optional arguments as ``dictionary``; ``add_data`` may
            hold pre-computed spike frames for the input trace

        .. note::
            If neither trace contains spikes, the function returns zero.
            If one trace has no spikes, but the other has, the function returns one.

        :return: the squared difference of the first-spike onset indices,
            normalized by the squared length of ``exp_t``
        """
        add_data = args.get("add_data", None)
        spikes = [[], []]
        window = int(self.option.spike_window)
        if (self.model.spike_times == None):
            spikes[0] = self.detectSpike(mod_t)
        elif len(self.model.spike_times)!=0:
            # the external simulator reported spike times directly; rebuild
            # spike frames around them (clamped to the trace boundaries)
            for sp_t in self.model.spike_times:
                start_pos=sp_t-window
                if start_pos<0:
                    start_pos=0
                start_val=mod_t[start_pos]
                peak_pos=sp_t
                peak_val=mod_t[sp_t]
                end_pos=sp_t+window
                if end_pos>=len(mod_t):
                    end_pos=len(mod_t)-1
                end_val=mod_t[end_pos]
                spikes[0].append(spike_frame(start_pos,start_val,peak_pos,peak_val,end_pos,end_val))
        if add_data != None:
            spikes[1] = add_data
        else:
            spikes[1] = self.detectSpike(exp_t)
        if (len(spikes[0]) < 1) and (len(spikes[1]) < 1):
            return 0
        if (len(spikes[0]) < 1) != (len(spikes[1]) < 1):
            return 1
        try:
            if self.option.output_level == "1":
                print("first spike")
                print("mod: ", len(spikes[0]))
                print("exp: ", len(spikes[1]))
                print(float(pow(spikes[0][0].start_pos - spikes[1][0].start_pos, 2)) / (len(exp_t)**2))
        except OverflowError:
            print("overflow")
            return 1
        return float(pow(spikes[0][0].start_pos - spikes[1][0].start_pos, 2)) / (len(exp_t)**2)
#compares the traces based on the spike heights (heights calculated as the following:
#abs(peak avlue-spike threshold) )
#normalized
    def AP_overshoot(self, mod_t, exp_t, args):
        """
        Calculates the normalized average squared differences of AP overshoots.

        An overshoot is the height of the spike peak above the detection
        threshold ``thres``.

        :param mod_t: the trace obtained from the model as ``list``
        :param exp_t: the input trace as ``list``
        :param args: optional arguments as ``dictionary``; ``add_data`` may
            hold pre-computed spike frames for the input trace

        .. note::
            Only the first k common spikes are compared (``zip`` truncation),
            so there is no penalty for one trace having more spikes than the
            other.

        .. note::
            If neither trace contains spikes, the function returns zero.
            If one trace has no spikes, but the other has, the function returns one.

        :return: the average squared overshoot difference, normalized by the
            squared maximal overshoot of the input trace
        """
        add_data = args.get("add_data", None)
        spikes = [[], []]
        window = int(self.option.spike_window)
        if (self.model.spike_times == None):
            spikes[0] = self.detectSpike(mod_t)
        elif len(self.model.spike_times)!=0:
            # the external simulator reported spike times directly; rebuild
            # spike frames around them (clamped to the trace boundaries)
            for sp_t in self.model.spike_times:
                start_pos=sp_t-window
                if start_pos<0:
                    start_pos=0
                start_val=mod_t[start_pos]
                peak_pos=sp_t
                peak_val=mod_t[sp_t]
                end_pos=sp_t+window
                if end_pos>=len(mod_t):
                    end_pos=len(mod_t)-1
                end_val=mod_t[end_pos]
                spikes[0].append(spike_frame(start_pos,start_val,peak_pos,peak_val,end_pos,end_val))
        if add_data != None:
            spikes[1] = add_data
        else:
            spikes[1] = self.detectSpike(exp_t)
        if (len(spikes[0]) < 1) and (len(spikes[1]) < 1):
            return 0
        if ((len(spikes[0]) < 1) != (len(spikes[1]) < 1)):
            return 1
        max_amp = max([x.peak_val - self.thres for x in spikes[1]])
        # guard the normalization against a zero maximal overshoot
        if max_amp == 0:
            max_amp = 1e-12
        tmp = [pow((s1.peak_val - self.thres) - (s2.peak_val - self.thres), 2) for s1, s2 in zip(spikes[0], spikes[1])]
        try:
            if self.option.output_level == "1":
                print("AP oveshoot:")
                print("mod: ", len(spikes[0]))
                print("exp: ", len(spikes[1]))
                print(fsum(tmp) / len(tmp) / (max_amp**2))
            return fsum(tmp) / len(tmp) / (max_amp**2)
        except OverflowError:
            print("overflow")
            return 1
#compares the two traces based on the after-hyperpolarization depth
#basically finds the minimum value between spikes and compares them
#normalized
#calculate average value of the minimum voltage between two APs for both traces,
#take absolute (or squared) difference,
#normalize by (square of) the range of all exp voltage values
#(subthreshold range would be even better, but may be more difficult).
    def AHP_depth(self, mod_t, exp_t, args):
        """
        Calculates the normalized squared average of the differences in after-hyperpolarization depth.

        The AHP-depth is defined as the minimum value between two consecutive
        spikes (or after the last spike).

        :param mod_t: the trace obtained from the model as ``list``
        :param exp_t: the input trace as ``list``
        :param args: optional arguments as ``dictionary``; ``add_data`` may
            hold pre-computed spike frames for the input trace

        .. note::
            If neither trace contains spikes, the function returns zero.
            If one trace has no spikes, but the other has, the function returns one.

        :return: the squared difference of the average AHP depths, normalized
            by the squared sub-threshold range of the input trace
        """
        add_data = args.get("add_data", None)
        spikes = [[], []]
        window = int(self.option.spike_window)
        if (self.model.spike_times == None):
            spikes[0] = self.detectSpike(mod_t)
        elif len(self.model.spike_times)!=0:
            # the external simulator reported spike times directly; rebuild
            # spike frames around them (clamped to the trace boundaries)
            for sp_t in self.model.spike_times:
                start_pos=sp_t-window
                if start_pos<0:
                    start_pos=0
                start_val=mod_t[start_pos]
                peak_pos=sp_t
                peak_val=mod_t[sp_t]
                end_pos=sp_t+window
                if end_pos>=len(mod_t):
                    end_pos=len(mod_t)-1
                end_val=mod_t[end_pos]
                spikes[0].append(spike_frame(start_pos,start_val,peak_pos,peak_val,end_pos,end_val))
        if add_data != None:
            spikes[1] = add_data
        else:
            spikes[1] = self.detectSpike(exp_t)
        if (len(spikes[0]) < 1) and (len(spikes[1]) < 1):
            return 0
        if (len(spikes[0]) < 1) != (len(spikes[1]) < 1):
            return 1
        e = []
        m = []
        # collect the inter-spike minima of both traces; the IndexError
        # branch handles the segment after the last spike.
        # NOTE(review): the model-trace slice below ends at spikes[0][s2 + 1]
        # rather than s1 + 1 — harmless in practice because s1 == s2 on every
        # iteration of this zip, but worth confirming the intent.
        for s1, s2 in zip(list(range(len(spikes[0]))), list(range(len(spikes[1])))):
            try:
                m.append(min(mod_t[spikes[0][s1].stop_pos:spikes[0][s2 + 1].start_pos]))
                e.append(min(exp_t[spikes[1][s2].stop_pos:spikes[1][s2 + 1].start_pos]))
            except IndexError:
                m.append(min(mod_t[spikes[0][s1].stop_pos:]))
                e.append(min(exp_t[spikes[1][s2].stop_pos:]))
        avg_e = fsum(e) / len(e)
        avg_m = fsum(m) / len(m)
        # normalize by the sub-threshold voltage range of the input trace
        sub_t_e = [x for x in exp_t if x < self.thres]
        try:
            if self.option.output_level == "1":
                print("AHP depth:")
                print("mod: ", len(spikes[0]))
                print("exp: ", len(spikes[1]))
                print(pow(avg_e - avg_m, 2) / pow(max(sub_t_e) - min(sub_t_e), 2))
        except OverflowError:
            return 1
        tmp = pow(avg_e - avg_m, 2) / pow(max(sub_t_e) - min(sub_t_e), 2)
        return tmp
#compares the traces based on the width of the action potentials
#the width is computed at the base of the spike and at the middle of the spike
#not normalized
def AP_width(self, mod_t, exp_t, args):
"""
Calculates the normalized squared average differences of the width of APs.
The width is defined as follows:
::
(s1.stop_pos-s1.start_pos)/2
where s1 is a spike instance
:param mod_t: the trace obtained from the model as ``list``
:param exp_t: the input trace as ``list``
:param args: optional arguments as ``dictionary``
.. note::
If neither trace contains spikes, the function returns zero.
If one traces has no spikes, but the other has the function returns one.
:return: the normalized squared average differences of the width of APs, where the normalization
is done by the average spike width of the input trace
"""
add_data = args.get("add_data", None)
spikes = [[], []]
spikes[0] = self.detectSpike(mod_t)
if add_data != None:
spikes[1] = add_data
else:
spikes[1] = self.detectSpike(exp_t)
if (len(spikes[0]) < 1) and (len(spikes[1]) < 1):
return 0
if (len(spikes[0]) < 1) != (len(spikes[1]) < 1):
return 1
avg1 = []
avg2 = []
for s1, s2 in zip(spikes[0], spikes[1]):
avg1.append((s1.stop_pos - s1.start_pos) / 2)
avg2.append((s2.stop_pos - s2.start_pos) / 2)
try:
if self.option.output_level == "1":
print("mod: ", len(spikes[0]))
print("exp: ", len(spikes[1]))
print("AP width:")
print(pow((fsum(avg2) / len(avg2) - fsum(avg1) / len(avg1)) / (fsum(avg2) / len(avg2)), 2))
except OverflowError:
print("overflow")
return 1
return pow((fsum(avg2) / len(avg2) - fsum(avg1) / len(avg1)) / (fsum(avg2) / len(avg2)), 2)
#calculates the averaged squared error's of the close proximity of spikes
    def calc_spike_ase(self, mod_t, exp_t, args):
        """
        Calculates the normalized average squared differences of the sub-threshold segments of the traces.

        Every sample inside a spike (plus a ``spike_window`` margin on both
        sides) is zeroed out in *both* traces before delegating to
        :meth:`calc_ase`, so spikes from either trace are excluded from the
        comparison.

        :param mod_t: the trace obtained from the model as ``list``
        :param exp_t: the input trace as ``list``
        :param args: optional arguments as ``dictionary``; ``add_data`` may
            hold pre-computed spike frames for the input trace
        :return: the normalized average squared differences (for details, see calc_ase)
        """
        add_data = args.get("add_data", None)
        #tmp=[]
        spikes = [[], []]
        window = int(self.option.spike_window)
        if (self.model.spike_times == None):
            spikes[0] = self.detectSpike(mod_t)
        elif len(self.model.spike_times)!=0:
            # the external simulator reported spike times directly; rebuild
            # spike frames around them (clamped to the trace boundaries)
            for sp_t in self.model.spike_times:
                start_pos=sp_t-window
                if start_pos<0:
                    start_pos=0
                start_val=mod_t[start_pos]
                peak_pos=sp_t
                peak_val=mod_t[sp_t]
                end_pos=sp_t+window
                if end_pos>=len(mod_t):
                    end_pos=len(mod_t)-1
                end_val=mod_t[end_pos]
                spikes[0].append(spike_frame(start_pos,start_val,peak_pos,peak_val,end_pos,end_val))
        if add_data != None:
            spikes[1] = add_data
        else:
            spikes[1] = self.detectSpike(exp_t)
        # NOTE(review): the names look swapped — ``e`` is a copy of the MODEL
        # trace and ``m`` of the experimental one. Both get identically
        # zeroed below, but calc_ase(m, e, ...) will normalize by the range
        # of ``e`` (the model copy) instead of the data trace — confirm
        # whether this is intended.
        e = copy(mod_t)
        m = copy(exp_t)
        if (len(spikes[1]) < 1) and (len(spikes[0]) < 1):
            return self.calc_ase(mod_t, exp_t, args)
        # blank out every spike (with margin) found in either trace, in both
        # copies, so only sub-threshold segments are compared
        for s_e in spikes[1]:
            e[int(s_e.start_pos - window):int(s_e.stop_pos + window)] = [0] * (int(s_e.stop_pos + window) - int(s_e.start_pos - window))
            m[int(s_e.start_pos - window):int(s_e.stop_pos + window)] = [0] * (int(s_e.stop_pos + window) - int(s_e.start_pos - window))
        for s_m in spikes[0]:
            m[int(s_m.start_pos - window):int(s_m.stop_pos + window)] = [0] * (int(s_m.stop_pos + window) - int(s_m.start_pos - window))
            e[int(s_m.start_pos - window):int(s_m.stop_pos + window)] = [0] * (int(s_m.stop_pos + window) - int(s_m.start_pos - window))
        #    tmp.append(self.calc_ase(a[0:spikes[1][0].start_pos-window],
        #                b[0:spikes[1][0].start_pos-window],args))
        #    for i,s in enumerate(spikes[1]):
        #        try:
        #            tmp.append(self.calc_ase(a[s.stop_pos+window:spikes[1][i+1].start_pos-window],
        #                        b[s.stop_pos+window:spikes[1][i+1].start_pos-window],args ))
        #        except IndexError:
        #            tmp.append(self.calc_ase(a[spikes[1][i].stop_pos+window:],b[spikes[1][i].stop_pos+window:],args ))
        #    print fsum(tmp)/len(tmp)
        if self.option.output_level == "1":
            print("spike_ase")
            print("mod: ", len(spikes[0]))
            print("exp: ", len(spikes[1]))
            print(self.calc_ase(m, e, args))
        return self.calc_ase(m, e, args)
def calc_ase(self, mod_t, exp_t, args):
"""
Calculates the normalized average squared difference of the traces.
:param mod_t: the trace obtained from the model as ``list``
:param exp_t: the input trace as ``list``
:param args: optional arguments as ``dictionary``
:return: the normalized average squared difference, where the normalization is done by
the squared range of the input trace
"""
temp = []
for n in range(min([len(exp_t), len(mod_t)])):
try:
temp.append(pow(exp_t[n] - mod_t[n], 2))
except OverflowError:
return 1
#except TypeError:
# return 1
try:
if self.option.output_level == "1":
print("ase")
print(fsum(temp) / len(temp) / (pow(max(exp_t) - min(exp_t), 2)))
except OverflowError:
return 1
return fsum(temp) / len(temp) / (pow(max(exp_t) - min(exp_t), 2))
def calc_spike(self, mod_t, exp_t, args):
"""
Calculates the normalized absolute differences of the number of spikes in the traces.
:param mod_t: the trace obtained from the model as ``list``
:param exp_t: the input trace as ``list``
:param args: optional arguments as ``dictionary``
:return: the normalized absolute differences of the number of spikes, where the normalization is done
by the sum of the number of spikes in both traces plus one
"""
add_data = args.get("add_data", None)
temp_fit = 0
spikes = [[], []]
if (self.model.spike_times == None):
spikes[0] = self.detectSpike(mod_t)
else:
#only the number of spikes is needed (e.g the length of the timing vector)
spikes[0] = self.model.spike_times
#print spikes[0]
if add_data != None:
spikes[1] = add_data
else:
spikes[1] = self.detectSpike(exp_t)
mod_spike = len(spikes[0])
exp_spike = len(spikes[1])
try:
#temp_fit+=float(abs(mod_spike-exp_spike))/max( float(exp_spike),float(mod_spike-1) )
temp_fit += float(abs(mod_spike - exp_spike)) / float(exp_spike + mod_spike + 1)
except ZeroDivisionError:
temp_fit += 1
if self.option.output_level == "1":
print("spike count")
print("mod: ", mod_spike)
print("exp: ", exp_spike)
print(temp_fit)
return temp_fit
    def pyelectro_pptd(self, mod_t, exp_t, args):
        """
        Returns error function value from comparison of two phase
        pptd maps as described by Van Geit 2007, delegating to
        ``pyelectro.analysis.pptd_error``.

        :param mod_t: the trace obtained from the model as ``list``
        :param exp_t: the input trace as ``list``
        :param args: optional arguments as ``dictionary`` (unused here)
        :return: the normalised pptd error, or 1 if pyelectro raises
            ``ValueError``
        """
        # build the time axis from the simulation settings and trim both
        # traces to the length of the input trace
        t_gen = frange(0, self.option.run_controll_tstop + self.option.run_controll_dt, self.option.run_controll_dt)
        t = []
        for n in t_gen:
            t.append(n)
        t = t[0:len(exp_t)]
        mod_t = mod_t[0:len(exp_t)]
        try:
            error = analysis.pptd_error(t, mod_t, t, exp_t, dvdt_threshold=None)
            #normalised_error = analysis.normalised_cost_function(error, 0.001)
            # squash the raw error into [0, 1); Q controls how quickly the
            # score saturates (value inherited from earlier code — its
            # effect should be tested)
            Q = 0.001 # from earlier code - its effect should be tested
            #if Q==None:
                #Q=7/(300*(target**2))
            normalised_error=1-1/(Q*(error)**2+1)
            return normalised_error
        except ValueError:
            return 1
def get_efel_values(self, traces, feature):
import warnings
warnings.filterwarnings("ignore", category=RuntimeWarning)
traces_results = efel.getFeatureValues(traces,[feature])
warnings.filterwarnings("default", category=RuntimeWarning)
return traces_results
def FFun_for_Features(self, mod_t, features_data, feature, k, args={}):
exp_mean = 0
exp_std = 0
# generating time trace for efel
t_gen = frange(0, self.option.run_controll_tstop + self.option.run_controll_dt, self.option.run_controll_dt)
t = []
for n in t_gen:
t.append(n)
t = t[0:len(mod_t)]
# converting list to numpy array for efel
mod_t_np=np.array(mod_t)
t_np=np.array(t)
temp_fit=0
trace = {}
traces=[]
trace['T'] = t_np
trace['V'] = mod_t_np
trace['stim_start'] = [self.option.stim_del]
trace['stim_end'] = [self.option.stim_del + self.option.stim_dur]
traces.append(trace)
efel.setThreshold(self.thres)
traces_results = self.get_efel_values(traces, feature)
exp_mean = features_data[feature]["mean"][k]
exp_std = features_data[feature]["std"][k]
mod_result=traces_results[0][feature]
if mod_result is not None and mod_result.size > 1 and (feature == 'AP_rise_time' or feature == 'AP_amplitude' or feature == 'AP_duration_half_width' or feature == 'AP_begin_voltage' or feature == 'AP_rise_rate'):
mod_result = scipy.mean(mod_result[1:])
elif mod_result is not None and mod_result.size > 1:
mod_result = scipy.mean(mod_result) #for features (AHP_depth, AP_duration_half_width) that gives a list as a result, the mean of the results is used
elif mod_result is not None and mod_result.size!=0: # mod_result is a numpy array with one element, 0 element for AP1_amp or APlast_amp if no AP generated by the model
mod_result=mod_result[0]
if (mod_result == None or mod_result.size==0) and (exp_mean != None and exp_std != None):
temp_fit=250
elif exp_mean == None and exp_std == None :
temp_fit=0
else:
result = abs(exp_mean - mod_result) / exp_std
temp_fit = result
return temp_fit
    def combineFeatures(self, candidates, args={}, delete_model=True):
        """
        Creates the weighted combination of fitness functions and calculates the combined fitness for every
        set of parameters created during the optimization proccess by seting the model parameters,
        running the simulation and evaluating the resulting trace. The selected fitness functions and the
        weights are determined from the ``option`` object.

        :param candidates: the candidates generated by the algorithm as a ``list`` of ``lists``
            (normalized values; they are re-normalized before use)
        :param args: optional arguments (overwritten internally, see below)
        :param delete_model: if True and the Neuron simulator is used, the
            model handler is released after the batch is evaluated

        .. note::
            If additional information is loaded as well, then it's passed to the fitness functions along with
            the actual data traces.

        :return: the ``list`` of fitness values corresponding to the parameter sets
        """
        self.model_trace = []
        self.fitnes = []
        features = self.option.feats
        weigths = self.option.weights
        temp_fit = 0
        # feature-based fitting has no spike window concept
        if self.option.type[-1]!= 'features':
            window = int(self.option.spike_window)
        else:
            window=None
        if(self.option.simulator == 'Neuron'):
            "Instantiate a model class"
            self.model=modelHandler.modelHandlerNeuron(self.option.model_path,self.option.model_spec_dir,self.option.base_dir)
            self.model.hoc_obj.dt=self.option.GetModelRun()[1]
        else:
            self.model=modelHandler.externalHandler(self.option.GetSimParam()[1])
            self.model.SetNParams(self.option)
        # compile and exec the optional user-defined parameter function; the
        # resulting callable is published as the module global ``usr_fun``
        # consumed by setParameters. IndexError (no 5th line) means no user
        # function was supplied.
        try:
            s = self.option.GetUFunString()
            s = str.replace(s, "h.", "self.model.hoc_obj.")
            exec(compile(str.replace(s, "h(", "self.model.hoc_obj("), '<string>', 'exec'))
            # the function name is parsed out of the 5th line of the snippet
            self.usr_fun_name = self.option.GetUFunString().split("\n")[4][self.option.GetUFunString().split("\n")[4].find(" ") + 1:self.option.GetUFunString().split("\n")[4].find("(")]
            global usr_fun
            usr_fun = locals()[self.usr_fun_name]
        except SyntaxError:
            print("Your function contained syntax errors!! Please fix them!")
        except IndexError:
            pass
        section = self.option.GetObjTOOpt()
        if self.option.type[-1]!= 'features':
            k_range=self.reader.number_of_traces()
        else:
            k_range=len(self.reader.features_data["stim_amp"])
        for l in candidates:
            if self.option.output_level == "1":
                print(l)
            # candidates arrive normalized to [0, 1]
            l = self.ReNormalize(l)
            if(self.option.simulator == 'Neuron'):
                self.setParameters(section, l)
                self.model.CreateStimuli(self.option.GetModelStim())
            if self.option.output_level == "1":
                print(l)
            for k in range(k_range): #for k in range(self.reader.number_of_traces()):
                # rebuild spike frames from externally loaded spike times, if
                # any; AttributeError means no additional data was loaded
                try:
                    add_data = [spike_frame(n - window, self.thres, n, 1, n + window, self.thres) for n in self.reader.additional_data.get(k)]
                except AttributeError:
                    add_data = None
                args = {}
                args["add_data"] = add_data
                # NOTE(review): ``parameter`` aliases ``param``, so the
                # assignment below mutates the list returned by
                # GetModelStimParam in place — confirm that is intended.
                param = self.option.GetModelStimParam()
                parameter = param
                parameter[0] = param[0][k]
                if isinstance(parameter[0], str):
                    self.model.SetCustStimuli(parameter)
                else:
                    extra_param = self.option.GetModelRun()
                    self.model.SetStimuli(parameter, extra_param)
                # modelRunner returns a non-zero exit status on failure
                if (not self.modelRunner(l,k)):
                    self.model_trace.append(self.model.record[0])
                    if self.option.output_level == "1":
                        print(features, weigths)
                    if (self.option.type[-1]!='features'):
                        for f, w in zip(features, weigths):
                            if abs(len(self.model.record[0])-len(self.reader.data.GetTrace(k)))>1:
                                raise sizeError("model: " + str(len(self.model.record[0])) + ", target: " + str(len(self.reader.data.GetTrace(k))))
                            temp_fit += w * (f(self.model.record[0],
                                        self.reader.data.GetTrace(k), args))
                    else:
                        for f, w in zip(features, weigths):
                            temp_fit += w * self.FFun_for_Features(self.model.record[0],
                                        self.reader.features_data, f, k, args)
                else:
                    # failed simulation: assign a large penalty fitness
                    temp_fit=100
                self.fitnes.append(temp_fit)
                if self.option.output_level == "1":
                    print("current fitness: ",temp_fit)
                temp_fit = 0
        if(self.option.simulator == 'Neuron') and delete_model:
            "Deletes the reference of the instance"
            del self.model
        return self.fitnes
def getErrorComponents(self, index_of_trace, model_output):
"""
Creates the components of the fitness value for a pair of traces using the fitness functions
and the weigths specified in the ``option`` object.
:param index_of_trace: the index of the input trace (in case of multiple traces)
:param model_output: the model trace as ``list``
:return: a ``list`` containing the weight, the function instance, and the component's fitness value
for every function instance i.e every component
"""
features = self.option.feats
weigths = self.option.weights
fit_list = []
window = self.option.spike_window
try:
add_data = [spike_frame(n - window, 0, n, 1, n + 50, 0) for n in self.reader.additional_data.get(index_of_trace)]
except AttributeError:
add_data = None
args = {}
args["add_data"] = add_data
if (self.option.type[-1]!='features'):
for f, w in zip(features, weigths):
fit_list.append([w, f, (f(model_output, self.reader.data.GetTrace(index_of_trace), args))])
else:
for f, w in zip(features, weigths):
fit_list.append([w, f, self.FFun_for_Features(model_output,
self.reader.features_data, f, index_of_trace, args)])# index_of_trace is index of stim_amp here
return fit_list
    def MooFeatures(self, candidates, args={}, delete_model=True):
        """
        Multi-objective fitness evaluation: for every candidate parameter set,
        set the model parameters, run the simulation for every target trace
        (or stimulus amplitude, for feature-based fitness), compute one fitness
        component per configured fitness function, and collect the components
        into an ``ec.emo.Pareto`` tuple. Fitness functions and weights come
        from the ``option`` object.

        :param candidates: candidate parameter sets as a ``list`` of ``lists``
        :param args: optional arguments (unused here; immediately shadowed by
            a local per-trace ``args`` dict below)
        :param delete_model: if True and the simulator is Neuron, drop the
            model reference after evaluation
        :return: the ``list`` of ``Pareto`` fitness objects, one per candidate

        .. note::
            If additional (spike-time) information is loaded, it is passed to
            the fitness functions alongside the data traces.
        """
        # NOTE(review): the mutable default ``args={}`` is never read — it is
        # rebound inside the trace loop — so it is harmless but confusing.
        self.fitnes = []
        features = self.option.feats
        weigths = self.option.weights
        temp_fit = []
        # The spike window only applies to trace-based (non-feature) fitness.
        if self.option.type[-1]!= 'features':
            window = int(self.option.spike_window)
        else:
            window=None
        if(self.option.simulator == 'Neuron'):
            "Instantiate a model class"
            self.model=modelHandler.modelHandlerNeuron(self.option.model_path,self.option.model_spec_dir,self.option.base_dir)
            self.model.hoc_obj.dt=self.option.GetModelRun()[1]
            # Compile and bind the optional user-defined function: rewrite
            # NEURON "h." / "h(" references to this model's hoc object, then
            # pull the function object out of locals() by its parsed name.
            try:
                s = self.option.GetUFunString()
                s = str.replace(s, "h.", "self.model.hoc_obj.")
                exec(compile(str.replace(s, "h(", "self.model.hoc_obj("), '<string>', 'exec'))
                # The function name is sliced from line 5 of the user string,
                # between the first space and the first "(".
                self.usr_fun_name = self.option.GetUFunString().split("\n")[4][self.option.GetUFunString().split("\n")[4].find(" ") + 1:self.option.GetUFunString().split("\n")[4].find("(")]
                global usr_fun
                usr_fun = locals()[self.usr_fun_name]
            except SyntaxError:
                print("Your function contained syntax errors!! Please fix them!")
            except IndexError:
                # No user function supplied (string too short to slice).
                pass
        section = self.option.GetObjTOOpt()
        # Number of evaluations per candidate: one per target trace, or one
        # per stimulus amplitude for feature-based fitness.
        if self.option.type[-1]!= 'features':
            k_range=self.reader.number_of_traces()
        else:
            k_range=len(self.reader.features_data["stim_amp"])
        for l in candidates:
            if self.option.output_level == "1":
                print(l)
            # Candidates arrive normalized; map them back to model units.
            l = self.ReNormalize(l)
            self.setParameters(section, l)
            self.model.CreateStimuli(self.option.GetModelStim())
            if self.option.output_level == "1":
                print(l)
            for k in range(k_range): #for k in range(self.reader.number_of_traces()):
                # Spike frames for this trace, if additional data exists.
                try:
                    add_data = [spike_frame(n - window, self.thres, n, 1, n + window, self.thres) for n in self.reader.additional_data.get(k)]
                except AttributeError:
                    add_data = None
                args = {}
                args["add_data"] = add_data
                param = self.option.GetModelStimParam()
                # NOTE(review): ``parameter`` aliases ``param``; assigning
                # parameter[0] mutates the list returned by
                # GetModelStimParam(). Safe only because the list is
                # re-fetched on every iteration — confirm before refactoring.
                parameter = param
                parameter[0] = param[0][k]
                if isinstance(parameter[0], str):
                    self.model.SetCustStimuli(parameter)
                else:
                    extra_param = self.option.GetModelRun()
                    self.model.SetStimuli(parameter, extra_param)
                # modelRunner returns falsy on a successful run.
                if (not self.modelRunner(l,k)):
                    if self.option.output_level == "1":
                        print(features, weigths)
                    if (self.option.type[-1]!='features'):
                        # NOTE(review): ``w`` is iterated but not applied —
                        # components enter the Pareto tuple unweighted;
                        # presumably intentional for multi-objective mode.
                        for f, w in zip(features, weigths):
                            if abs(len(self.model.record[0])-len(self.reader.data.GetTrace(k)))>1:
                                raise sizeError("model: " + str(len(self.model.record[0])) + ", target: " + str(len(self.reader.data.GetTrace(k))))
                            temp_fit.append((f(self.model.record[0],
                                               self.reader.data.GetTrace(k), args)))
                    else:
                        for f, w in zip(features, weigths):
                            temp_fit.append(self.FFun_for_Features(self.model.record[0],
                                                                   self.reader.features_data, f, k, args))
                else:
                    # Failed simulation contributes a zero component.
                    temp_fit.append(0)
            # One Pareto point per candidate, built from all components.
            self.fitnes.append(ec.emo.Pareto(tuple(temp_fit)))
            if self.option.output_level == "1":
                print("current fitness: ",temp_fit)
            del temp_fit[:]
        if(self.option.simulator == 'Neuron') and delete_model:
            "Deletes the reference of the instance"
            del self.model
        print(self.fitnes)
        return self.fitnes
    def DEAP_wrapper(self,candidates,args={}, delete_model=True):
        """
        DEAP-compatible variant of :py:meth:`MooFeatures`: evaluates every
        candidate against every target trace (or stimulus amplitude) and
        returns the fitness components as one flat list
        (``sum(self.fitnes, [])``) instead of ``Pareto`` objects.

        :param candidates: candidate parameter sets as a ``list`` of ``lists``
        :param args: optional arguments (unused; shadowed per trace below)
        :param delete_model: if True and the simulator is Neuron, drop the
            model reference after evaluation
        :return: flat ``list`` of fitness components for all candidates
        """
        self.fitnes = []
        features = self.option.feats
        weigths = self.option.weights
        temp_fit = []
        # Spike window only applies to trace-based (non-feature) fitness.
        if self.option.type[-1]!= 'features':
            window = int(self.option.spike_window)
        else:
            window=None
        if(self.option.simulator == 'Neuron'):
            "Instantiate a model class"
            self.model=modelHandler.modelHandlerNeuron(self.option.model_path,self.option.model_spec_dir,self.option.base_dir)
            self.model.hoc_obj.dt=self.option.GetModelRun()[1]
            # Compile/bind the optional user function (see MooFeatures).
            try:
                s = self.option.GetUFunString()
                s = str.replace(s, "h.", "self.model.hoc_obj.")
                exec(compile(str.replace(s, "h(", "self.model.hoc_obj("), '<string>', 'exec'))
                self.usr_fun_name = self.option.GetUFunString().split("\n")[4][self.option.GetUFunString().split("\n")[4].find(" ") + 1:self.option.GetUFunString().split("\n")[4].find("(")]
                global usr_fun
                usr_fun = locals()[self.usr_fun_name]
            except SyntaxError:
                print("Your function contained syntax errors!! Please fix them!")
            except IndexError:
                pass
        section = self.option.GetObjTOOpt()
        if self.option.type[-1]!= 'features':
            k_range=self.reader.number_of_traces()
        else:
            k_range=len(self.reader.features_data["stim_amp"])
        for l in candidates:
            if self.option.output_level == "1":
                print(l)
            l = self.ReNormalize(l)
            self.setParameters(section, l)
            self.model.CreateStimuli(self.option.GetModelStim())
            if self.option.output_level == "1":
                print(l)
            for k in range(k_range): #for k in range(self.reader.number_of_traces()):
                try:
                    add_data = [spike_frame(n - window, self.thres, n, 1, n + window, self.thres) for n in self.reader.additional_data.get(k)]
                except AttributeError:
                    add_data = None
                args = {}
                args["add_data"] = add_data
                param = self.option.GetModelStimParam()
                # NOTE(review): ``parameter`` aliases the list returned by
                # GetModelStimParam() and mutates it in place (re-fetched
                # every iteration, so currently safe).
                parameter = param
                parameter[0] = param[0][k]
                if isinstance(parameter[0], str):
                    self.model.SetCustStimuli(parameter)
                else:
                    extra_param = self.option.GetModelRun()
                    self.model.SetStimuli(parameter, extra_param)
                # modelRunner returns falsy on a successful run.
                if (not self.modelRunner(l,k)):
                    if self.option.output_level == "1":
                        print(features, weigths)
                    if (self.option.type[-1]!='features'):
                        for f, w in zip(features, weigths):
                            if abs(len(self.model.record[0])-len(self.reader.data.GetTrace(k)))>1:
                                raise sizeError("model: " + str(len(self.model.record[0])) + ", target: " + str(len(self.reader.data.GetTrace(k))))
                            temp_fit.append((f(self.model.record[0],
                                               self.reader.data.GetTrace(k), args)))
                    else:
                        for f, w in zip(features, weigths):
                            temp_fit.append(self.FFun_for_Features(self.model.record[0],
                                                                   self.reader.features_data, f, k, args))
                else:
                    temp_fit.append(0)
            # Copy (not alias) the per-candidate components before clearing.
            self.fitnes.append(list(temp_fit))
            if self.option.output_level == "1":
                print("current fitness: ",temp_fit)
            del temp_fit[:]
        if(self.option.simulator == 'Neuron') and delete_model:
            "Deletes the reference of the instance"
            del self.model
        # DEAP expects a flat sequence of components.
        return sum(self.fitnes,[])
|
KaliLab/optimizer
|
optimizer/fitnessFunctions.py
|
Python
|
lgpl-2.1
| 52,265
|
[
"ASE",
"NEURON"
] |
6c612f9fba107e0b53f39023a2336e0a5742a38962a93bd580950808a7551e91
|
########################################################################
#
# (C) 2013, James Cammarata <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os.path
import re
import shutil
import sys
import time
import yaml
from jinja2 import Environment, FileSystemLoader
import ansible.constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import Galaxy
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.login import GalaxyLogin
from ansible.galaxy.role import GalaxyRole
from ansible.galaxy.token import GalaxyToken
from ansible.module_utils._text import to_text
from ansible.playbook.role.requirement import RoleRequirement
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class GalaxyCLI(CLI):
    '''command to manage Ansible roles in shared repositories, the default of which is Ansible Galaxy *https://galaxy.ansible.com*.'''

    # Keys from the Galaxy API payload that are omitted when rendering role
    # info for the user (see _display_role_info).
    SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url")
    # Sub-commands accepted on the command line; dispatched as execute_<action>.
    VALID_ACTIONS = ("delete", "import", "info", "init", "install", "list", "login", "remove", "search", "setup")

    def __init__(self, args):
        # Populated later: self.galaxy in parse(), self.api in run().
        self.api = None
        self.galaxy = None
        super(GalaxyCLI, self).__init__(args)
    def set_action(self):
        """Configure usage text, description and options on ``self.parser``
        for the sub-command (``self.action``) chosen on the command line."""
        super(GalaxyCLI, self).set_action()

        # specific to actions
        if self.action == "delete":
            self.parser.set_usage("usage: %prog delete [options] github_user github_repo")
            self.parser.set_description("Removes the role from Galaxy. It does not remove or alter the actual GitHub repository.")
        elif self.action == "import":
            self.parser.set_usage("usage: %prog import [options] github_user github_repo")
            self.parser.set_description("Import a role.")
            self.parser.add_option('--no-wait', dest='wait', action='store_false', default=True, help='Don\'t wait for import results.')
            self.parser.add_option('--branch', dest='reference',
                                   help='The name of a branch to import. Defaults to the repository\'s default branch (usually master)')
            self.parser.add_option('--role-name', dest='role_name', help='The name the role should have, if different than the repo name')
            self.parser.add_option('--status', dest='check_status', action='store_true', default=False,
                                   help='Check the status of the most recent import request for given github_user/github_repo.')
        elif self.action == "info":
            self.parser.set_usage("usage: %prog info [options] role_name[,version]")
            self.parser.set_description("View more details about a specific role.")
        elif self.action == "init":
            self.parser.set_usage("usage: %prog init [options] role_name")
            self.parser.set_description("Initialize new role with the base structure of a role.")
            self.parser.add_option('--init-path', dest='init_path', default="./",
                                   help='The path in which the skeleton role will be created. The default is the current working directory.')
            self.parser.add_option('--type', dest='role_type', action='store', default='default',
                                   help="Initialize using an alternate role type. Valid types include: 'container', 'apb' and 'network'.")
            self.parser.add_option('--role-skeleton', dest='role_skeleton', default=C.GALAXY_ROLE_SKELETON,
                                   help='The path to a role skeleton that the new role should be based upon.')
        elif self.action == "install":
            self.parser.set_usage("usage: %prog install [options] [-r FILE | role_name(s)[,version] | scm+role_repo_url[,version] | tar_file(s)]")
            self.parser.set_description("Install Roles from file(s), URL(s) or tar file(s)")
            self.parser.add_option('-i', '--ignore-errors', dest='ignore_errors', action='store_true', default=False,
                                   help='Ignore errors and continue with the next specified role.')
            self.parser.add_option('-n', '--no-deps', dest='no_deps', action='store_true', default=False, help='Don\'t download roles listed as dependencies')
            self.parser.add_option('-r', '--role-file', dest='role_file', help='A file containing a list of roles to be imported')
            self.parser.add_option('-g', '--keep-scm-meta', dest='keep_scm_meta', action='store_true',
                                   default=False, help='Use tar instead of the scm archive option when packaging the role')
        elif self.action == "remove":
            self.parser.set_usage("usage: %prog remove role1 role2 ...")
            self.parser.set_description("Delete a role from roles_path.")
        elif self.action == "list":
            self.parser.set_usage("usage: %prog list [role_name]")
            self.parser.set_description("Show the name and version of each role installed in the roles_path.")
        elif self.action == "login":
            self.parser.set_usage("usage: %prog login [options]")
            self.parser.set_description("Login to api.github.com server in order to use ansible-galaxy sub command such as 'import', 'delete' and 'setup'.")
            self.parser.add_option('--github-token', dest='token', default=None, help='Identify with github token rather than username and password.')
        elif self.action == "search":
            self.parser.set_usage("usage: %prog search [searchterm1 searchterm2] [--galaxy-tags galaxy_tag1,galaxy_tag2] [--platforms platform1,platform2] "
                                  "[--author username]")
            self.parser.add_option('--platforms', dest='platforms', help='list of OS platforms to filter by')
            self.parser.add_option('--galaxy-tags', dest='galaxy_tags', help='list of galaxy tags to filter by')
            self.parser.add_option('--author', dest='author', help='GitHub username')
            self.parser.set_description("Search the Galaxy database by tags, platforms, author and multiple keywords.")
        elif self.action == "setup":
            self.parser.set_usage("usage: %prog setup [options] source github_user github_repo secret")
            self.parser.add_option('--remove', dest='remove_id', default=None,
                                   help='Remove the integration matching the provided ID value. Use --list to see ID values.')
            self.parser.add_option('--list', dest="setup_list", action='store_true', default=False, help='List all of your integrations.')
            self.parser.set_description("Manage the integration between Galaxy and the given source.")

        # options that apply to more than one action
        if self.action in ['init', 'info']:
            self.parser.add_option('--offline', dest='offline', default=False, action='store_true', help="Don't query the galaxy API when creating roles")

        if self.action not in ("delete", "import", "init", "login", "setup"):
            # NOTE: while the option type=str, the default is a list, and the
            # callback will set the value to a list.
            self.parser.add_option('-p', '--roles-path', dest='roles_path', action="callback", callback=CLI.unfrack_paths, default=C.DEFAULT_ROLES_PATH,
                                   help='The path to the directory containing your roles. The default is the roles_path configured in your ansible.cfg'
                                        ' file (/etc/ansible/roles if not configured)', type='str')

        if self.action in ("init", "install"):
            self.parser.add_option('-f', '--force', dest='force', action='store_true', default=False, help='Force overwriting an existing role')
    def parse(self):
        ''' create an options parser for bin/ansible '''
        self.parser = CLI.base_parser(
            usage="usage: %%prog [%s] [--help] [options] ..." % "|".join(self.VALID_ACTIONS),
            epilog="\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0]),
            desc="Perform various Role related operations.",
        )

        # common options shared by every sub-command
        self.parser.add_option('-s', '--server', dest='api_server', default=C.GALAXY_SERVER, help='The API server destination')
        self.parser.add_option('-c', '--ignore-certs', action='store_true', dest='ignore_certs', default=C.GALAXY_IGNORE_CERTS,
                               help='Ignore SSL certificate validation errors.')
        # Add action-specific options, then let the base class parse argv.
        self.set_action()
        super(GalaxyCLI, self).parse()
        display.verbosity = self.options.verbosity
        # Galaxy context built from the parsed options; used by all actions.
        self.galaxy = Galaxy(self.options)
    def run(self):
        """Create the Galaxy API client and dispatch to execute_<action>()."""
        super(GalaxyCLI, self).run()
        self.api = GalaxyAPI(self.galaxy)
        self.execute()
def exit_without_ignore(self, rc=1):
"""
Exits with the specified return code unless the
option --ignore-errors was specified
"""
if not self.options.ignore_errors:
raise AnsibleError('- you can use --ignore-errors to skip failed roles and finish processing the list.')
def _display_role_info(self, role_info):
text = [u"", u"Role: %s" % to_text(role_info['name'])]
text.append(u"\tdescription: %s" % role_info.get('description', ''))
for k in sorted(role_info.keys()):
if k in self.SKIP_INFO_KEYS:
continue
if isinstance(role_info[k], dict):
text.append(u"\t%s:" % (k))
for key in sorted(role_info[k].keys()):
if key in self.SKIP_INFO_KEYS:
continue
text.append(u"\t\t%s: %s" % (key, role_info[k][key]))
else:
text.append(u"\t%s: %s" % (k, role_info[k]))
return u'\n'.join(text)
############################
# execute actions
############################
    def execute_init(self):
        """
        creates the skeleton framework of a role that complies with the galaxy metadata format.
        """
        init_path = self.options.init_path
        force = self.options.force
        role_skeleton = self.options.role_skeleton

        role_name = self.args.pop(0).strip() if self.args else None
        if not role_name:
            raise AnsibleOptionsError("- no role name specified for init")
        role_path = os.path.join(init_path, role_name)
        if os.path.exists(role_path):
            if os.path.isfile(role_path):
                raise AnsibleError("- the path %s already exists, but is a file - aborting" % role_path)
            elif not force:
                raise AnsibleError("- the directory %s already exists."
                                   "you can use --force to re-initialize this directory,\n"
                                   "however it will reset any main.yml files that may have\n"
                                   "been modified there already." % role_path)

        # Values injected into the skeleton's .j2 templates (meta/main.yml etc.)
        inject_data = dict(
            role_name=role_name,
            author='your name',
            description='your description',
            company='your company (optional)',
            license='license (GPLv2, CC-BY, etc)',
            issue_tracker_url='http://example.com/issue/tracker',
            min_ansible_version='2.4',
            role_type=self.options.role_type
        )

        # create role directory
        if not os.path.exists(role_path):
            os.makedirs(role_path)

        # Fall back to the built-in skeleton (and its ignore list) when the
        # user did not supply one via --role-skeleton / config.
        if role_skeleton is not None:
            skeleton_ignore_expressions = C.GALAXY_ROLE_SKELETON_IGNORE
        else:
            role_skeleton = self.galaxy.default_role_skeleton_path
            skeleton_ignore_expressions = ['^.*/.git_keep$']

        role_skeleton = os.path.expanduser(role_skeleton)
        skeleton_ignore_re = [re.compile(x) for x in skeleton_ignore_expressions]

        template_env = Environment(loader=FileSystemLoader(role_skeleton))

        # Walk the skeleton tree, rendering .j2 files (except those inside
        # templates/, which are the role's own templates) and copying the rest.
        for root, dirs, files in os.walk(role_skeleton, topdown=True):
            rel_root = os.path.relpath(root, role_skeleton)
            in_templates_dir = rel_root.split(os.sep, 1)[0] == 'templates'
            # Prune ignored directories in place so os.walk skips them.
            dirs[:] = [d for d in dirs if not any(r.match(d) for r in skeleton_ignore_re)]

            for f in files:
                filename, ext = os.path.splitext(f)
                if any(r.match(os.path.join(rel_root, f)) for r in skeleton_ignore_re):
                    continue
                elif ext == ".j2" and not in_templates_dir:
                    src_template = os.path.join(rel_root, f)
                    dest_file = os.path.join(role_path, rel_root, filename)
                    template_env.get_template(src_template).stream(inject_data).dump(dest_file)
                else:
                    f_rel_path = os.path.relpath(os.path.join(root, f), role_skeleton)
                    shutil.copyfile(os.path.join(root, f), os.path.join(role_path, f_rel_path))

            # Pre-create (possibly empty) subdirectories of the skeleton.
            for d in dirs:
                dir_path = os.path.join(role_path, rel_root, d)
                if not os.path.exists(dir_path):
                    os.makedirs(dir_path)

        display.display("- %s was created successfully" % role_name)
def execute_info(self):
"""
prints out detailed information about an installed role as well as info available from the galaxy API.
"""
if len(self.args) == 0:
# the user needs to specify a role
raise AnsibleOptionsError("- you must specify a user/role name")
roles_path = self.options.roles_path
data = ''
for role in self.args:
role_info = {'path': roles_path}
gr = GalaxyRole(self.galaxy, role)
install_info = gr.install_info
if install_info:
if 'version' in install_info:
install_info['intalled_version'] = install_info['version']
del install_info['version']
role_info.update(install_info)
remote_data = False
if not self.options.offline:
remote_data = self.api.lookup_role_by_name(role, False)
if remote_data:
role_info.update(remote_data)
if gr.metadata:
role_info.update(gr.metadata)
req = RoleRequirement()
role_spec = req.role_yaml_parse({'role': role})
if role_spec:
role_info.update(role_spec)
data = self._display_role_info(role_info)
# FIXME: This is broken in both 1.9 and 2.0 as
# _display_role_info() always returns something
if not data:
data = u"\n- the role %s was not found" % role
self.pager(data)
    def execute_install(self):
        """
        uses the args list of roles to be installed, unless -f was specified. The list of roles
        can be a name (which will be downloaded via the galaxy API and github), or it can be a local .tar.gz file.
        """
        role_file = self.options.role_file

        if len(self.args) == 0 and role_file is None:
            # the user needs to specify one of either --role-file or specify a single user/role name
            raise AnsibleOptionsError("- you must specify a user/role name or a roles file")

        no_deps = self.options.no_deps
        force = self.options.force

        # Work queue of GalaxyRole objects; dependencies get appended as they
        # are discovered during installation.
        roles_left = []
        if role_file:
            try:
                # NOTE(review): if an AnsibleError is raised inside this try,
                # the handle leaks (f.close() is only reached on success);
                # a `with open(...)` block would be the safer form.
                f = open(role_file, 'r')
                if role_file.endswith('.yaml') or role_file.endswith('.yml'):
                    # YAML requirements file: a list of role specs, each of
                    # which may instead point at another file via 'include'.
                    try:
                        required_roles = yaml.safe_load(f.read())
                    except Exception as e:
                        raise AnsibleError("Unable to load data from the requirements file: %s" % role_file)

                    if required_roles is None:
                        raise AnsibleError("No roles found in file: %s" % role_file)

                    for role in required_roles:
                        if "include" not in role:
                            role = RoleRequirement.role_yaml_parse(role)
                            display.vvv("found role %s in yaml file" % str(role))
                            if "name" not in role and "scm" not in role:
                                raise AnsibleError("Must specify name or src for role")
                            roles_left.append(GalaxyRole(self.galaxy, **role))
                        else:
                            with open(role["include"]) as f_include:
                                try:
                                    roles_left += [
                                        GalaxyRole(self.galaxy, **r) for r in
                                        (RoleRequirement.role_yaml_parse(i) for i in yaml.safe_load(f_include))
                                    ]
                                except Exception as e:
                                    msg = "Unable to load data from the include requirements file: %s %s"
                                    raise AnsibleError(msg % (role_file, e))
                else:
                    # Legacy plain-text format: one role per line.
                    display.deprecated("going forward only the yaml format will be supported", version="2.6")
                    # roles listed in a file, one per line
                    for rline in f.readlines():
                        if rline.startswith("#") or rline.strip() == '':
                            continue
                        display.debug('found role %s in text file' % str(rline))
                        role = RoleRequirement.role_yaml_parse(rline.strip())
                        roles_left.append(GalaxyRole(self.galaxy, **role))
                f.close()
            except (IOError, OSError) as e:
                raise AnsibleError('Unable to open %s: %s' % (role_file, str(e)))
        else:
            # roles were specified directly, so we'll just go out grab them
            # (and their dependencies, unless the user doesn't want us to).
            for rname in self.args:
                role = RoleRequirement.role_yaml_parse(rname.strip())
                roles_left.append(GalaxyRole(self.galaxy, **role))

        for role in roles_left:
            # only process roles in roles files when names matches if given
            if role_file and self.args and role.name not in self.args:
                display.vvv('Skipping role %s' % role.name)
                continue

            display.vvv('Processing role %s ' % role.name)

            # query the galaxy API for the role data
            if role.install_info is not None:
                if role.install_info['version'] != role.version or force:
                    if force:
                        display.display('- changing role %s from %s to %s' %
                                        (role.name, role.install_info['version'], role.version or "unspecified"))
                        role.remove()
                    else:
                        display.warning('- %s (%s) is already installed - use --force to change version to %s' %
                                        (role.name, role.install_info['version'], role.version or "unspecified"))
                        continue
                else:
                    if not force:
                        display.display('- %s is already installed, skipping.' % str(role))
                        continue

            try:
                installed = role.install()
            except AnsibleError as e:
                display.warning("- %s was NOT installed successfully: %s " % (role.name, str(e)))
                self.exit_without_ignore()
                continue

            # install dependencies, if we want them
            if not no_deps and installed:
                if not role.metadata:
                    display.warning("Meta file %s is empty. Skipping dependencies." % role.path)
                else:
                    role_dependencies = role.metadata.get('dependencies') or []
                    for dep in role_dependencies:
                        display.debug('Installing dep %s' % dep)
                        dep_req = RoleRequirement()
                        dep_info = dep_req.role_yaml_parse(dep)
                        dep_role = GalaxyRole(self.galaxy, **dep_info)
                        if '.' not in dep_role.name and '.' not in dep_role.src and dep_role.scm is None:
                            # we know we can skip this, as it's not going to
                            # be found on galaxy.ansible.com
                            continue
                        if dep_role.install_info is None:
                            if dep_role not in roles_left:
                                display.display('- adding dependency: %s' % str(dep_role))
                                roles_left.append(dep_role)
                            else:
                                display.display('- dependency %s already pending installation.' % dep_role.name)
                        else:
                            if dep_role.install_info['version'] != dep_role.version:
                                display.warning('- dependency %s from role %s differs from already installed version (%s), skipping' %
                                                (str(dep_role), role.name, dep_role.install_info['version']))
                            else:
                                display.display('- dependency %s is already installed, skipping.' % dep_role.name)

            if not installed:
                display.warning("- %s was NOT installed successfully." % role.name)
                self.exit_without_ignore()

        return 0
def execute_remove(self):
"""
removes the list of roles passed as arguments from the local system.
"""
if len(self.args) == 0:
raise AnsibleOptionsError('- you must specify at least one role to remove.')
for role_name in self.args:
role = GalaxyRole(self.galaxy, role_name)
try:
if role.remove():
display.display('- successfully removed %s' % role_name)
else:
display.display('- %s is not installed, skipping.' % role_name)
except Exception as e:
raise AnsibleError("Failed to remove role %s: %s" % (role_name, str(e)))
return 0
    def execute_list(self):
        """
        lists the roles installed on the local system or matches a single role passed as an argument.
        """
        if len(self.args) > 1:
            raise AnsibleOptionsError("- please specify only one role to list, or specify no roles to see a full list")

        if len(self.args) == 1:
            # show only the request role, if it exists
            name = self.args.pop()
            gr = GalaxyRole(self.galaxy, name)
            if gr.metadata:
                install_info = gr.install_info
                version = None
                if install_info:
                    version = install_info.get("version", None)
                if not version:
                    version = "(unknown version)"
                # show some more info about single roles here
                display.display("- %s, %s" % (name, version))
            else:
                display.display("- the role %s was not found" % name)
        else:
            # show all valid roles in the roles_path directory
            roles_path = self.options.roles_path
            path_found = False
            for path in roles_path:
                role_path = os.path.expanduser(path)
                if not os.path.exists(role_path):
                    display.warning("- the configured path %s does not exist." % role_path)
                    continue
                elif not os.path.isdir(role_path):
                    display.warning("- the configured path %s, exists, but it is not a directory." % role_path)
                    continue
                path_files = os.listdir(role_path)
                path_found = True
                # Anything with role metadata in the directory counts as a role.
                for path_file in path_files:
                    gr = GalaxyRole(self.galaxy, path_file)
                    if gr.metadata:
                        install_info = gr.install_info
                        version = None
                        if install_info:
                            version = install_info.get("version", None)
                        if not version:
                            version = "(unknown version)"
                        display.display("- %s, %s" % (path_file, version))
            # Only an error if no configured path was usable at all.
            if not path_found:
                raise AnsibleOptionsError("- None of the provided paths was usable. Please specify a valid path with --roles-path")
        return 0
def execute_search(self):
''' searches for roles on the Ansible Galaxy server'''
page_size = 1000
search = None
if len(self.args):
terms = []
for i in range(len(self.args)):
terms.append(self.args.pop())
search = '+'.join(terms[::-1])
if not search and not self.options.platforms and not self.options.galaxy_tags and not self.options.author:
raise AnsibleError("Invalid query. At least one search term, platform, galaxy tag or author must be provided.")
response = self.api.search_roles(search, platforms=self.options.platforms,
tags=self.options.galaxy_tags, author=self.options.author, page_size=page_size)
if response['count'] == 0:
display.display("No roles match your search.", color=C.COLOR_ERROR)
return True
data = [u'']
if response['count'] > page_size:
data.append(u"Found %d roles matching your search. Showing first %s." % (response['count'], page_size))
else:
data.append(u"Found %d roles matching your search:" % response['count'])
max_len = []
for role in response['results']:
max_len.append(len(role['username'] + '.' + role['name']))
name_len = max(max_len)
format_str = u" %%-%ds %%s" % name_len
data.append(u'')
data.append(format_str % (u"Name", u"Description"))
data.append(format_str % (u"----", u"-----------"))
for role in response['results']:
data.append(format_str % (u'%s.%s' % (role['username'], role['name']), role['description']))
data = u'\n'.join(data)
self.pager(data)
return True
    def execute_login(self):
        """
        verify user's identify via Github and retrieve an auth token from Ansible Galaxy.
        """
        # Authenticate with github and retrieve a token
        if self.options.token is None:
            if C.GALAXY_TOKEN:
                # Token configured in ansible.cfg / environment.
                github_token = C.GALAXY_TOKEN
            else:
                # Interactive login: create a throwaway GitHub token.
                login = GalaxyLogin(self.galaxy)
                github_token = login.create_github_token()
        else:
            github_token = self.options.token

        galaxy_response = self.api.authenticate(github_token)

        if self.options.token is None and C.GALAXY_TOKEN is None:
            # Remove the token we created
            login.remove_github_token()

        # Store the Galaxy token
        token = GalaxyToken()
        token.set(galaxy_response['token'])

        display.display("Successfully logged into Galaxy as %s" % galaxy_response['username'])
        return 0
    def execute_import(self):
        """ used to import a role into Ansible Galaxy """

        # Map Galaxy task-message severities to display colors.
        colors = {
            'INFO': 'normal',
            'WARNING': C.COLOR_WARN,
            'ERROR': C.COLOR_ERROR,
            'SUCCESS': C.COLOR_OK,
            'FAILED': C.COLOR_ERROR,
        }

        if len(self.args) < 2:
            raise AnsibleError("Expected a github_username and github_repository. Use --help.")

        github_repo = to_text(self.args.pop(), errors='surrogate_or_strict')
        github_user = to_text(self.args.pop(), errors='surrogate_or_strict')

        if self.options.check_status:
            task = self.api.get_import_task(github_user=github_user, github_repo=github_repo)
        else:
            # Submit an import request
            task = self.api.create_import_task(github_user, github_repo, reference=self.options.reference, role_name=self.options.role_name)

            if len(task) > 1:
                # found multiple roles associated with github_user/github_repo
                display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user, github_repo),
                                color='yellow')
                display.display("The following Galaxy roles are being updated:" + u'\n', color=C.COLOR_CHANGED)
                for t in task:
                    display.display('%s.%s' % (t['summary_fields']['role']['namespace'], t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
                display.display(u'\nTo properly namespace this role, remove each of the above and re-import %s/%s from scratch' % (github_user, github_repo),
                                color=C.COLOR_CHANGED)
                return 0
            # found a single role as expected
            display.display("Successfully submitted import request %d" % task[0]['id'])
            if not self.options.wait:
                display.display("Role name: %s" % task[0]['summary_fields']['role']['name'])
                display.display("Repo: %s/%s" % (task[0]['github_user'], task[0]['github_repo']))

        if self.options.check_status or self.options.wait:
            # Get the status of the import
            msg_list = []
            finished = False
            # Poll the task every 10s, echoing new messages only (tracked by
            # message id in msg_list), until it reports SUCCESS or FAILED.
            while not finished:
                task = self.api.get_import_task(task_id=task[0]['id'])
                for msg in task[0]['summary_fields']['task_messages']:
                    if msg['id'] not in msg_list:
                        display.display(msg['message_text'], color=colors[msg['message_type']])
                        msg_list.append(msg['id'])
                if task[0]['state'] in ['SUCCESS', 'FAILED']:
                    finished = True
                else:
                    time.sleep(10)

        return 0
def execute_setup(self):
""" Setup an integration from Github or Travis for Ansible Galaxy roles"""
if self.options.setup_list:
# List existing integration secrets
secrets = self.api.list_secrets()
if len(secrets) == 0:
# None found
display.display("No integrations found.")
return 0
display.display(u'\n' + "ID Source Repo", color=C.COLOR_OK)
display.display("---------- ---------- ----------", color=C.COLOR_OK)
for secret in secrets:
display.display("%-10s %-10s %s/%s" % (secret['id'], secret['source'], secret['github_user'],
secret['github_repo']), color=C.COLOR_OK)
return 0
if self.options.remove_id:
# Remove a secret
self.api.remove_secret(self.options.remove_id)
display.display("Secret removed. Integrations using this secret will not longer work.", color=C.COLOR_OK)
return 0
if len(self.args) < 4:
raise AnsibleError("Missing one or more arguments. Expecting: source github_user github_repo secret")
secret = self.args.pop()
github_repo = self.args.pop()
github_user = self.args.pop()
source = self.args.pop()
resp = self.api.add_secret(source, github_user, github_repo, secret)
display.display("Added integration for %s %s/%s" % (resp['source'], resp['github_user'], resp['github_repo']))
return 0
def execute_delete(self):
    """ Delete a role from Ansible Galaxy. """
    if len(self.args) < 2:
        raise AnsibleError("Missing one or more arguments. Expected: github_user github_repo")
    # Positional arguments arrive in reverse order of consumption.
    github_repo = self.args.pop()
    github_user = self.args.pop()
    resp = self.api.delete_role(github_user, github_repo)
    deleted = resp['deleted_roles']
    if len(deleted) > 1:
        # More than one role matched the repo: list each one in a table.
        display.display("Deleted the following roles:")
        display.display("ID User Name")
        display.display("------ --------------- ----------")
        for deleted_role in deleted:
            display.display("%-8s %-15s %s" % (deleted_role.id, deleted_role.namespace, deleted_role.name))
    display.display(resp['status'])
    return True
|
hryamzik/ansible
|
lib/ansible/cli/galaxy.py
|
Python
|
gpl-3.0
| 33,565
|
[
"Galaxy"
] |
9a8dec4d12fce7b156128b2df8f82a110efef710104c43cf647c270d8c3222f5
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Reditools(PythonPackage):
    """REDItools: python scripts for RNA editing detection by RNA-Seq data.
    REDItools are simple python scripts conceived to facilitate the
    investigation of RNA editing at large-scale and devoted to research groups
    that would to explore such phenomenon in own data but don't have sufficient
    bioinformatics skills. They work on main operating systems (although
    unix/linux-based OS are preferred), can handle reads from whatever platform
    in the standard BAM format and implement a variety of filters."""

    homepage = "https://github.com/BioinfoUNIBA/REDItools"
    git = "https://github.com/BioinfoUNIBA/REDItools.git"

    maintainers = ['glennpj']

    version('1.3_2020-08-03', commit='2dc71277a25e667797c363d1fca22726249774a3')
    version('1.3_2020-03-20', commit='cf47f3d54f324aeb9650bcf8bfacf5a967762a55')

    variant('nature_protocol', default=False,
            description='Install the Nature Protocol scripts and files')

    # reindent is only needed when converting the py2 sources for python 3
    depends_on('py-reindent', type='build', when='^python@3:')
    depends_on('blat', type='run')
    depends_on('py-fisher', type='run')
    depends_on('py-numpy', type='run')
    depends_on('py-pandas', type='run')
    depends_on('py-pysam', type='run')
    depends_on('py-scipy', type='run')
    depends_on('tabix', type='run')
    # Nature Protocol
    depends_on('bcftools', type='run', when='+nature_protocol')
    depends_on('bedtools2', type='run', when='+nature_protocol')
    depends_on('bwa', type='run', when='+nature_protocol')
    depends_on('bzip2', type='run', when='+nature_protocol')
    depends_on('fastp', type='run', when='+nature_protocol')
    depends_on('fastqc', type='run', when='+nature_protocol')
    depends_on('git', type='run', when='+nature_protocol')
    depends_on('gmap-gsnap', type='run', when='+nature_protocol')
    depends_on('htslib', type='run', when='+nature_protocol')
    depends_on('libdeflate', type='run', when='+nature_protocol')
    depends_on('py-bx-python', type='run', when='+nature_protocol')
    depends_on('py-rseqc', type='run', when='+nature_protocol')
    depends_on('samtools', type='run', when='+nature_protocol')
    depends_on('star', type='run', when='+nature_protocol')
    depends_on('wget', type='run', when='+nature_protocol')

    patch('interpreter.patch')
    patch('setup.py.patch')
    patch('batch_sort.patch', when='^python@3:')

    @run_before('build')
    def p2_to_p3(self):
        """Reindent and 2to3-convert the sources when building for Python 3."""
        if '^python@3:' in self.spec:
            # clean up space/tab mixing
            reindent = which('reindent')
            reindent('--nobackup', '--recurse', '.')
            # convert to be python3 compatible
            p2_to_p3 = which('2to3')
            p2_to_p3('--nobackups', '--write', '.')

    @run_after('install')
    def nature_protocol(self):
        """Install the Nature Protocol scripts and data files, if requested."""
        if '+nature_protocol' in self.spec:
            mkdirp(prefix.NPfiles)
            install_tree('NPfiles', prefix.NPfiles)
            ignore_files = [
                'conda_pckg_installer_docker.py',
                'conda_pckgs_installer.py',
                'download-prepare-data-NP_docker.py',
            ]

            # Named predicate instead of an assigned lambda (PEP 8 E731).
            # Skips the docker/conda installer helpers, which are not useful
            # in a Spack-managed environment.
            def skip_docker_conda(path):
                return path in ignore_files

            install_tree('NPscripts', prefix.bin, ignore=skip_docker_conda)
|
LLNL/spack
|
var/spack/repos/builtin/packages/reditools/package.py
|
Python
|
lgpl-2.1
| 3,487
|
[
"BWA",
"pysam"
] |
522470655cdecc1f80bacb080e8d27d8cdb4d3c2317dc355b24ee5ea8eb843e0
|
'''
This is the primary module for user-interaction with the :mod:`hmf` package.
The module contains a single class, `MassFunction`, which wraps almost all the
functionality of :mod:`hmf` in an easy-to-use way.
'''
version = '1.4.6'
###############################################################################
# Some Imports
###############################################################################
from scipy.interpolate import InterpolatedUnivariateSpline as spline
import scipy.integrate as intg
import numpy as np
from numpy import sin, cos, tan, abs, arctan, arccos, arcsin, exp
import copy
import logging
import cosmolopy as cp
import tools
from fitting_functions import Fits
from transfer import Transfer
#===============================================================================
# Logger
#===============================================================================
logger = logging.getLogger('hmf')
class MassFunction(object):
    """
    An object containing all relevant quantities for the mass function.
    The purpose of this class is to calculate many quantities associated with
    the dark matter halo mass function (HMF). The class is initialized to form a
    cosmology and takes in various options as to how to calculate all
    further quantities.
    All required outputs are provided as ``@property`` attributes for ease of
    access.
    Contains an update() method which can be passed arguments to update, in the
    most optimal manner. All output quantities are calculated only when needed
    (but stored after first calculation for quick access).
    Quantities related to the transfer function can be accessed through the
    ``transfer`` property of this object.
    Parameters
    ----------
    M : array_like, optional, default ``np.linspace(10,15,501)``
        The masses at which to perform analysis [units :math:`\log_{10}M_\odot h^{-1}`].
    mf_fit : str or callable, optional, default ``"SMT"``
        A string indicating which fitting function to use for :math:`f(\sigma)`
        Available options:
        1. ``'PS'``: Press-Schechter form from 1974
        #. ``'ST'``: Sheth-Mo-Tormen empirical fit 2001 (deprecated!)
        #. ``'SMT'``: Sheth-Mo-Tormen empirical fit from 2001
        #. ``'Jenkins'``: Jenkins empirical fit from 2001
        #. ``'Warren'``: Warren empirical fit from 2006
        #. ``'Reed03'``: Reed empirical from 2003
        #. ``'Reed07'``: Reed empirical from 2007
        #. ``'Tinker'``: Tinker empirical from 2008
        #. ``'Watson'``: Watson empirical 2012
        #. ``'Watson_FoF'``: Watson Friend-of-friend fit 2012
        #. ``'Crocce'``: Crocce 2010
        #. ``'Courtin'``: Courtin 2011
        #. ``'Angulo'``: Angulo 2012
        #. ``'Angulo_Bound'``: Angulo sub-halo function 2012
        #. ``'Bhattacharya'``: Bhattacharya empirical fit 2011
        #. ``'Behroozi'``: Behroozi extension to Tinker for high-z 2013
        Alternatively, one may define a callable function, with the signature
        ``func(self)``, where ``self`` is a :class:`MassFunction` object (and
        has access to all its attributes). This may be passed here.
    delta_wrt : str, {``"mean"``, ``"crit"``}
        Defines what the overdensity of a halo is with respect to, mean density
        of the universe, or critical density.
    delta_h : float, optional, default ``200.0``
        The overdensity for the halo definition, with respect to ``delta_wrt``
    user_fit : str, optional, default ``""``
        A string defining a mathematical function in terms of `x`, used as
        the fitting function, where `x` is taken as :math:`\( \sigma \)`. Will only
        be applicable if ``mf_fit == "user_model"``.
    cut_fit : bool, optional, default ``True``
        Whether to forcibly cut :math:`f(\sigma)` at bounds in literature.
        If false, will use whole range of `M`.
    delta_c : float, default ``1.686``
        The critical overdensity for collapse, :math:`\delta_c`
    kwargs : keywords
        These keyword arguments are sent to the `hmf.transfer.Transfer` class.
        Included are all the cosmological parameters (see the docs for details).
    """

    # Implementation note: derived quantities are cached in name-mangled
    # attributes ("_MassFunction__<name>"). Property setters delete dependent
    # caches, and each deleter cascades to its own dependents, implementing
    # lazy invalidation. The ubiquitous bare "except:" blocks are part of
    # this scheme (cache miss / already-deleted), not error handling.
    # NOTE(review): this module is Python 2 only (dict.iteritems, implicit
    # integer division assumptions elsewhere in the package).

    def __init__(self, M=None, mf_fit="ST", delta_h=200.0,
                 delta_wrt='mean', cut_fit=True, z2=None, nz=None,
                 delta_c=1.686, mv_scheme="trapz", **kwargs):
        """
        Initializes some parameters
        """
        if M is None:
            M = np.linspace(10, 15, 501)
        # A list of all available kwargs (sent to Cosmology via Transfer)
        self._cp = ["sigma_8", "n", "w", "cs2_lam", "t_cmb", "y_he", "N_nu",
                    "omegan", "H0", "h", "omegab",
                    "omegac", "omegav", "omegab_h2", "omegac_h2",
                    "force_flat", "default"]
        # Set up a simple dictionary of kwargs which can be later updated
        self._cpdict = {k:v for k, v in kwargs.iteritems() if k in self._cp}
        # Set all given parameters.
        self.mf_fit = mf_fit
        self.M = M
        self.delta_h = delta_h
        self.delta_wrt = delta_wrt
        self.cut_fit = cut_fit
        self.z2 = z2
        self.nz = nz
        self.delta_c = delta_c
        self.transfer = Transfer(**kwargs)
        self.mv_scheme = mv_scheme
        # Sanity check: the k-range must be wide enough for the requested
        # mass range, otherwise mass-variance integrals are truncated.
        tools.check_kr(self.M[0], self.M[-1], self.cosmo.mean_dens,
                       self.transfer.lnk[0], self.transfer.lnk[-1])

    def update(self, **kwargs):
        """
        Update the class with the given arguments in an optimal manner.
        Accepts any argument that the constructor takes.
        """
        for key, val in kwargs.iteritems():
            # The following takes care of everything specifically in this class
            # (a name-mangled attribute exists once the setter has run).
            if "_MassFunction__" + key in self.__dict__:
                try: doset = np.any(getattr(self, key) != val)
                except ValueError: doset = not np.array_equal(getattr(self, key), val)
                if doset:
                    setattr(self, key, val)
            # We need to handle deletes in this class by parameters in Transfer here
            # NOTE(review): "key is 'z'" is an identity comparison that relies
            # on CPython interning short strings; it should be "key == 'z'".
            if key is 'z':
                if val != self.transfer.z:
                    del self.sigma
        # All parameters being sent to Transfer:
        the_rest = {k:v for k, v in kwargs.iteritems() if "_MassFunction__" + k not in self.__dict__}
        # Some things are basically deleted when anything in Transfer is updated
        if len(the_rest) > 0:
            del self.delta_halo
        if len(the_rest) > 1 or (len(the_rest) == 1 and 'z' not in the_rest):
            del self._sigma_0
        # The rest are sent to the Transfer class (stupid values weeded out there)
        self.transfer.update(**the_rest)
        tools.check_kr(self.M[0], self.M[-1], self.cosmo.mean_dens,
                       self.transfer.lnk[0], self.transfer.lnk[-1])

    # --- SET PROPERTIES -------------------------------------------------------
    @property
    def M(self):
        # Masses in M_sun/h (the setter receives log10 values).
        return self.__M

    @M.setter
    def M(self, val):
        try:
            if len(val) == 1:
                raise ValueError("M must be a sequence of length > 1")
        except TypeError:
            raise TypeError("M must be a sequence of length > 1")
        # Second difference ~ 0 ensures the log10(M) grid is linear.
        if np.any(np.abs(np.diff(val, 2)) > 1e-5) or val[1] < val[0]:
            raise ValueError("M must be a linearly increasing vector! " + str(val[0]) + " " + str(val[1]))
        # Delete stuff dependent on it
        del self._sigma_0
        # Input is log10(M); stored un-logged.
        self.__M = 10 ** val

    @property
    def delta_c(self):
        # Critical overdensity for collapse.
        return self.__delta_c

    @delta_c.setter
    def delta_c(self, val):
        try:
            val = float(val)
        except ValueError:
            raise ValueError("delta_c must be a number: ", val)
        if val <= 0:
            raise ValueError("delta_c must be > 0 (", val, ")")
        if val > 10.0:
            raise ValueError("delta_c must be < 10.0 (", val, ")")
        self.__delta_c = val
        del self.fsigma

    @property
    def mv_scheme(self):
        # Integration scheme used for the mass variance ('trapz'/'simps'/'romb').
        return self.__mv_scheme

    @mv_scheme.setter
    def mv_scheme(self, val):
        if val not in ['trapz', 'simps', 'romb']:
            raise ValueError("mv_scheme wrong")
        else:
            self.__mv_scheme = val
            del self._sigma_0

    @property
    def mf_fit(self):
        # Fitting function name (or callable) for f(sigma).
        return self.__mf_fit

    @mf_fit.setter
    def mf_fit(self, val):
        # mf_fit may be a callable or a string. Try callable first.
        # NOTE(review): calling val(self) here actually evaluates the fit as a
        # validity probe; any exception falls through to string handling.
        try:
            val(self)
        except:
            try:
                val = str(val)
            except:
                raise ValueError("mf_fit must be a string or callable, got ", val)
            if val not in Fits.mf_fits + ["Behroozi"]:
                raise ValueError("mf_fit is not in the list of available fitting functions: ", val)
        # Also delete stuff dependent on it
        del self.fsigma
        self.__mf_fit = val

    @property
    def delta_h(self):
        # Halo overdensity w.r.t. delta_wrt.
        return self.__delta_h

    @delta_h.setter
    def delta_h(self, val):
        try:
            val = float(val)
        except ValueError:
            raise ValueError("delta_halo must be a number: ", val)
        if val <= 0:
            raise ValueError("delta_halo must be > 0 (", val, ")")
        if val > 10000:
            raise ValueError("delta_halo must be < 10,000 (", val, ")")
        self.__delta_h = val
        # Delete stuff dependent on it
        del self.delta_halo

    @property
    def delta_wrt(self):
        # Whether delta_h is w.r.t. 'mean' or 'crit' density.
        return self.__delta_wrt

    @delta_wrt.setter
    def delta_wrt(self, val):
        if val not in ['mean', 'crit']:
            raise ValueError("delta_wrt must be either 'mean' or 'crit' (", val, ")")
        self.__delta_wrt = val
        del self.delta_halo

    @property
    def z2(self):
        # Optional upper redshift for survey-volume-weighted dndm (or None).
        return self.__z2

    @z2.setter
    def z2(self, val):
        if val is None:
            self.__z2 = val
            return
        try:
            val = float(val)
        except ValueError:
            raise ValueError("z must be a number (", val, ")")
        if val <= self.transfer.z:
            raise ValueError("z2 must be larger than z")
        else:
            self.__z2 = val
        del self.dndm

    @property
    def nz(self):
        # Number of redshift bins for the volume-weighted calculation (or None).
        return self.__nz

    @nz.setter
    def nz(self, val):
        if val is None:
            self.__nz = val
            return
        try:
            val = int(val)
        except ValueError:
            raise ValueError("nz must be an integer")
        if val < 1:
            raise ValueError("nz must be >= 1")
        else:
            self.__nz = val
        del self.dndm

    @property
    def cut_fit(self):
        # Whether f(sigma) is cut to the fit's literature validity range.
        return self.__cut_fit

    @cut_fit.setter
    def cut_fit(self, val):
        if not isinstance(val, bool):
            raise ValueError("cut_fit must be a bool, " + str(val))
        del self.fsigma
        self.__cut_fit = val

    #-------------------------------- START NON-SET PROPERTIES ----------------------------------------------
    @property
    def cosmo(self):
        """ :class:`hmf.cosmo.Cosmology` object aliased from `self.transfer.cosmo`"""
        return self.transfer.cosmo

    @property
    def delta_halo(self):
        """ Overdensity of a halo w.r.t mean density"""
        try:
            return self.__delta_halo
        except:
            if self.delta_wrt == 'mean':
                self.__delta_halo = self.delta_h
            elif self.delta_wrt == 'crit':
                # Convert a critical-density overdensity to a mean-density one.
                self.__delta_halo = self.delta_h / cp.density.omega_M_z(self.transfer.z, **self.cosmo.cosmolopy_dict())
            return self.__delta_halo

    @delta_halo.deleter
    def delta_halo(self):
        try:
            del self.__delta_halo
            del self.fsigma
        except:
            pass

    @property
    def _sigma_0(self):
        """
        The normalised mass variance at z=0 :math:`\sigma`
        Notes
        -----
        .. math:: \sigma^2(R) = \frac{1}{2\pi^2}\int_0^\infty{k^2P(k)W^2(kR)dk}
        """
        try:
            return self.__sigma_0
        except:
            self.__sigma_0 = tools.mass_variance(self.M, self.transfer._lnP_0,
                                                 self.transfer.lnk,
                                                 self.cosmo.mean_dens,
                                                 self.mv_scheme)
            return self.__sigma_0

    @_sigma_0.deleter
    def _sigma_0(self):
        try:
            del self.__sigma_0
            del self._dlnsdlnm
            del self.sigma
        except:
            pass

    @property
    def _dlnsdlnm(self):
        """
        The value of :math:`\left|\frac{\d \ln \sigma}{\d \ln M}\right|`, ``len=len(M)``
        Notes
        -----
        .. math:: frac{d\ln\sigma}{d\ln M} = \frac{3}{2\sigma^2\pi^2R^4}\int_0^\infty \frac{dW^2(kR)}{dM}\frac{P(k)}{k^2}dk
        """
        try:
            return self.__dlnsdlnm
        except:
            self.__dlnsdlnm = tools.dlnsdlnm(self.M, self._sigma_0, self.transfer._lnP_0,
                                             self.transfer.lnk,
                                             self.cosmo.mean_dens)
            return self.__dlnsdlnm

    @_dlnsdlnm.deleter
    def _dlnsdlnm(self):
        try:
            del self.__dlnsdlnm
            del self.dndm
            del self.n_eff
        except:
            pass

    @property
    def sigma(self):
        """
        The mass variance at `z`, ``len=len(M)``
        """
        try:
            return self.__sigma
        except:
            # Scale the z=0 variance by the growth factor.
            self.__sigma = self._sigma_0 * self.transfer.growth
            return self.__sigma

    @sigma.deleter
    def sigma(self):
        try:
            del self.__sigma
            del self.fsigma
            del self.lnsigma
        except:
            pass

    @property
    def lnsigma(self):
        """
        Natural log of inverse mass variance, ``len=len(M)``
        """
        try:
            return self.__lnsigma
        except:
            self.__lnsigma = np.log(1 / self.sigma)
            return self.__lnsigma

    @lnsigma.deleter
    def lnsigma(self):
        try:
            del self.__lnsigma
            del self.fsigma
        except:
            pass

    @property
    def n_eff(self):
        """
        Effective spectral index at scale of halo radius, ``len=len(M)``
        """
        try:
            return self.__n_eff
        except:
            self.__n_eff = tools.n_eff(self._dlnsdlnm)
            return self.__n_eff

    @n_eff.deleter
    def n_eff(self):
        try:
            del self.__n_eff
        except:
            pass

    @property
    def fsigma(self):
        """
        The multiplicity function, :math:`f(\sigma)`, for `mf_fit`. ``len=len(M)``
        """
        try:
            return self.__fsigma
        except:
            # mf_fit may be a user-supplied callable; fall back to the named
            # fitting functions in Fits when calling it fails.
            try:
                self.__fsigma = self.mf_fit(self)
            except:
                fits_class = Fits(self)
                self.__fsigma = fits_class.nufnu()
            if np.sum(np.isnan(self.__fsigma)) > 0.8 * len(self.__fsigma):
                # the input mass range is almost completely outside the cut
                logger.warning("The specified mass-range was almost entirely \
outside of the limits from the fit. Ignored fit range...")
                self.cut_fit = False
                try:
                    self.__fsigma = self.mf_fit(self)
                except:
                    self.__fsigma = fits_class.nufnu()
            return self.__fsigma

    @fsigma.deleter
    def fsigma(self):
        try:
            del self.__fsigma
            del self.dndm
        except:
            pass

    @property
    def dndm(self):
        """
        The number density of haloes, ``len=len(M)`` [units :math:`h^4 M_\odot^{-1} Mpc^{-3}`]
        """
        try:
            return self.__dndm
        except:
            if self.z2 is None:  # #This is normally the case
                self.__dndm = self.fsigma * self.cosmo.mean_dens * np.abs(self._dlnsdlnm) / self.M ** 2
                if self.mf_fit == 'Behroozi':
                    # Behroozi 2013 correction on top of the Tinker fit.
                    a = 1 / (1 + self.transfer.z)
                    theta = 0.144 / (1 + np.exp(14.79 * (a - 0.213))) * (self.M / 10 ** 11.5) ** (0.5 / (1 + np.exp(6.5 * a)))
                    ngtm_tinker = self._ngtm()
                    ngtm_behroozi = 10 ** (theta + np.log10(ngtm_tinker))
                    dthetadM = 0.144 / (1 + np.exp(14.79 * (a - 0.213))) * \
                        (0.5 / (1 + np.exp(6.5 * a))) * (self.M / 10 ** 11.5) ** \
                        (0.5 / (1 + np.exp(6.5 * a)) - 1) / (10 ** 11.5)
                    self.__dndm = self.__dndm * 10 ** theta - ngtm_behroozi * np.log(10) * dthetadM
            else:  # #This is for a survey-volume weighted calculation
                if self.nz is None:
                    self.nz = 10
                zedges = np.linspace(self.transfer.z, self.z2, self.nz)
                zcentres = (zedges[:-1] + zedges[1:]) / 2
                dndm = np.zeros_like(zcentres)
                vol = np.zeros_like(zedges)
                vol[0] = cp.distance.comoving_volume(self.transfer.z,
                                                     **self.cosmo.cosmolopy_dict())
                for i, zz in enumerate(zcentres):
                    # NOTE: update(z=...) mutates self and invalidates caches
                    # on every iteration of this loop.
                    self.update(z=zz)
                    dndm[i] = self.fsigma * self.cosmo.mean_dens * np.abs(self._dlnsdlnm) / self.M ** 2
                    if self.mf_fit == 'Behroozi':
                        a = 1 / (1 + self.transfer.z)
                        theta = 0.144 / (1 + np.exp(14.79 * (a - 0.213))) * (self.M / 10 ** 11.5) ** (0.5 / (1 + np.exp(6.5 * a)))
                        ngtm_tinker = self._ngtm()
                        ngtm_behroozi = 10 ** (theta + np.log10(ngtm_tinker))
                        dthetadM = 0.144 / (1 + np.exp(14.79 * (a - 0.213))) * (0.5 / (1 + np.exp(6.5 * a))) * (self.M / 10 ** 11.5) ** (0.5 / (1 + np.exp(6.5 * a)) - 1) / (10 ** 11.5)
                        dndm[i] = dndm[i] * 10 ** theta - ngtm_behroozi * np.log(10) * dthetadM
                    vol[i + 1] = cp.distance.comoving_volume(z=zedges[i + 1],
                                                             **self.cosmo.cosmolopy_dict())
                vol = vol[1:] - vol[:-1]  # Volume in shells
                integrand = vol * dndm
                numerator = intg.simps(integrand, x=zcentres)
                denom = intg.simps(vol, zcentres)
                self.__dndm = numerator / denom
            return self.__dndm

    @dndm.deleter
    def dndm(self):
        try:
            del self.__dndm
            del self.dndlnm
            del self.dndlog10m
        except:
            pass

    @property
    def dndlnm(self):
        """
        The differential mass function in terms of natural log of `M`, ``len=len(M)`` [units :math:`h^3 Mpc^{-3}`]
        """
        try:
            return self.__dndlnm
        except:
            self.__dndlnm = self.M * self.dndm
            return self.__dndlnm

    @dndlnm.deleter
    def dndlnm(self):
        try:
            del self.__dndlnm
            del self.ngtm
            del self.nltm
            del self.mgtm
            del self.mltm
            del self.how_big
        except:
            pass

    @property
    def dndlog10m(self):
        """
        The differential mass function in terms of log of `M`, ``len=len(M)`` [units :math:`h^3 Mpc^{-3}`]
        """
        try:
            return self.__dndlog10m
        except:
            self.__dndlog10m = self.M * self.dndm * np.log(10)
            return self.__dndlog10m

    @dndlog10m.deleter
    def dndlog10m(self):
        try:
            del self.__dndlog10m
        except:
            pass

    def _upper_ngtm(self, M, mass_function, cut):
        """Calculate the mass function above given range of `M` in order to integrate"""
        ### WE CALCULATE THE MASS FUNCTION ABOVE THE COMPUTED RANGE ###
        # mass_function is logged already (not log10 though)
        m_upper = np.linspace(np.log(M[-1]), np.log(10 ** 18), 500)
        if cut:  # since its been cut, the best we can do is a power law
            mf_func = spline(np.log(M), mass_function, k=1)
            mf = mf_func(m_upper)
        else:
            # We try to calculate the hmf as far as we can normally
            new_pert = copy.deepcopy(self)
            new_pert.update(M=np.log10(np.exp(m_upper)))
            mf = np.log(np.exp(m_upper) * new_pert.dndm)
            if np.isnan(mf[-1]):  # Then we couldn't get up all the way, so have to do linear ext.
                if np.isnan(mf[1]):  # Then the whole extension is nan and we have to use the original (start at 1 because 1 val won't work either)
                    mf_func = spline(np.log(M), mass_function, k=1)
                    mf = mf_func(m_upper)
                else:
                    # Linearly extrapolate over the NaN tail from the valid part.
                    mfslice = mf[np.logical_not(np.isnan(mf))]
                    m_nan = m_upper[np.isnan(mf)]
                    m_true = m_upper[np.logical_not(np.isnan(mf))]
                    mf_func = spline(m_true, mfslice, k=1)
                    mf[len(mfslice):] = mf_func(m_nan)
        return m_upper, mf

    def _lower_ngtm(self, M, mass_function, cut):
        ### WE CALCULATE THE MASS FUNCTION BELOW THE COMPUTED RANGE ###
        # mass_function is logged already (not log10 though)
        m_lower = np.linspace(np.log(10 ** 3), np.log(M[0]), 500)
        if cut:  # since its been cut, the best we can do is a power law
            mf_func = spline(np.log(M), mass_function, k=1)
            mf = mf_func(m_lower)
        else:
            # We try to calculate the hmf as far as we can normally
            new_pert = copy.deepcopy(self)
            new_pert.update(M=np.log10(np.exp(m_lower)))
            mf = np.log(np.exp(m_lower) * new_pert.dndm)
            if np.isnan(mf[0]):  # Then we couldn't go down all the way, so have to do linear ext.
                mfslice = mf[np.logical_not(np.isnan(mf))]
                m_nan = m_lower[np.isnan(mf)]
                m_true = m_lower[np.logical_not(np.isnan(mf))]
                mf_func = spline(m_true, mfslice, k=1)
                mf[:len(mfslice)] = mf_func(m_nan)
        return m_lower, mf

    def _ngtm(self):
        """
        Calculate n(>m).
        This function is separated from the property because of the Behroozi fit
        """
        # set M and mass_function within computed range
        M = self.M[np.logical_not(np.isnan(self.dndlnm))]
        mass_function = self.dndlnm[np.logical_not(np.isnan(self.dndlnm))]
        # Calculate the mass function (and its integral) from the highest M up to 10**18
        if M[-1] < 10 ** 18:
            m_upper, mf = self._upper_ngtm(M, np.log(mass_function), M[-1] < self.M[-1])
            int_upper = intg.simps(np.exp(mf), dx=m_upper[2] - m_upper[1], even='first')
        else:
            int_upper = 0
        # Calculate the cumulative integral (backwards) of mass_function (Adding on the upper integral)
        ngtm = np.concatenate((intg.cumtrapz(mass_function[::-1], dx=np.log(M[1]) - np.log(M[0]))[::-1], np.zeros(1))) + int_upper
        # We need to set ngtm back in the original length vector with nans where they were originally
        if len(ngtm) < len(self.M):
            ngtm_temp = np.zeros_like(self.dndlnm)
            ngtm_temp[:] = np.nan
            ngtm_temp[np.logical_not(np.isnan(self.dndlnm))] = ngtm
            ngtm = ngtm_temp
        return ngtm

    @property
    def ngtm(self):
        """
        The cumulative mass function above `M`, ``len=len(M)`` [units :math:`h^3 Mpc^{-3}`]
        """
        try:
            return self.__ngtm
        except:
            self.__ngtm = self._ngtm()
            return self.__ngtm

    @ngtm.deleter
    def ngtm(self):
        try:
            del self.__ngtm
            del self.how_big
        except:
            pass

    @property
    def mgtm(self):
        """
        Mass in haloes `>M`, ``len=len(M)`` [units :math:`M_\odot h^2 Mpc^{-3}`]
        """
        try:
            return self.__mgtm
        except:
            M = self.M[np.logical_not(np.isnan(self.dndlnm))]
            mass_function = self.dndlnm[np.logical_not(np.isnan(self.dndlnm))]
            # Calculate the mass function (and its integral) from the highest M up to 10**18
            if M[-1] < 10 ** 18:
                m_upper, mf = self._upper_ngtm(M, np.log(mass_function), M[-1] < self.M[-1])
                # Extra factor exp(m_upper) weights the integrand by mass.
                int_upper = intg.simps(np.exp(mf + m_upper) , dx=m_upper[2] - m_upper[1], even='first')
            else:
                int_upper = 0
            # Calculate the cumulative integral (backwards) of mass_function (Adding on the upper integral)
            self.__mgtm = np.concatenate((intg.cumtrapz(mass_function[::-1] * M[::-1], dx=np.log(M[1]) - np.log(M[0]))[::-1], np.zeros(1))) + int_upper
            # We need to set ngtm back in the original length vector with nans where they were originally
            if len(self.__mgtm) < len(self.M):
                mgtm_temp = np.zeros_like(self.dndlnm)
                mgtm_temp[:] = np.nan
                mgtm_temp[np.logical_not(np.isnan(self.dndlnm))] = self.__mgtm
                self.__mgtm = mgtm_temp
            return self.__mgtm

    @mgtm.deleter
    def mgtm(self):
        try:
            del self.__mgtm
        except:
            pass

    @property
    def nltm(self):
        """
        Inverse cumulative mass function, ``len=len(M)`` [units :math:`h^3 Mpc^{-3}`]
        """
        try:
            return self.__nltm
        except:
            # set M and mass_function within computed range
            M = self.M[np.logical_not(np.isnan(self.dndlnm))]
            mass_function = self.dndlnm[np.logical_not(np.isnan(self.dndlnm))]
            # Calculate the mass function (and its integral) from 10**3 up to lowest M
            if M[0] > 10 ** 3:
                m_lower, mf = self._lower_ngtm(M, np.log(mass_function), M[0] > self.M[0])
                int_lower = intg.simps(np.exp(mf), dx=m_lower[2] - m_lower[1], even='first')
            else:
                int_lower = 0
            # Calculate the cumulative integral of mass_function (Adding on the lower integral)
            self.__nltm = np.concatenate((np.zeros(1), intg.cumtrapz(mass_function, dx=np.log(M[1]) - np.log(M[0])))) + int_lower
            # We need to set ngtm back in the original length vector with nans where they were originally
            if len(self.__nltm) < len(self.M):
                nltm_temp = np.zeros_like(self.dndlnm)
                nltm_temp[:] = np.nan
                nltm_temp[np.logical_not(np.isnan(self.dndlnm))] = self.__nltm
                self.__nltm = nltm_temp
            return self.__nltm

    @nltm.deleter
    def nltm(self):
        try:
            del self.__nltm
        except:
            pass

    @property
    def mltm(self):
        """
        Total mass in haloes `<M`, ``len=len(M)`` [units :math:`M_\odot h^2 Mpc^{-3}`]
        """
        # NOTE(review): unlike the other cached properties, mltm defines no
        # deleter, so "del self.mltm" from dndlnm's deleter relies on the bare
        # except there -- the cache is never actually invalidated.
        try:
            return self.__mltm
        except:
            # Set M within calculated range
            M = self.M[np.logical_not(np.isnan(self.dndlnm))]
            mass_function = self.dndlnm[np.logical_not(np.isnan(self.dndlnm))]
            # Calculate the mass function (and its integral) from 10**3 up to lowest M
            if M[0] > 10 ** 3:
                m_lower, mf = self._lower_ngtm(M, np.log(mass_function), M[0] > self.M[0])
                int_lower = intg.simps(np.exp(mf + m_lower), dx=m_lower[2] - m_lower[1], even='first')
            else:
                int_lower = 0
            # Calculate the cumulative integral of mass_function (Adding on the lower integral)
            self.__mltm = np.concatenate((np.zeros(1), intg.cumtrapz(mass_function * M, dx=np.log(M[1]) - np.log(M[0])))) + int_lower
            # We need to set ngtm back in the original length vector with nans where they were originally
            if len(self.__mltm) < len(self.M):
                nltm_temp = np.zeros_like(self.dndlnm)
                nltm_temp[:] = np.nan
                nltm_temp[np.logical_not(np.isnan(self.dndlnm))] = self.__mltm
                self.__mltm = nltm_temp
            return self.__mltm

    @property
    def how_big(self):
        """
        Size of simulation volume in which to expect one halo of mass M, ``len=len(M)`` [units :math:`Mpch^{-1}`]
        """
        # Computed on the fly; nothing is cached for this property.
        return self.ngtm ** (-1. / 3.)

    @how_big.deleter
    def how_big(self):
        # NOTE(review): "del self.how_big" re-invokes this very deleter
        # recursively; it only terminates because the bare except eventually
        # swallows the RecursionError. There is no cached attribute to remove,
        # so this should simply be a no-op.
        try:
            del self.how_big
        except:
            pass
|
tbs1980/hmf
|
hmf/hmf.py
|
Python
|
mit
| 29,047
|
[
"TINKER"
] |
db93d7de9a75276f9ca6866429befca38fc56f368c61b05a3fda60262a1bd8f8
|
"""I/O functions that operate on omics ExpressionSet objects.
Supported:
* I/O from/to Bioconductor ExpressionSet (RData)
* I/O from/to HDF5 storage (Pandas dataframes)
Input:
* RData2ExpressionSet(RData, assay='exprs', fFactors='auto', pFactors='auto')
* HDF52ExpressionSet(HDF5, assay='exprs', fData='fData', pData='pData')
Output:
* ExpressionSet2RData(eSet, RData)
* ExpressionSet2HDF5(eSet, HDF5)
References:
* http://pandas.pydata.org/pandas-docs/stable/r_interface.html
* https://www.bioconductor.org/packages/release/bioc/vignettes/Biobase/inst/doc/ExpressionSetIntroduction.pdf
Todo:
* Add tests.
"""
import readline
import pandas as pd
from rpy2.robjects import r
from rpy2.robjects import pandas2ri
from rpy2.robjects.packages import importr
from ..expression.ExpressionSet import ExpressionSet
__author__ = "Cho-Yi Chen"
__version__ = "2016.10.16"
# ================================================================================
# Auxiliary functions
# ================================================================================
def _read_ExpressionSet_RData(RData):
    """Read ExpressionSet RData to Rpy2 robjects.
    RData: Path to the input RData file.
    ExpressionSet must be the only object in the RData.
    Return Rpy2's eSet object, assayData, featureData, phenotypeData.
    """
    # Biobase provides assayData/fData/pData accessors in the R session.
    importr('Biobase')
    # r.load returns the names of the loaded objects; r.get resolves the
    # first one. NOTE(review): assumes the RData holds exactly one object,
    # as documented above -- extra objects are silently ignored.
    rdata = r.load(RData)
    eSet = r.get(rdata)  # rpy2 ExpressionSet object (assumed)
    assayData = r.assayData(eSet)  # rpy2 environment object
    fData = r.fData(eSet)  # rpy2 DataFrame object
    pData = r.pData(eSet)  # rpy2 DataFrame object
    return eSet, assayData, fData, pData
def _parse_assayData(assayData, assay):
    """Convert one assay out of an Rpy2 assayData environment.
    assayData: Rpy2 Environment object.
    assay: An assay name indicating the data to be loaded.
    Return a parsed expression dataframe (Pandas).
    """
    pandas2ri.activate()
    # Pull the named expression matrix out of the R environment and convert
    # the values plus both dimension names to Python objects.
    mat = assayData[assay]
    values = pandas2ri.ri2py(mat)
    feature_names = pandas2ri.ri2py(r.rownames(mat))
    sample_names = pandas2ri.ri2py(r.colnames(mat))
    return pd.DataFrame(values, index=feature_names, columns=sample_names)
def _parse_rdataframe(rdf, factor_cols='auto'):
    """Parse an Rpy2 DataFrame.
    rdf: An Rpy2 DataFrame object.
    factor_cols: A list of column names indicating categorical data.
        If 'auto', categorical columns will be determined by their contents, individually.
        If None, do nothing and use the default dtypes.
    Return a pandas dataframe.
    Raises TypeError if factor_cols is a bare string (other than 'auto').
    """
    pandas2ri.activate()
    df = pandas2ri.ri2py(rdf)
    if factor_cols == 'auto':
        # Infer if there are categorical data: a column counts as categorical
        # when it has at least 10x fewer distinct values than rows.
        n = df.shape[0]
        subdf = df.select_dtypes(include=['integer', 'object'])
        factor_cols = [k for k, v in subdf.iteritems() if n > 10 * len(set(v))]
    if factor_cols:
        # A bare string would silently iterate per character below; reject it
        # explicitly instead of via `assert`, which is stripped under -O.
        if isinstance(factor_cols, str):
            raise TypeError("factor_cols must be a list of column names, not a string")
        # Make each column categorical
        for k in factor_cols:
            df[k] = df[k].astype('category')
    return df
# ================================================================================
# Input functions
# ================================================================================
def RData2ExpressionSet(RData, assay='exprs', fFactors='auto', pFactors='auto', verbose=True, **kwargs):
    """Read R's ExpressionSet (RData) to omics ExpressionSet (eSet)
    RData: Path to the input RData file with only one eSet object inside.
    assay: Assay name to be loaded.
    f/pFactors: List of column names indicating categorical data (factors in R).
        If 'auto', factor columns will be determined by their contents, individually.
        If None, do nothing and use the default dtypes.
    verbose: If True, print the source path and the R eSet summary to stdout.
    kwargs: Keyword arguments passed to ExpressionSet constructor
    Return a omics ExpressionSet object (eSet).
    """
    # Read RData into Rpy2 robjects
    r_eSet, r_assayData, r_fData, r_pData = _read_ExpressionSet_RData(RData)
    # Parse assayData, fData, and pData; empty R data frames become None.
    exprs = _parse_assayData(r_assayData, assay)
    fData = _parse_rdataframe(r_fData, fFactors) if len(r_fData) > 0 else None
    pData = _parse_rdataframe(r_pData, pFactors) if len(r_pData) > 0 else None
    # Add metadata: record where this eSet came from.
    kwargs['source'] = RData
    if verbose:
        print "Loading eSet from", RData
        print r_eSet
    return ExpressionSet(exprs, fData, pData, **kwargs)
def HDF52ExpressionSet(HDF5, exprs='exprs', fData='fData', pData='pData', meta='meta', verbose=True):
"""Read HDF file into ExpressionSet.
HDF5: the input HDF5 path.
exprs: the name of the exprsstion table .
fData: the name of the feature table, or None.
pData: the name of the phenotype table, or None.
Return an ExpressionSet object.
"""
store = pd.HDFStore(HDF5)
hdf_exprs = store[exprs]
hdf_fData = store[fData] if isinstance(fData, str) else None
hdf_pData = store[pData] if isinstance(pData, str) else None
hdf_meta = store[meta] if isinstance(meta, str) else {}
hdf_meta['source'] = HDF5
if verbose:
print "Loading dataframes from", HDF5
print store
store.close()
return ExpressionSet(hdf_exprs, hdf_fData, hdf_pData, **hdf_meta)
# ================================================================================
# Output functions
# ================================================================================
def ExpressionSet2RData(eSet, RData, verbose=True):
    """Write ExpressionSet to RData as a single-assay ExpressionSet object
    eSet: A omics ExpressionSet object
    RData: Output RData filename
    verbose: If True, print the output path and the R eSet summary to stdout.
    Note: Mutiple assay data currently not supported in this version.
    Todo: Support multiple expresion matrixes into assayData.
    """
    importr('Biobase')
    # Push the Python dataframes into the R global environment...
    r.assign("exprs", eSet.exprs)
    r.assign("fdata", eSet.fData)
    r.assign("pdata", eSet.pData)
    r.assign("rdata", RData)
    # ...then assemble and save the Bioconductor ExpressionSet on the R side.
    # NOTE(review): this clobbers any existing R globals named exprs/fdata/
    # pdata/rdata/eSet in the shared rpy2 session.
    r("eSet = ExpressionSet(assayData=as.matrix(exprs), \
featureData=AnnotatedDataFrame(fdata), \
phenoData=AnnotatedDataFrame(pdata))")
    r("save(eSet, file=rdata)")
    if verbose:
        print "Saving eSet to", RData
        print r.eSet
def ExpressionSet2HDF5(eSet, HDF5, verbose=True):
"""Write ExpressionSet to HDF5 as a buch of Pandas dataframes
eSet: A omics ExpressionSet object
HDF5: Output HDF5 filename
Note: Mutiple assay data currently not supported in this version.
Todo: Support multiple expresion matrixes into assayData.
"""
store = pd.HDFStore(HDF5)
store['exprs'] = eSet.exprs
if not eSet.fData.empty: store.append('fData', eSet.fData)
if not eSet.pData.empty: store.append('pData', eSet.pData)
if not eSet.meta.empty: store.append('meta', eSet.meta)
if verbose:
print "Saving eSet dataframes to", HDF5
print store
store.close()
|
choyichen/omics
|
omics/io/ExpressionSetIO.py
|
Python
|
mit
| 6,917
|
[
"Bioconductor"
] |
23dc2b5a55727955a198bcf79edb59ba41c3ee832a6acaa7b1e339536845fc6f
|
#!/usr/bin/env python
'''
Spherical symmetry needs to be carefully treated in the atomic calculation.
The default initial guess may break the spherical symmetry. To preserve the
spherical symmetry in the atomic calculation, it is often needed to tune the
initial guess and SCF model.

See also 31-cr_atom_rohf_tune_init_guess.py
'''

import numpy
from pyscf import gto, scf

#
# Method 1: Construct the atomic initial guess from cation.
#
mol = gto.Mole()
mol.verbose = 4
mol.atom = 'V'
mol.basis = 'ccpvtz'
mol.symmetry = True
mol.spin = 0
mol.charge = 5
mol.build()

mf = scf.ROHF(mol)
# NOTE: an irrep_nelec dict for the neutral atom used to be built here but
# was never applied (dead code); the per-irrep occupations are pinned below
# once the system is set back to the neutral atom.
mf.kernel()

# The output of .analyze() method can help to identify whether the spherical
# symmetry is conserved.
#mf.analyze()

# Set the system back to neutral atom
mol.spin = 5
mol.charge = 0

# Freeze the occupancy of the partially filled gerade irreps so that the
# 3d occupation stays spherically symmetric for the neutral atom.
mf.irrep_nelec = mf.get_irrep_nelec()
mf.irrep_nelec['A1g'] = (4,3)
mf.irrep_nelec['E1gx'] = (1,0)
mf.irrep_nelec['E1gy'] = (1,0)
mf.irrep_nelec['E2gx'] = (1,0)
mf.irrep_nelec['E2gy'] = (1,0)
dm = mf.make_rdm1()
mf.kernel(dm)
#mf.analyze()

#
# Regular SCF iteration may break the spherical symmetry in many systems.
# Second order SCF model often works slightly better.
#
mf = mf.newton()
mf.kernel(dm)
#mf.analyze()

#
# Method 2: Construct the atomic initial guess of a large basis set from a
# calculation of small basis set.
#
mol = gto.Mole()
mol.verbose = 4
mol.atom = 'V'
mol.basis = 'minao'
mol.symmetry = True
mol.spin = 0
mol.charge = 5
mol.build()

mf = scf.ROHF(mol)
mf.kernel()

#
# Setup the system with large basis set.
# BUGFIX: build the large-basis system as a separate Mole object (mol1)
# instead of overwriting `mol`; the original overwrote `mol` and then
# referenced an undefined `mol1` in project_dm_nr2nr -> NameError.
#
mol1 = gto.Mole()
mol1.verbose = 4
mol1.atom = 'V'
mol1.basis = 'ccpvtz'
mol1.symmetry = True
mol1.spin = 5
mol1.charge = 0
mol1.build()

dm = mf.make_rdm1()
# Project the small-basis density matrix onto the large basis.
dm = scf.addons.project_dm_nr2nr(mol, dm, mol1)
mf = scf.ROHF(mol1)
mf.kernel(dm)
#mf.analyze()

#
# Second order SCF can be applied on the projected density matrix as well
#
mf = mf.newton()
mf.kernel(dm)
#mf.analyze()
|
gkc1000/pyscf
|
examples/scf/31-v_atom_rohf.py
|
Python
|
apache-2.0
| 2,028
|
[
"PySCF"
] |
c3fda4f87ea2b66757115b5dbc798eb90387ffed00c2668b263a83fe920cdd79
|
"""This module defines an ASE interface to FHI-aims.
Felix Hanke hanke@liverpool.ac.uk
Jonas Bjork j.bjork@liverpool.ac.uk
"""
import os
import numpy as np
from ase.units import Hartree
from ase.io.aims import write_aims, read_aims
from ase.data import atomic_numbers
from ase.calculators.calculator import FileIOCalculator, Parameters, kpts2mp, \
ReadError
# Keyword tables used by Aims.write_control to decide how each parameter
# value is formatted in control.in.

# Keys written as floating-point numbers.
float_keys = [
    'charge',
    'charge_mix_param',
    'default_initial_moment',
    'fixed_spin_moment',
    'hartree_convergence_parameter',
    'harmonic_length_scale',
    'ini_linear_mix_param',
    # NOTE(review): 'ini_spin_mix_parma' spelling kept as-is; confirm
    # against the FHI-aims manual before changing it.
    'ini_spin_mix_parma',
    'initial_moment',
    'MD_MB_init',
    'MD_time_step',
    'prec_mix_param',
    'set_vacuum_level',
    'spin_mix_param',
]

# Keys written in exponential (scientific) notation.
exp_keys = [
    'basis_threshold',
    'occupation_thr',
    'sc_accuracy_eev',
    'sc_accuracy_etot',
    'sc_accuracy_forces',
    'sc_accuracy_rho',
    'sc_accuracy_stress',
]

# Keys written verbatim as strings.
string_keys = [
    'communication_type',
    'density_update_method',
    'KS_method',
    'mixer',
    'output_level',
    'packed_matrix_format',
    'relax_unit_cell',
    'restart',
    'restart_read_only',
    'restart_write_only',
    'spin',
    'total_energy_method',
    'qpe_calc',
    'xc',
    'species_dir',
    'run_command',
]

# Keys written as integers.
int_keys = [
    'empty_states',
    'ini_linear_mixing',
    'max_relaxation_steps',
    'max_zeroin',
    'multiplicity',
    'n_max_pulay',
    'sc_iter_limit',
    'walltime',
]

# Keys written as FHI-aims booleans (.true. / .false.).
bool_keys = [
    'collect_eigenvectors',
    'compute_forces',
    'compute_kinetic',
    'compute_numerical_stress',
    'compute_analytical_stress',
    'distributed_spline_storage',
    'evaluate_work_function',
    'final_forces_cleaned',
    'hessian_to_restart_geometry',
    'load_balancing',
    'MD_clean_rotations',
    'MD_restart',
    'override_illconditioning',
    'override_relativity',
    'restart_relaxations',
    'squeeze_memory',
    'symmetry_reduced_k_grid',
    'use_density_matrix',
    'use_dipole_correction',
    'use_local_index',
    'use_logsbt',
    'vdw_correction_hirshfeld',
]

# Keys whose values are sequences, written space-separated.
list_keys = [
    'init_hess',
    'k_grid',
    'k_offset',
    'MD_run',
    'MD_schedule',
    'MD_segment',
    'mixer_threshold',
    'occupation_type',
    'output',
    'cube',
    'preconditioner',
    'relativistic',
    'relax_geometry',
]
class Aims(FileIOCalculator):
    command = 'aims.version.serial.x > aims.out'
    implemented_properties = ['energy', 'forces', 'stress', 'dipole', 'magmom']

    def __init__(self, restart=None, ignore_bad_restart_file=False,
                 label=os.curdir, atoms=None, cubes=None, radmul=None,
                 tier=None, **kwargs):
        """Construct FHI-aims calculator.

        The keyword arguments (kwargs) can be one of the ASE standard
        keywords: 'xc', 'kpts' and 'smearing' or any of FHI-aims'
        native keywords.

        Additional arguments:

        cubes: AimsCube object
            Cube file specification.
        radmul: int
            Set radial multiplier for the basis set of all atomic species.
        tier: int or array of ints
            Set basis set tier for all atomic species.
        """
        # The stdout file name is the last token of 'run_command' (the
        # redirect target, e.g. "... > aims.out"); fall back to 'aims.out'
        # when no run command is given.  BUGFIX: the original used a bare
        # 'except:' here, which also swallowed KeyboardInterrupt/SystemExit.
        run_command = kwargs.get('run_command')
        if run_command:
            self.outfilename = run_command.split()[-1]
        else:
            self.outfilename = 'aims.out'

        FileIOCalculator.__init__(self, restart, ignore_bad_restart_file,
                                  label, atoms,
                                  command=run_command,
                                  **kwargs)
        self.cubes = cubes
        self.radmul = radmul
        self.tier = tier
def set_label(self, label):
self.label = label
self.directory = label
self.prefix = ''
self.out = os.path.join(label, self.outfilename)
def check_state(self, atoms):
system_changes = FileIOCalculator.check_state(self, atoms)
# Ignore unit cell for molecules:
if not atoms.pbc.any() and 'cell' in system_changes:
system_changes.remove('cell')
return system_changes
def set(self, **kwargs):
xc = kwargs.get('xc')
if xc:
kwargs['xc'] = {'LDA': 'pw-lda', 'PBE': 'pbe'}.get(xc, xc)
changed_parameters = FileIOCalculator.set(self, **kwargs)
if changed_parameters:
self.reset()
return changed_parameters
def write_input(self, atoms, properties=None, system_changes=None,
ghosts=None):
FileIOCalculator.write_input(self, atoms, properties, system_changes)
have_lattice_vectors = atoms.pbc.any()
have_k_grid = ('k_grid' in self.parameters or
'kpts' in self.parameters)
if have_lattice_vectors and not have_k_grid:
raise RuntimeError('Found lattice vectors but no k-grid!')
if not have_lattice_vectors and have_k_grid:
raise RuntimeError('Found k-grid but no lattice vectors!')
write_aims(os.path.join(self.directory, 'geometry.in'), atoms, ghosts)
self.write_control(atoms, os.path.join(self.directory, 'control.in'))
self.write_species(atoms, os.path.join(self.directory, 'control.in'))
self.parameters.write(os.path.join(self.directory, 'parameters.ase'))
def write_control(self, atoms, filename):
output = open(filename, 'w')
for line in ['=====================================================',
'FHI-aims file: ' + filename,
'Created using the Atomic Simulation Environment (ASE)',
'',
'List of parameters used to initialize the calculator:',
'=====================================================']:
output.write('#' + line + '\n')
assert not ('kpts' in self.parameters and 'k_grid' in self.parameters)
assert not ('smearing' in self.parameters and
'occupation_type' in self.parameters)
for key, value in self.parameters.items():
if key == 'kpts':
mp = kpts2mp(atoms, self.parameters.kpts)
output.write('%-35s%d %d %d\n' % (('k_grid',) + tuple(mp)))
dk = 0.5 - 0.5 / np.array(mp)
output.write('%-35s%f %f %f\n' % (('k_offset',) + tuple(dk)))
elif key == 'species_dir' or key == 'run_command':
continue
elif key == 'smearing':
name = self.parameters.smearing[0].lower()
if name == 'fermi-dirac':
name = 'fermi'
width = self.parameters.smearing[1]
output.write('%-35s%s %f' % ('occupation_type', name, width))
if name == 'methfessel-paxton':
order = self.parameters.smearing[2]
output.write(' %d' % order)
output.write('\n' % order)
elif key == 'output':
for output_type in value:
output.write('%-35s%s\n' % (key, output_type))
elif key == 'vdw_correction_hirshfeld' and value:
output.write('%-35s\n' % key)
elif key in bool_keys:
output.write('%-35s.%s.\n' % (key, repr(bool(value)).lower()))
elif isinstance(value, (tuple, list)):
output.write('%-35s%s\n' %
(key, ' '.join(str(x) for x in value)))
elif isinstance(value, str):
output.write('%-35s%s\n' % (key, value))
else:
output.write('%-35s%r\n' % (key, value))
if self.cubes:
self.cubes.write(output)
output.write(
'#=======================================================\n\n')
output.close()
def read(self, label):
FileIOCalculator.read(self, label)
geometry = os.path.join(self.directory, 'geometry.in')
control = os.path.join(self.directory, 'control.in')
for filename in [geometry, control, self.out]:
if not os.path.isfile(filename):
raise ReadError
self.atoms = read_aims(geometry)
self.parameters = Parameters.read(os.path.join(self.directory,
'parameters.ase'))
self.read_results()
def read_results(self):
converged = self.read_convergence()
if not converged:
os.system('tail -20 ' + self.out)
raise RuntimeError('FHI-aims did not converge!\n' +
'The last lines of output are printed above ' +
'and should give an indication why.')
self.read_energy()
if ('compute_forces' in self.parameters or
'sc_accuracy_forces' in self.parameters):
self.read_forces()
if ('compute_numerical_stress' in self.parameters or
'compute_analytical_stress' in self.parameters):
self.read_stress()
if ('dipole' in self.parameters.get('output', []) and
not self.atoms.pbc.any()):
self.read_dipole()
def write_species(self, atoms, filename='control.in'):
self.ctrlname = filename
species_path = self.parameters.get('species_dir')
if species_path is None:
species_path = os.environ.get('AIMS_SPECIES_DIR')
if species_path is None:
raise RuntimeError(
'Missing species directory! Use species_dir ' +
'parameter or set $AIMS_SPECIES_DIR environment variable.')
control = open(filename, 'a')
symbols = atoms.get_chemical_symbols()
symbols2 = []
for n, symbol in enumerate(symbols):
if symbol not in symbols2:
symbols2.append(symbol)
if self.tier is not None:
if isinstance(self.tier, int):
self.tierlist = np.ones(len(symbols2), 'int') * self.tier
elif isinstance(self.tier, list):
assert len(self.tier) == len(symbols2)
self.tierlist = self.tier
for i, symbol in enumerate(symbols2):
fd = os.path.join(species_path, '%02i_%s_default' %
(atomic_numbers[symbol], symbol))
reached_tiers = False
for line in open(fd, 'r'):
if self.tier is not None:
if 'First tier' in line:
reached_tiers = True
self.targettier = self.tierlist[i]
self.foundtarget = False
self.do_uncomment = True
if reached_tiers:
line = self.format_tiers(line)
control.write(line)
if self.tier is not None and not self.foundtarget:
raise RuntimeError(
"Basis tier %i not found for element %s" %
(self.targettier, symbol))
control.close()
if self.radmul is not None:
self.set_radial_multiplier()
    def format_tiers(self, line):
        """Comment/uncomment a species-defaults line so that only basis
        functions up to self.targettier stay active; return the (possibly
        modified) line.

        Relies on per-element state set up by write_species:
        self.targettier, self.foundtarget and self.do_uncomment.
        """
        if 'meV' in line:
            # Tier header lines carry a "... meV" energy note and are
            # expected to be commented in the species defaults.
            assert line[0] == '#'
        if 'tier' in line and 'Further' not in line:
            # Extract the tier name (e.g. 'First') from the header line.
            tier = line.split(" tier")[0]
            tier = tier.split('"')[-1]
            current_tier = self.translate_tier(tier)
            if current_tier == self.targettier:
                self.foundtarget = True
            elif current_tier > self.targettier:
                self.do_uncomment = False
            else:
                # NOTE(review): identical to the branch above, so any tier
                # other than the exact target stops uncommenting — confirm
                # this is intended rather than `do_uncomment = True`.
                self.do_uncomment = False
            return line
        elif self.do_uncomment and line[0] == '#':
            # Activate a basis function belonging to a wanted tier.
            return line[1:]
        elif not self.do_uncomment and line[0] != '#':
            # Deactivate a basis function beyond the wanted tier.
            return '#' + line
        else:
            return line
def translate_tier(self, tier):
if tier.lower() == 'first':
return 1
elif tier.lower() == 'second':
return 2
elif tier.lower() == 'third':
return 3
elif tier.lower() == 'fourth':
return 4
else:
return -1
def set_radial_multiplier(self):
assert isinstance(self.radmul, int)
newctrl = self.ctrlname +'.new'
fin = open(self.ctrlname, 'r')
fout = open(newctrl, 'w')
newline = " radial_multiplier %i\n" % self.radmul
for line in fin:
if ' radial_multiplier' in line:
fout.write(newline)
else:
fout.write(line)
fin.close()
fout.close()
os.rename(newctrl, self.ctrlname)
def get_dipole_moment(self, atoms):
if ('dipole' not in self.parameters.get('output', []) or
atoms.pbc.any()):
raise NotImplementedError
return FileIOCalculator.get_dipole_moment(self, atoms)
def get_stress(self, atoms):
if ('compute_numerical_stress' not in self.parameters and
'compute_analytical_stress' not in self.parameters):
raise NotImplementedError
return FileIOCalculator.get_stress(self, atoms)
def get_forces(self, atoms):
if ('compute_forces' not in self.parameters and
'sc_accuracy_forces' not in self.parameters):
raise NotImplementedError
return FileIOCalculator.get_forces(self, atoms)
def read_dipole(self):
"Method that reads the electric dipole moment from the output file."
for line in open(self.out, 'r'):
if line.rfind('Total dipole moment [eAng]') > -1:
dipolemoment = np.array([float(f)
for f in line.split()[6:9]])
self.results['dipole'] = dipolemoment
def read_energy(self):
for line in open(self.out, 'r'):
if line.rfind('Total energy corrected') > -1:
E0 = float(line.split()[5])
elif line.rfind('Total energy uncorrected') > -1:
F = float(line.split()[5])
self.results['free_energy'] = F
self.results['energy'] = E0
def read_forces(self):
"""Method that reads forces from the output file.
If 'all' is switched on, the forces for all ionic steps
in the output file will be returned, in other case only the
forces for the last ionic configuration are returned."""
lines = open(self.out, 'r').readlines()
forces = np.zeros([len(self.atoms), 3])
for n, line in enumerate(lines):
if line.rfind('Total atomic forces') > -1:
for iatom in range(len(self.atoms)):
data = lines[n + iatom + 1].split()
for iforce in range(3):
forces[iatom, iforce] = float(data[2 + iforce])
self.results['forces'] = forces
def read_stress(self):
lines = open(self.out, 'r').readlines()
stress = None
for n, line in enumerate(lines):
if (line.rfind('| Analytical stress tensor') > -1 or
line.rfind('Numerical stress tensor') > -1):
stress = []
for i in [n + 5, n + 6, n + 7]:
data = lines[i].split()
stress += [float(data[2]), float(data[3]), float(data[4])]
# rearrange in 6-component form and return
self.results['stress'] = np.array([stress[0], stress[4], stress[8],
stress[5], stress[2], stress[1]])
def read_convergence(self):
converged = False
lines = open(self.out, 'r').readlines()
for n, line in enumerate(lines):
if line.rfind('Have a nice day') > -1:
converged = True
return converged
def get_number_of_iterations(self):
return self.read_number_of_iterations()
def read_number_of_iterations(self):
niter = None
lines = open(self.out, 'r').readlines()
for n, line in enumerate(lines):
if line.rfind('| Number of self-consistency cycles') > -1:
niter = int(line.split(':')[-1].strip())
return niter
def get_electronic_temperature(self):
return self.read_electronic_temperature()
def read_electronic_temperature(self):
width = None
lines = open(self.out, 'r').readlines()
for n, line in enumerate(lines):
if line.rfind('Occupation type:') > -1:
width = float(line.split('=')[-1].strip().split()[0])
return width
def get_number_of_electrons(self):
return self.read_number_of_electrons()
def read_number_of_electrons(self):
nelect = None
lines = open(self.out, 'r').readlines()
for n, line in enumerate(lines):
if line.rfind('The structure contains') > -1:
nelect = float(line.split()[-2].strip())
return nelect
def get_number_of_bands(self):
return self.read_number_of_bands()
def read_number_of_bands(self):
nband = None
lines = open(self.out, 'r').readlines()
for n, line in enumerate(lines):
if line.rfind('Number of Kohn-Sham states') > -1:
nband = int(line.split(':')[-1].strip())
return nband
def get_k_point_weights(self):
return self.read_kpts(mode='k_point_weights')
def get_bz_k_points(self):
raise NotImplementedError
def get_ibz_k_points(self):
return self.read_kpts(mode='ibz_k_points')
def get_spin_polarized(self):
return self.read_number_of_spins()
def get_number_of_spins(self):
return 1 + self.get_spin_polarized()
def get_magnetic_moment(self, atoms=None):
return self.read_magnetic_moment()
def read_number_of_spins(self):
spinpol = None
lines = open(self.out, 'r').readlines()
for n, line in enumerate(lines):
if line.rfind('| Number of spin channels') > -1:
spinpol = int(line.split(':')[-1].strip()) - 1
return spinpol
def read_magnetic_moment(self):
magmom = None
if not self.get_spin_polarized():
magmom = 0.0
else: # only for spinpolarized system Magnetisation is printed
for line in open(self.out, 'r').readlines():
if line.find('N_up - N_down') != -1: # last one
magmom = float(line.split(':')[-1].strip())
return magmom
def get_fermi_level(self):
return self.read_fermi()
def get_eigenvalues(self, kpt=0, spin=0):
return self.read_eigenvalues(kpt, spin, 'eigenvalues')
def get_occupations(self, kpt=0, spin=0):
return self.read_eigenvalues(kpt, spin, 'occupations')
def read_fermi(self):
E_f = None
lines = open(self.out, 'r').readlines()
for n, line in enumerate(lines):
if line.rfind('| Chemical potential (Fermi level) in eV') > -1:
E_f = float(line.split(':')[-1].strip())
return E_f
def read_kpts(self, mode='ibz_k_points'):
""" Returns list of kpts weights or kpts coordinates. """
values = []
assert mode in ['ibz_k_points', 'k_point_weights']
lines = open(self.out, 'r').readlines()
kpts = None
kptsstart = None
for n, line in enumerate(lines):
if line.rfind('| Number of k-points') > -1:
kpts = int(line.split(':')[-1].strip())
for n, line in enumerate(lines):
if line.rfind('K-points in task') > -1:
kptsstart = n # last occurence of (
assert not kpts is None
assert not kptsstart is None
text = lines[kptsstart + 1:]
values = []
for line in text[:kpts]:
if mode == 'ibz_k_points':
b = [float(c.strip()) for c in line.split()[4:7]]
else:
b = float(line.split()[-1])
values.append(b)
if len(values) == 0:
values = None
return np.array(values)
def read_eigenvalues(self, kpt=0, spin=0, mode='eigenvalues'):
""" Returns list of last eigenvalues, occupations
for given kpt and spin. """
values = []
assert mode in ['eigenvalues', 'occupations']
lines = open(self.out, 'r').readlines()
# number of kpts
kpts = None
for n, line in enumerate(lines):
if line.rfind('| Number of k-points') > -1:
kpts = int(line.split(':')[-1].strip())
break
assert not kpts is None
assert kpt + 1 <= kpts
# find last (eigenvalues)
eigvalstart = None
for n, line in enumerate(lines):
# eigenvalues come after Preliminary charge convergence reached
if line.rfind('Preliminary charge convergence reached') > -1:
eigvalstart = n
break
assert not eigvalstart is None
lines = lines[eigvalstart:]
for n, line in enumerate(lines):
if line.rfind('Writing Kohn-Sham eigenvalues') > -1:
eigvalstart = n
break
assert not eigvalstart is None
text = lines[eigvalstart + 1:] # remove first 1 line
# find the requested k-point
nbands = self.read_number_of_bands()
sppol = self.get_spin_polarized()
beg = ((nbands + 4 + int(sppol) * 1) * kpt * (sppol + 1) +
3 + sppol * 2 + kpt * sppol)
if self.get_spin_polarized():
if spin == 0:
beg = beg
end = beg + nbands
else:
beg = beg + nbands + 5
end = beg + nbands
else:
end = beg + nbands
values = []
for line in text[beg:end]:
# aims prints stars for large values ...
line = line.replace('**************', ' 10000')
line = line.replace('***************', ' 10000')
line = line.replace('****************', ' 10000')
b = [float(c.strip()) for c in line.split()[1:]]
values.append(b)
if mode == 'eigenvalues':
values = [Hartree * v[1] for v in values]
else:
values = [v[0] for v in values]
if len(values) == 0:
values = None
return np.array(values)
class AimsCube:
    "Object to ensure the output of cube files, can be attached to Aims object"

    def __init__(self, origin=(0, 0, 0),
                 edges=None,
                 points=(50, 50, 50), plots=None):
        """parameters:

        origin, edges, points:
            Same as in the FHI-aims output
        plots:
            what to print, same names as in FHI-aims """
        self.name = 'AimsCube'
        self.origin = origin
        # BUGFIX: edges used to be a mutable default argument, so every
        # AimsCube instance shared (and could corrupt) the same list.
        if edges is None:
            edges = [(0.1, 0.0, 0.0), (0.0, 0.1, 0.0), (0.0, 0.0, 0.1)]
        self.edges = edges
        self.points = points
        self.plots = plots

    def ncubes(self):
        """returns the number of cube files to output """
        if self.plots:
            number = len(self.plots)
        else:
            number = 0
        return number

    def set(self, **kwargs):
        """ set any of the parameters ... """
        # NOT IMPLEMENTED AT THE MOMENT!

    def move_to_base_name(self, basename):
        """ when output tracking is on or the base name is not standard,
        this routine will rename add the base to the cube file output for
        easier tracking """
        for plot in self.plots:
            found = False
            cube = plot.split()
            if (cube[0] == 'total_density' or
                    cube[0] == 'spin_density' or
                    cube[0] == 'delta_density'):
                found = True
                old_name = cube[0] + '.cube'
                new_name = basename + '.' + old_name
            if cube[0] == 'eigenstate' or cube[0] == 'eigenstate_density':
                found = True
                state = int(cube[1])
                s_state = cube[1]
                # zero-pad the state index to five digits
                for i in [10, 100, 1000, 10000]:
                    if state < i:
                        s_state = '0' + s_state
                old_name = cube[0] + '_' + s_state + '_spin_1.cube'
                new_name = basename + '.' + old_name
            if found:
                # NOTE(review): shells out to 'mv'; os.rename would be
                # portable, kept as-is to preserve behaviour.
                os.system('mv ' + old_name + ' ' + new_name)

    def add_plot(self, name):
        """ in case you forgot one ... """
        # BUGFIX: '+=' used to raise TypeError when plots was still None.
        if self.plots is None:
            self.plots = [name]
        else:
            self.plots += [name]

    def write(self, file):
        """ write the necessary output to the already opened control.in """
        file.write('output cube ' + self.plots[0] + '\n')
        file.write('   cube origin ')
        for ival in self.origin:
            file.write(str(ival) + ' ')
        file.write('\n')
        for i in range(3):
            file.write('   cube edge ' + str(self.points[i]) + ' ')
            for ival in self.edges[i]:
                file.write(str(ival) + ' ')
            file.write('\n')
        if self.ncubes() > 1:
            for i in range(self.ncubes() - 1):
                file.write('output cube ' + self.plots[i + 1] + '\n')
|
suttond/MODOI
|
ase/calculators/aims.py
|
Python
|
lgpl-3.0
| 25,262
|
[
"ASE",
"DIRAC",
"FHI-aims"
] |
0a972e69fe26b75eff63bb1c3b62ad00b9fb56d625f7dddf68933518b9351618
|
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2012 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
import mock
from stoqlib.gui.search.creditcheckhistorysearch import CreditCheckHistorySearch
from stoqlib.gui.test.uitestutils import GUITest
class TestCreditCheckHistorySearch(GUITest):
    """GUI tests for the credit-check history search dialog."""

    def test_show(self):
        # Three history entries across two users and two clients, so both
        # the "all" and the per-client views have data to display.
        user = self.create_user()
        credit_check1 = self.create_credit_check_history(user)
        credit_check1.identifier = u'1'
        user2 = self.create_user(username=u'username2')
        client = self.create_client()
        client.person.name = u'Client2'
        credit_check2 = self.create_credit_check_history(user2, client)
        credit_check2.identifier = u'2'
        credit_check3 = self.create_credit_check_history(user2, client)
        credit_check3.identifier = u'3'

        # displaying all
        dialog = CreditCheckHistorySearch(self.store)
        self.click(dialog.search.search_button)
        self.check_dialog(dialog, 'credit-check-history-search-show-all')

        # displaying a single client
        dialog = CreditCheckHistorySearch(self.store, client)
        self.click(dialog.search.search_button)
        self.check_dialog(dialog, 'credit-check-history-search-show-single')

    @mock.patch('stoqlib.gui.search.creditcheckhistorysearch.run_dialog')
    def test_edit(self, run_dialog):
        # Double-clicking a result opens the editor in visual (read-only)
        # mode for the selected record.
        credit_check = self.create_credit_check_history()
        dialog = CreditCheckHistorySearch(self.store, reuse_store=True)
        self.click(dialog.search.search_button)
        dialog.results.double_click(0)
        run_dialog.assert_called_once_with(dialog.editor_class, dialog,
                                           self.store, credit_check, None,
                                           visual_mode=True)

    @mock.patch('stoqlib.gui.search.creditcheckhistorysearch.run_dialog')
    def test_new(self, run_dialog):
        dialog = CreditCheckHistorySearch(self.store)
        self.click(dialog._toolbar.new_button)
        # BUGFIX: assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(run_dialog.call_count, 1)
|
tiagocardosos/stoq
|
stoqlib/gui/test/test_creditcheckhistorysearch.py
|
Python
|
gpl-2.0
| 2,838
|
[
"VisIt"
] |
9d9cd12579bddccdd16bbd07d1d465390ca45f0b8b8257356ee70bfc189a3192
|
# coding: utf-8
from __future__ import unicode_literals
import itertools
import re
from .common import InfoExtractor
from ..utils import (
clean_html,
dict_get,
ExtractorError,
float_or_none,
get_element_by_class,
int_or_none,
js_to_json,
parse_duration,
parse_iso8601,
try_get,
unescapeHTML,
url_or_none,
urlencode_postdata,
urljoin,
)
from ..compat import (
compat_etree_Element,
compat_HTTPError,
compat_urlparse,
)
class BBCCoUkIE(InfoExtractor):
IE_NAME = 'bbc.co.uk'
IE_DESC = 'BBC iPlayer'
_ID_REGEX = r'(?:[pbm][\da-z]{7}|w[\da-z]{7,14})'
_VALID_URL = r'''(?x)
https?://
(?:www\.)?bbc\.co\.uk/
(?:
programmes/(?!articles/)|
iplayer(?:/[^/]+)?/(?:episode/|playlist/)|
music/(?:clips|audiovideo/popular)[/#]|
radio/player/|
sounds/play/|
events/[^/]+/play/[^/]+/
)
(?P<id>%s)(?!/(?:episodes|broadcasts|clips))
''' % _ID_REGEX
_LOGIN_URL = 'https://account.bbc.com/signin'
_NETRC_MACHINE = 'bbc'
_MEDIA_SELECTOR_URL_TEMPL = 'https://open.live.bbc.co.uk/mediaselector/6/select/version/2.0/mediaset/%s/vpid/%s'
_MEDIA_SETS = [
# Provides HQ HLS streams with even better quality that pc mediaset but fails
# with geolocation in some cases when it's even not geo restricted at all (e.g.
# http://www.bbc.co.uk/programmes/b06bp7lf). Also may fail with selectionunavailable.
'iptv-all',
'pc',
]
_EMP_PLAYLIST_NS = 'http://bbc.co.uk/2008/emp/playlist'
_TESTS = [
{
'url': 'http://www.bbc.co.uk/programmes/b039g8p7',
'info_dict': {
'id': 'b039d07m',
'ext': 'flv',
'title': 'Kaleidoscope, Leonard Cohen',
'description': 'The Canadian poet and songwriter reflects on his musical career.',
},
'params': {
# rtmp download
'skip_download': True,
}
},
{
'url': 'http://www.bbc.co.uk/iplayer/episode/b00yng5w/The_Man_in_Black_Series_3_The_Printed_Name/',
'info_dict': {
'id': 'b00yng1d',
'ext': 'flv',
'title': 'The Man in Black: Series 3: The Printed Name',
'description': "Mark Gatiss introduces Nicholas Pierpan's chilling tale of a writer's devilish pact with a mysterious man. Stars Ewan Bailey.",
'duration': 1800,
},
'params': {
# rtmp download
'skip_download': True,
},
'skip': 'Episode is no longer available on BBC iPlayer Radio',
},
{
'url': 'http://www.bbc.co.uk/iplayer/episode/b03vhd1f/The_Voice_UK_Series_3_Blind_Auditions_5/',
'info_dict': {
'id': 'b00yng1d',
'ext': 'flv',
'title': 'The Voice UK: Series 3: Blind Auditions 5',
'description': 'Emma Willis and Marvin Humes present the fifth set of blind auditions in the singing competition, as the coaches continue to build their teams based on voice alone.',
'duration': 5100,
},
'params': {
# rtmp download
'skip_download': True,
},
'skip': 'Currently BBC iPlayer TV programmes are available to play in the UK only',
},
{
'url': 'http://www.bbc.co.uk/iplayer/episode/p026c7jt/tomorrows-worlds-the-unearthly-history-of-science-fiction-2-invasion',
'info_dict': {
'id': 'b03k3pb7',
'ext': 'flv',
'title': "Tomorrow's Worlds: The Unearthly History of Science Fiction",
'description': '2. Invasion',
'duration': 3600,
},
'params': {
# rtmp download
'skip_download': True,
},
'skip': 'Currently BBC iPlayer TV programmes are available to play in the UK only',
}, {
'url': 'http://www.bbc.co.uk/programmes/b04v20dw',
'info_dict': {
'id': 'b04v209v',
'ext': 'flv',
'title': 'Pete Tong, The Essential New Tune Special',
'description': "Pete has a very special mix - all of 2014's Essential New Tunes!",
'duration': 10800,
},
'params': {
# rtmp download
'skip_download': True,
},
'skip': 'Episode is no longer available on BBC iPlayer Radio',
}, {
'url': 'http://www.bbc.co.uk/music/clips/p022h44b',
'note': 'Audio',
'info_dict': {
'id': 'p022h44j',
'ext': 'flv',
'title': 'BBC Proms Music Guides, Rachmaninov: Symphonic Dances',
'description': "In this Proms Music Guide, Andrew McGregor looks at Rachmaninov's Symphonic Dances.",
'duration': 227,
},
'params': {
# rtmp download
'skip_download': True,
}
}, {
'url': 'http://www.bbc.co.uk/music/clips/p025c0zz',
'note': 'Video',
'info_dict': {
'id': 'p025c103',
'ext': 'flv',
'title': 'Reading and Leeds Festival, 2014, Rae Morris - Closer (Live on BBC Three)',
'description': 'Rae Morris performs Closer for BBC Three at Reading 2014',
'duration': 226,
},
'params': {
# rtmp download
'skip_download': True,
}
}, {
'url': 'http://www.bbc.co.uk/iplayer/episode/b054fn09/ad/natural-world-20152016-2-super-powered-owls',
'info_dict': {
'id': 'p02n76xf',
'ext': 'flv',
'title': 'Natural World, 2015-2016: 2. Super Powered Owls',
'description': 'md5:e4db5c937d0e95a7c6b5e654d429183d',
'duration': 3540,
},
'params': {
# rtmp download
'skip_download': True,
},
'skip': 'geolocation',
}, {
'url': 'http://www.bbc.co.uk/iplayer/episode/b05zmgwn/royal-academy-summer-exhibition',
'info_dict': {
'id': 'b05zmgw1',
'ext': 'flv',
'description': 'Kirsty Wark and Morgan Quaintance visit the Royal Academy as it prepares for its annual artistic extravaganza, meeting people who have come together to make the show unique.',
'title': 'Royal Academy Summer Exhibition',
'duration': 3540,
},
'params': {
# rtmp download
'skip_download': True,
},
'skip': 'geolocation',
}, {
# iptv-all mediaset fails with geolocation however there is no geo restriction
# for this programme at all
'url': 'http://www.bbc.co.uk/programmes/b06rkn85',
'info_dict': {
'id': 'b06rkms3',
'ext': 'flv',
'title': "Best of the Mini-Mixes 2015: Part 3, Annie Mac's Friday Night - BBC Radio 1",
'description': "Annie has part three in the Best of the Mini-Mixes 2015, plus the year's Most Played!",
},
'params': {
# rtmp download
'skip_download': True,
},
'skip': 'Now it\'s really geo-restricted',
}, {
# compact player (https://github.com/ytdl-org/youtube-dl/issues/8147)
'url': 'http://www.bbc.co.uk/programmes/p028bfkf/player',
'info_dict': {
'id': 'p028bfkj',
'ext': 'flv',
'title': 'Extract from BBC documentary Look Stranger - Giant Leeks and Magic Brews',
'description': 'Extract from BBC documentary Look Stranger - Giant Leeks and Magic Brews',
},
'params': {
# rtmp download
'skip_download': True,
},
}, {
'url': 'https://www.bbc.co.uk/sounds/play/m0007jzb',
'note': 'Audio',
'info_dict': {
'id': 'm0007jz9',
'ext': 'mp4',
'title': 'BBC Proms, 2019, Prom 34: West–Eastern Divan Orchestra',
'description': "Live BBC Proms. West–Eastern Divan Orchestra with Daniel Barenboim and Martha Argerich.",
'duration': 9840,
},
'params': {
# rtmp download
'skip_download': True,
}
}, {
'url': 'http://www.bbc.co.uk/iplayer/playlist/p01dvks4',
'only_matching': True,
}, {
'url': 'http://www.bbc.co.uk/music/clips#p02frcc3',
'only_matching': True,
}, {
'url': 'http://www.bbc.co.uk/iplayer/cbeebies/episode/b0480276/bing-14-atchoo',
'only_matching': True,
}, {
'url': 'http://www.bbc.co.uk/radio/player/p03cchwf',
'only_matching': True,
}, {
'url': 'https://www.bbc.co.uk/music/audiovideo/popular#p055bc55',
'only_matching': True,
}, {
'url': 'http://www.bbc.co.uk/programmes/w3csv1y9',
'only_matching': True,
}, {
'url': 'https://www.bbc.co.uk/programmes/m00005xn',
'only_matching': True,
}, {
'url': 'https://www.bbc.co.uk/programmes/w172w4dww1jqt5s',
'only_matching': True,
}]
def _login(self):
username, password = self._get_login_info()
if username is None:
return
login_page = self._download_webpage(
self._LOGIN_URL, None, 'Downloading signin page')
login_form = self._hidden_inputs(login_page)
login_form.update({
'username': username,
'password': password,
})
post_url = urljoin(self._LOGIN_URL, self._search_regex(
r'<form[^>]+action=(["\'])(?P<url>.+?)\1', login_page,
'post url', default=self._LOGIN_URL, group='url'))
response, urlh = self._download_webpage_handle(
post_url, None, 'Logging in', data=urlencode_postdata(login_form),
headers={'Referer': self._LOGIN_URL})
if self._LOGIN_URL in urlh.geturl():
error = clean_html(get_element_by_class('form-message', response))
if error:
raise ExtractorError(
'Unable to login: %s' % error, expected=True)
raise ExtractorError('Unable to log in')
def _real_initialize(self):
    # Runs once before extraction; _login is a no-op when no
    # credentials are configured.
    self._login()
class MediaSelectionError(Exception):
    """Raised when the BBC media selector reports an error result.

    The ``id`` attribute carries the selector's error identifier
    (e.g. 'geolocation', 'notukerror', 'selectionunavailable'),
    which callers inspect to decide whether to retry another
    media set.
    """
    def __init__(self, id):
        # Also pass the id to Exception so str(exc)/exc.args are
        # informative (previously they were empty).
        Exception.__init__(self, id)
        self.id = id
def _extract_asx_playlist(self, connection, programme_id):
asx = self._download_xml(connection.get('href'), programme_id, 'Downloading ASX playlist')
return [ref.get('href') for ref in asx.findall('./Entry/ref')]
def _extract_items(self, playlist):
return playlist.findall('./{%s}item' % self._EMP_PLAYLIST_NS)
def _extract_medias(self, media_selection):
error = media_selection.get('result')
if error:
raise BBCCoUkIE.MediaSelectionError(error)
return media_selection.get('media') or []
def _extract_connections(self, media):
return media.get('connection') or []
def _get_subtitles(self, media, programme_id):
    """Return {'en': [...]} for the first connection whose href yields
    a parseable TTML captions document; {} when none does."""
    subtitles = {}
    for connection in self._extract_connections(media):
        cc_url = url_or_none(connection.get('href'))
        if not cc_url:
            continue
        captions = self._download_xml(
            cc_url, programme_id, 'Downloading captions', fatal=False)
        # fatal=False download may hand back something other than a
        # parsed XML element; skip such connections.
        if not isinstance(captions, compat_etree_Element):
            continue
        subtitles['en'] = [{
            'url': connection.get('href'),
            'ext': 'ttml',
        }]
        break
    return subtitles
def _raise_extractor_error(self, media_selection_error):
    """Re-raise a MediaSelectionError as a user-facing ExtractorError."""
    message = '%s returned error: %s' % (
        self.IE_NAME, media_selection_error.id)
    raise ExtractorError(message, expected=True)
def _download_media_selector(self, programme_id):
last_exception = None
for media_set in self._MEDIA_SETS:
try:
return self._download_media_selector_url(
self._MEDIA_SELECTOR_URL_TEMPL % (media_set, programme_id), programme_id)
except BBCCoUkIE.MediaSelectionError as e:
if e.id in ('notukerror', 'geolocation', 'selectionunavailable'):
last_exception = e
continue
self._raise_extractor_error(e)
self._raise_extractor_error(last_exception)
def _download_media_selector_url(self, url, programme_id=None):
media_selection = self._download_json(
url, programme_id, 'Downloading media selection JSON',
expected_status=(403, 404))
return self._process_media_selector(media_selection, programme_id)
def _process_media_selector(self, media_selection, programme_id):
    """Turn a media selection document into (formats, subtitles).

    Walks every media entry and every connection within it, emitting
    one or more format dicts per connection depending on the transfer
    format (ASX, DASH, HLS, HDS, plain HTTP(S) or RTMP).  'captions'
    media entries are handed to extract_subtitles instead.
    """
    formats = []
    subtitles = None
    # Connection hrefs already handled, to avoid duplicate formats.
    urls = []
    for media in self._extract_medias(media_selection):
        kind = media.get('kind')
        if kind in ('video', 'audio'):
            bitrate = int_or_none(media.get('bitrate'))
            encoding = media.get('encoding')
            width = int_or_none(media.get('width'))
            height = int_or_none(media.get('height'))
            file_size = int_or_none(media.get('media_file_size'))
            for connection in self._extract_connections(media):
                href = connection.get('href')
                if href in urls:
                    continue
                if href:
                    urls.append(href)
                conn_kind = connection.get('kind')
                protocol = connection.get('protocol')
                supplier = connection.get('supplier')
                transfer_format = connection.get('transferFormat')
                # Most specific identifier wins for the format id.
                format_id = supplier or conn_kind or protocol
                # ASX playlist
                if supplier == 'asx':
                    for i, ref in enumerate(self._extract_asx_playlist(connection, programme_id)):
                        formats.append({
                            'url': ref,
                            'format_id': 'ref%s_%s' % (i, format_id),
                        })
                elif transfer_format == 'dash':
                    formats.extend(self._extract_mpd_formats(
                        href, programme_id, mpd_id=format_id, fatal=False))
                elif transfer_format == 'hls':
                    formats.extend(self._extract_m3u8_formats(
                        href, programme_id, ext='mp4', entry_protocol='m3u8_native',
                        m3u8_id=format_id, fatal=False))
                elif transfer_format == 'hds':
                    formats.extend(self._extract_f4m_formats(
                        href, programme_id, f4m_id=format_id, fatal=False))
                else:
                    # Plain progressive or RTMP connection.
                    if not supplier and bitrate:
                        format_id += '-%d' % bitrate
                    fmt = {
                        'format_id': format_id,
                        'filesize': file_size,
                    }
                    if kind == 'video':
                        fmt.update({
                            'width': width,
                            'height': height,
                            'tbr': bitrate,
                            'vcodec': encoding,
                        })
                    else:
                        fmt.update({
                            'abr': bitrate,
                            'acodec': encoding,
                            'vcodec': 'none',
                        })
                    if protocol in ('http', 'https'):
                        # Direct link
                        fmt.update({
                            'url': href,
                        })
                    elif protocol == 'rtmp':
                        application = connection.get('application', 'ondemand')
                        auth_string = connection.get('authString')
                        identifier = connection.get('identifier')
                        server = connection.get('server')
                        fmt.update({
                            'url': '%s://%s/%s?%s' % (protocol, server, application, auth_string),
                            'play_path': identifier,
                            'app': '%s?%s' % (application, auth_string),
                            'page_url': 'http://www.bbc.co.uk',
                            'player_url': 'http://www.bbc.co.uk/emp/releases/iplayer/revisions/617463_618125_4/617463_618125_4_emp.swf',
                            'rtmp_live': False,
                            'ext': 'flv',
                        })
                    else:
                        # Unknown protocol; nothing usable here.
                        continue
                    formats.append(fmt)
        elif kind == 'captions':
            subtitles = self.extract_subtitles(media, programme_id)
    return formats, subtitles
def _download_playlist(self, playlist_id):
    """Fetch /programmes/<id>/playlist.json and return
    (programme_id, title, description, duration, formats, subtitles)
    for its first programme item; on HTTP 404 fall back to the legacy
    XML playlist.
    """
    try:
        playlist = self._download_json(
            'http://www.bbc.co.uk/programmes/%s/playlist.json' % playlist_id,
            playlist_id, 'Downloading playlist JSON')
        version = playlist.get('defaultAvailableVersion')
        if version:
            smp_config = version['smpConfig']
            title = smp_config['title']
            description = smp_config['summary']
            for item in smp_config['items']:
                kind = item['kind']
                if kind not in ('programme', 'radioProgramme'):
                    continue
                # First playable item wins.
                programme_id = item.get('vpid')
                duration = int_or_none(item.get('duration'))
                formats, subtitles = self._download_media_selector(programme_id)
                return programme_id, title, description, duration, formats, subtitles
    except ExtractorError as ee:
        # Only a 404 means "no JSON playlist, try legacy"; anything
        # else is a real error.
        if not (isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 404):
            raise
    # fallback to legacy playlist
    return self._process_legacy_playlist(playlist_id)
def _process_legacy_playlist_url(self, url, display_id):
playlist = self._download_legacy_playlist_url(url, display_id)
return self._extract_from_legacy_playlist(playlist, display_id)
def _process_legacy_playlist(self, playlist_id):
return self._process_legacy_playlist_url(
'http://www.bbc.co.uk/iplayer/playlist/%s' % playlist_id, playlist_id)
def _download_legacy_playlist_url(self, url, playlist_id=None):
return self._download_xml(
url, playlist_id, 'Downloading legacy playlist XML')
def _extract_from_legacy_playlist(self, playlist, playlist_id):
    """Extract programme info from a legacy EMP playlist XML document.

    Returns (programme_id, title, description, duration, formats,
    subtitles) for the first programme/radioProgramme item, or raises
    ExtractorError when the playlist explicitly reports no items.
    """
    no_items = playlist.find('./{%s}noItems' % self._EMP_PLAYLIST_NS)
    if no_items is not None:
        # Map the unavailability reason code to a readable message.
        reason = no_items.get('reason')
        if reason == 'preAvailability':
            msg = 'Episode %s is not yet available' % playlist_id
        elif reason == 'postAvailability':
            msg = 'Episode %s is no longer available' % playlist_id
        elif reason == 'noMedia':
            msg = 'Episode %s is not currently available' % playlist_id
        else:
            msg = 'Episode %s is not available: %s' % (playlist_id, reason)
        raise ExtractorError(msg, expected=True)
    for item in self._extract_items(playlist):
        kind = item.get('kind')
        if kind not in ('programme', 'radioProgramme'):
            continue
        title = playlist.find('./{%s}title' % self._EMP_PLAYLIST_NS).text
        description_el = playlist.find('./{%s}summary' % self._EMP_PLAYLIST_NS)
        description = description_el.text if description_el is not None else None

        def get_programme_id(item):
            def get_from_attributes(item):
                # A valid programme id looks like b0123456 / p0123456.
                for p in ('identifier', 'group'):
                    value = item.get(p)
                    if value and re.match(r'^[pb][\da-z]{7}$', value):
                        return value
            # Prefer an id carried directly on the item.  (Previously
            # the result of this call was accidentally discarded, so
            # only the mediator element was ever consulted.)
            value = get_from_attributes(item)
            if value:
                return value
            mediator = item.find('./{%s}mediator' % self._EMP_PLAYLIST_NS)
            if mediator is not None:
                return get_from_attributes(mediator)

        programme_id = get_programme_id(item)
        duration = int_or_none(item.get('duration'))
        if programme_id:
            formats, subtitles = self._download_media_selector(programme_id)
        else:
            # No vpid found; run the item element itself through the
            # media selector processing.
            formats, subtitles = self._process_media_selector(item, playlist_id)
            programme_id = playlist_id
        return programme_id, title, description, duration, formats, subtitles
def _real_extract(self, url):
    """Extract a single BBC iPlayer programme page.

    Tries, in order: an explicit error message on the page, the
    tviplayer mediator JSON, a raw "vpid" value, and finally the
    playlist JSON/legacy playlist fallback.
    """
    group_id = self._match_id(url)
    webpage = self._download_webpage(url, group_id, 'Downloading video page')
    # Pages for unavailable programmes carry an smp/playout message.
    error = self._search_regex(
        r'<div\b[^>]+\bclass=["\'](?:smp|playout)__message delta["\'][^>]*>\s*([^<]+?)\s*<',
        webpage, 'error', default=None)
    if error:
        raise ExtractorError(error, expected=True)
    programme_id = None
    duration = None
    tviplayer = self._search_regex(
        r'mediator\.bind\(({.+?})\s*,\s*document\.getElementById',
        webpage, 'player', default=None)
    if tviplayer:
        player = self._parse_json(tviplayer, group_id).get('player', {})
        duration = int_or_none(player.get('duration'))
        programme_id = player.get('vpid')
    if not programme_id:
        programme_id = self._search_regex(
            r'"vpid"\s*:\s*"(%s)"' % self._ID_REGEX, webpage, 'vpid', fatal=False, default=None)
    if programme_id:
        formats, subtitles = self._download_media_selector(programme_id)
        title = self._og_search_title(webpage, default=None) or self._html_search_regex(
            (r'<h2[^>]+id="parent-title"[^>]*>(.+?)</h2>',
             r'<div[^>]+class="info"[^>]*>\s*<h1>(.+?)</h1>'), webpage, 'title')
        description = self._search_regex(
            (r'<p class="[^"]*medium-description[^"]*">([^<]+)</p>',
             r'<div[^>]+class="info_+synopsis"[^>]*>([^<]+)</div>'),
            webpage, 'description', default=None)
        if not description:
            description = self._html_search_meta('description', webpage)
    else:
        # No vpid anywhere on the page; resolve via playlist JSON.
        programme_id, title, description, duration, formats, subtitles = self._download_playlist(group_id)
    self._sort_formats(formats)
    return {
        'id': programme_id,
        'title': title,
        'description': description,
        'thumbnail': self._og_search_thumbnail(webpage, default=None),
        'duration': duration,
        'formats': formats,
        'subtitles': subtitles,
    }
class BBCIE(BBCCoUkIE):
    """Generic extractor for bbc.com / bbc.co.uk pages (news, sport,
    world-service articles, ...) that embed iPlayer media.

    Inherits the media-selector machinery from BBCCoUkIE but overrides
    the media sets queried.
    """
    IE_NAME = 'bbc'
    IE_DESC = 'BBC'
    _VALID_URL = r'https?://(?:www\.)?bbc\.(?:com|co\.uk)/(?:[^/]+/)+(?P<id>[^/#?]+)'
    # Media sets tried in order by _download_media_selector.
    _MEDIA_SETS = [
        'mobile-tablet-main',
        'pc',
    ]
_TESTS = [{
# article with multiple videos embedded with data-playable containing vpids
'url': 'http://www.bbc.com/news/world-europe-32668511',
'info_dict': {
'id': 'world-europe-32668511',
'title': 'Russia stages massive WW2 parade',
'description': 'md5:00ff61976f6081841f759a08bf78cc9c',
},
'playlist_count': 2,
}, {
# article with multiple videos embedded with data-playable (more videos)
'url': 'http://www.bbc.com/news/business-28299555',
'info_dict': {
'id': 'business-28299555',
'title': 'Farnborough Airshow: Video highlights',
'description': 'BBC reports and video highlights at the Farnborough Airshow.',
},
'playlist_count': 9,
'skip': 'Save time',
}, {
# article with multiple videos embedded with `new SMP()`
# broken
'url': 'http://www.bbc.co.uk/blogs/adamcurtis/entries/3662a707-0af9-3149-963f-47bea720b460',
'info_dict': {
'id': '3662a707-0af9-3149-963f-47bea720b460',
'title': 'BUGGER',
},
'playlist_count': 18,
}, {
# single video embedded with data-playable containing vpid
'url': 'http://www.bbc.com/news/world-europe-32041533',
'info_dict': {
'id': 'p02mprgb',
'ext': 'mp4',
'title': 'Aerial footage showed the site of the crash in the Alps - courtesy BFM TV',
'description': 'md5:2868290467291b37feda7863f7a83f54',
'duration': 47,
'timestamp': 1427219242,
'upload_date': '20150324',
},
'params': {
# rtmp download
'skip_download': True,
}
}, {
# article with single video embedded with data-playable containing XML playlist
# with direct video links as progressiveDownloadUrl (for now these are extracted)
# and playlist with f4m and m3u8 as streamingUrl
'url': 'http://www.bbc.com/turkce/haberler/2015/06/150615_telabyad_kentin_cogu',
'info_dict': {
'id': '150615_telabyad_kentin_cogu',
'ext': 'mp4',
'title': "YPG: Tel Abyad'ın tamamı kontrolümüzde",
'description': 'md5:33a4805a855c9baf7115fcbde57e7025',
'timestamp': 1434397334,
'upload_date': '20150615',
},
'params': {
'skip_download': True,
}
}, {
# single video embedded with data-playable containing XML playlists (regional section)
'url': 'http://www.bbc.com/mundo/video_fotos/2015/06/150619_video_honduras_militares_hospitales_corrupcion_aw',
'info_dict': {
'id': '150619_video_honduras_militares_hospitales_corrupcion_aw',
'ext': 'mp4',
'title': 'Honduras militariza sus hospitales por nuevo escándalo de corrupción',
'description': 'md5:1525f17448c4ee262b64b8f0c9ce66c8',
'timestamp': 1434713142,
'upload_date': '20150619',
},
'params': {
'skip_download': True,
}
}, {
# single video from video playlist embedded with vxp-playlist-data JSON
'url': 'http://www.bbc.com/news/video_and_audio/must_see/33376376',
'info_dict': {
'id': 'p02w6qjc',
'ext': 'mp4',
'title': '''Judge Mindy Glazer: "I'm sorry to see you here... I always wondered what happened to you"''',
'duration': 56,
'description': '''Judge Mindy Glazer: "I'm sorry to see you here... I always wondered what happened to you"''',
},
'params': {
'skip_download': True,
}
}, {
# single video story with digitalData
'url': 'http://www.bbc.com/travel/story/20150625-sri-lankas-spicy-secret',
'info_dict': {
'id': 'p02q6gc4',
'ext': 'flv',
'title': 'Sri Lanka’s spicy secret',
'description': 'As a new train line to Jaffna opens up the country’s north, travellers can experience a truly distinct slice of Tamil culture.',
'timestamp': 1437674293,
'upload_date': '20150723',
},
'params': {
# rtmp download
'skip_download': True,
}
}, {
# single video story without digitalData
'url': 'http://www.bbc.com/autos/story/20130513-hyundais-rock-star',
'info_dict': {
'id': 'p018zqqg',
'ext': 'mp4',
'title': 'Hyundai Santa Fe Sport: Rock star',
'description': 'md5:b042a26142c4154a6e472933cf20793d',
'timestamp': 1415867444,
'upload_date': '20141113',
},
'params': {
# rtmp download
'skip_download': True,
}
}, {
# single video embedded with Morph
'url': 'http://www.bbc.co.uk/sport/live/olympics/36895975',
'info_dict': {
'id': 'p041vhd0',
'ext': 'mp4',
'title': "Nigeria v Japan - Men's First Round",
'description': 'Live coverage of the first round from Group B at the Amazonia Arena.',
'duration': 7980,
'uploader': 'BBC Sport',
'uploader_id': 'bbc_sport',
},
'params': {
# m3u8 download
'skip_download': True,
},
'skip': 'Georestricted to UK',
}, {
# single video with playlist.sxml URL in playlist param
'url': 'http://www.bbc.com/sport/0/football/33653409',
'info_dict': {
'id': 'p02xycnp',
'ext': 'mp4',
'title': 'Transfers: Cristiano Ronaldo to Man Utd, Arsenal to spend?',
'description': 'BBC Sport\'s David Ornstein has the latest transfer gossip, including rumours of a Manchester United return for Cristiano Ronaldo.',
'duration': 140,
},
'params': {
# rtmp download
'skip_download': True,
}
}, {
# article with multiple videos embedded with playlist.sxml in playlist param
'url': 'http://www.bbc.com/sport/0/football/34475836',
'info_dict': {
'id': '34475836',
'title': 'Jurgen Klopp: Furious football from a witty and winning coach',
'description': 'Fast-paced football, wit, wisdom and a ready smile - why Liverpool fans should come to love new boss Jurgen Klopp.',
},
'playlist_count': 3,
}, {
# school report article with single video
'url': 'http://www.bbc.co.uk/schoolreport/35744779',
'info_dict': {
'id': '35744779',
'title': 'School which breaks down barriers in Jerusalem',
},
'playlist_count': 1,
}, {
# single video with playlist URL from weather section
'url': 'http://www.bbc.com/weather/features/33601775',
'only_matching': True,
}, {
# custom redirection to www.bbc.com
'url': 'http://www.bbc.co.uk/news/science-environment-33661876',
'only_matching': True,
}, {
# single video article embedded with data-media-vpid
'url': 'http://www.bbc.co.uk/sport/rowing/35908187',
'only_matching': True,
}, {
'url': 'https://www.bbc.co.uk/bbcthree/clip/73d0bbd0-abc3-4cea-b3c0-cdae21905eb1',
'info_dict': {
'id': 'p06556y7',
'ext': 'mp4',
'title': 'Transfers: Cristiano Ronaldo to Man Utd, Arsenal to spend?',
'description': 'md5:4b7dfd063d5a789a1512e99662be3ddd',
},
'params': {
'skip_download': True,
}
}, {
# window.__PRELOADED_STATE__
'url': 'https://www.bbc.co.uk/radio/play/b0b9z4yl',
'info_dict': {
'id': 'b0b9z4vz',
'ext': 'mp4',
'title': 'Prom 6: An American in Paris and Turangalila',
'description': 'md5:51cf7d6f5c8553f197e58203bc78dff8',
'uploader': 'Radio 3',
'uploader_id': 'bbc_radio_three',
},
}, {
'url': 'http://www.bbc.co.uk/learningenglish/chinese/features/lingohack/ep-181227',
'info_dict': {
'id': 'p06w9tws',
'ext': 'mp4',
'title': 'md5:2fabf12a726603193a2879a055f72514',
'description': 'Learn English words and phrases from this story',
},
'add_ie': [BBCCoUkIE.ie_key()],
}]
@classmethod
def suitable(cls, url):
    # Defer to the more specific BBC extractors when one of them
    # matches the URL; otherwise fall back to the generic pattern.
    EXCLUDE_IE = (BBCCoUkIE, BBCCoUkArticleIE, BBCCoUkIPlayerPlaylistIE, BBCCoUkPlaylistIE)
    return (False if any(ie.suitable(url) for ie in EXCLUDE_IE)
            else super(BBCIE, cls).suitable(url))
def _extract_from_media_meta(self, media_meta, video_id):
    """Return (formats, subtitles) for a data-media-meta dict.

    Tries, in order: direct sourceFiles links, an externalId handed to
    the media selector, and an href pointing at a legacy playlist.
    Returns ([], []) when none of them is present.
    """
    # Direct links to media in media metadata (e.g.
    # http://www.bbc.com/turkce/haberler/2015/06/150615_telabyad_kentin_cogu)
    # TODO: there are also f4m and m3u8 streams incorporated in playlist.sxml
    source_files = media_meta.get('sourceFiles')
    if source_files:
        return [{
            'url': f['url'],
            'format_id': format_id,
            'ext': f.get('encoding'),
            'tbr': float_or_none(f.get('bitrate'), 1000),
            'filesize': int_or_none(f.get('filesize')),
        } for format_id, f in source_files.items() if f.get('url')], []
    programme_id = media_meta.get('externalId')
    if programme_id:
        return self._download_media_selector(programme_id)
    # Process playlist.sxml as legacy playlist
    href = media_meta.get('href')
    if href:
        playlist = self._download_legacy_playlist_url(href)
        _, _, _, _, formats, subtitles = self._extract_from_legacy_playlist(playlist, video_id)
        return formats, subtitles
    return [], []
def _extract_from_playlist_sxml(self, url, playlist_id, timestamp):
programme_id, title, description, duration, formats, subtitles = \
self._process_legacy_playlist_url(url, playlist_id)
self._sort_formats(formats)
return {
'id': programme_id,
'title': title,
'description': description,
'duration': duration,
'timestamp': timestamp,
'formats': formats,
'subtitles': subtitles,
}
def _real_extract(self, url):
    """Extract media from a generic BBC page.

    Works through a long chain of embedding styles, returning from
    the first one that matches: playlist.sxml params, data-playable
    attributes, a programmes group id, a bare vpid, Morph payloads,
    window.__PRELOADED_STATE__, bbcthreeConfig, window.__INITIAL_DATA__,
    `new SMP(...)` embeds, and finally data-media-meta / vxp playlist
    data.
    """
    playlist_id = self._match_id(url)
    webpage = self._download_webpage(url, playlist_id)
    json_ld_info = self._search_json_ld(webpage, playlist_id, default={})
    timestamp = json_ld_info.get('timestamp')
    playlist_title = json_ld_info.get('title')
    if not playlist_title:
        playlist_title = self._og_search_title(
            webpage, default=None) or self._html_search_regex(
            r'<title>(.+?)</title>', webpage, 'playlist title', default=None)
        if playlist_title:
            # Strip the trailing " - BBC ..." site suffix.
            playlist_title = re.sub(r'(.+)\s*-\s*BBC.*?$', r'\1', playlist_title).strip()
    playlist_description = json_ld_info.get(
        'description') or self._og_search_description(webpage, default=None)
    if not timestamp:
        timestamp = parse_iso8601(self._search_regex(
            [r'<meta[^>]+property="article:published_time"[^>]+content="([^"]+)"',
             r'itemprop="datePublished"[^>]+datetime="([^"]+)"',
             r'"datePublished":\s*"([^"]+)'],
            webpage, 'date', default=None))
    entries = []
    # article with multiple videos embedded with playlist.sxml (e.g.
    # http://www.bbc.com/sport/0/football/34475836)
    playlists = re.findall(r'<param[^>]+name="playlist"[^>]+value="([^"]+)"', webpage)
    playlists.extend(re.findall(r'data-media-id="([^"]+/playlist\.sxml)"', webpage))
    if playlists:
        entries = [
            self._extract_from_playlist_sxml(playlist_url, playlist_id, timestamp)
            for playlist_url in playlists]
    # news article with multiple videos embedded with data-playable
    data_playables = re.findall(r'data-playable=(["\'])({.+?})\1', webpage)
    if data_playables:
        for _, data_playable_json in data_playables:
            data_playable = self._parse_json(
                unescapeHTML(data_playable_json), playlist_id, fatal=False)
            if not data_playable:
                continue
            settings = data_playable.get('settings', {})
            if settings:
                # data-playable with video vpid in settings.playlistObject.items (e.g.
                # http://www.bbc.com/news/world-us-canada-34473351)
                playlist_object = settings.get('playlistObject', {})
                if playlist_object:
                    items = playlist_object.get('items')
                    if items and isinstance(items, list):
                        title = playlist_object['title']
                        description = playlist_object.get('summary')
                        duration = int_or_none(items[0].get('duration'))
                        programme_id = items[0].get('vpid')
                        formats, subtitles = self._download_media_selector(programme_id)
                        self._sort_formats(formats)
                        entries.append({
                            'id': programme_id,
                            'title': title,
                            'description': description,
                            'timestamp': timestamp,
                            'duration': duration,
                            'formats': formats,
                            'subtitles': subtitles,
                        })
            else:
                # data-playable without vpid but with a playlist.sxml URLs
                # in otherSettings.playlist (e.g.
                # http://www.bbc.com/turkce/multimedya/2015/10/151010_vid_ankara_patlama_ani)
                playlist = data_playable.get('otherSettings', {}).get('playlist', {})
                if playlist:
                    entry = None
                    for key in ('streaming', 'progressiveDownload'):
                        playlist_url = playlist.get('%sUrl' % key)
                        if not playlist_url:
                            continue
                        try:
                            info = self._extract_from_playlist_sxml(
                                playlist_url, playlist_id, timestamp)
                            if not entry:
                                entry = info
                            else:
                                entry['title'] = info['title']
                                entry['formats'].extend(info['formats'])
                        except Exception as e:
                            # Some playlist URL may fail with 500, at the same time
                            # the other one may work fine (e.g.
                            # http://www.bbc.com/turkce/haberler/2015/06/150615_telabyad_kentin_cogu)
                            if isinstance(e.cause, compat_HTTPError) and e.cause.code == 500:
                                continue
                            raise
                    if entry:
                        self._sort_formats(entry['formats'])
                        entries.append(entry)
    if entries:
        return self.playlist_result(entries, playlist_id, playlist_title, playlist_description)
    # http://www.bbc.co.uk/learningenglish/chinese/features/lingohack/ep-181227
    group_id = self._search_regex(
        r'<div[^>]+\bclass=["\']video["\'][^>]+\bdata-pid=["\'](%s)' % self._ID_REGEX,
        webpage, 'group id', default=None)
    if group_id:
        return self.url_result(
            'https://www.bbc.co.uk/programmes/%s' % group_id,
            ie=BBCCoUkIE.ie_key())
    # single video story (e.g. http://www.bbc.com/travel/story/20150625-sri-lankas-spicy-secret)
    programme_id = self._search_regex(
        [r'data-(?:video-player|media)-vpid="(%s)"' % self._ID_REGEX,
         r'<param[^>]+name="externalIdentifier"[^>]+value="(%s)"' % self._ID_REGEX,
         r'videoId\s*:\s*["\'](%s)["\']' % self._ID_REGEX],
        webpage, 'vpid', default=None)
    if programme_id:
        formats, subtitles = self._download_media_selector(programme_id)
        self._sort_formats(formats)
        # digitalData may be missing (e.g. http://www.bbc.com/autos/story/20130513-hyundais-rock-star)
        digital_data = self._parse_json(
            self._search_regex(
                r'var\s+digitalData\s*=\s*({.+?});?\n', webpage, 'digital data', default='{}'),
            programme_id, fatal=False)
        page_info = digital_data.get('page', {}).get('pageInfo', {})
        title = page_info.get('pageName') or self._og_search_title(webpage)
        description = page_info.get('description') or self._og_search_description(webpage)
        timestamp = parse_iso8601(page_info.get('publicationDate')) or timestamp
        return {
            'id': programme_id,
            'title': title,
            'description': description,
            'timestamp': timestamp,
            'formats': formats,
            'subtitles': subtitles,
        }
    # Morph based embed (e.g. http://www.bbc.co.uk/sport/live/olympics/36895975)
    # There are several setPayload calls may be present but the video
    # seems to be always related to the first one
    morph_payload = self._parse_json(
        self._search_regex(
            r'Morph\.setPayload\([^,]+,\s*({.+?})\);',
            webpage, 'morph payload', default='{}'),
        playlist_id, fatal=False)
    if morph_payload:
        components = try_get(morph_payload, lambda x: x['body']['components'], list) or []
        for component in components:
            if not isinstance(component, dict):
                continue
            lead_media = try_get(component, lambda x: x['props']['leadMedia'], dict)
            if not lead_media:
                continue
            identifiers = lead_media.get('identifiers')
            if not identifiers or not isinstance(identifiers, dict):
                continue
            programme_id = identifiers.get('vpid') or identifiers.get('playablePid')
            if not programme_id:
                continue
            title = lead_media.get('title') or self._og_search_title(webpage)
            formats, subtitles = self._download_media_selector(programme_id)
            self._sort_formats(formats)
            description = lead_media.get('summary')
            uploader = lead_media.get('masterBrand')
            uploader_id = lead_media.get('mid')
            duration = None
            duration_d = lead_media.get('duration')
            if isinstance(duration_d, dict):
                duration = parse_duration(dict_get(
                    duration_d, ('rawDuration', 'formattedDuration', 'spokenDuration')))
            return {
                'id': programme_id,
                'title': title,
                'description': description,
                'duration': duration,
                'uploader': uploader,
                'uploader_id': uploader_id,
                'formats': formats,
                'subtitles': subtitles,
            }
    # Sounds/radio pages expose the programme via __PRELOADED_STATE__.
    preload_state = self._parse_json(self._search_regex(
        r'window\.__PRELOADED_STATE__\s*=\s*({.+?});', webpage,
        'preload state', default='{}'), playlist_id, fatal=False)
    if preload_state:
        current_programme = preload_state.get('programmes', {}).get('current') or {}
        programme_id = current_programme.get('id')
        if current_programme and programme_id and current_programme.get('type') == 'playable_item':
            title = current_programme.get('titles', {}).get('tertiary') or playlist_title
            formats, subtitles = self._download_media_selector(programme_id)
            self._sort_formats(formats)
            synopses = current_programme.get('synopses') or {}
            network = current_programme.get('network') or {}
            duration = int_or_none(
                current_programme.get('duration', {}).get('value'))
            thumbnail = None
            image_url = current_programme.get('image_url')
            if image_url:
                thumbnail = image_url.replace('{recipe}', '1920x1920')
            return {
                'id': programme_id,
                'title': title,
                'description': dict_get(synopses, ('long', 'medium', 'short')),
                'thumbnail': thumbnail,
                'duration': duration,
                'uploader': network.get('short_title'),
                'uploader_id': network.get('id'),
                'formats': formats,
                'subtitles': subtitles,
            }
    bbc3_config = self._parse_json(
        self._search_regex(
            r'(?s)bbcthreeConfig\s*=\s*({.+?})\s*;\s*<', webpage,
            'bbcthree config', default='{}'),
        playlist_id, transform_source=js_to_json, fatal=False) or {}
    payload = bbc3_config.get('payload') or {}
    if payload:
        clip = payload.get('currentClip') or {}
        clip_vpid = clip.get('vpid')
        clip_title = clip.get('title')
        if clip_vpid and clip_title:
            formats, subtitles = self._download_media_selector(clip_vpid)
            self._sort_formats(formats)
            return {
                'id': clip_vpid,
                'title': clip_title,
                'thumbnail': dict_get(clip, ('poster', 'imageUrl')),
                'description': clip.get('description'),
                'duration': parse_duration(clip.get('duration')),
                'formats': formats,
                'subtitles': subtitles,
            }
        bbc3_playlist = try_get(
            payload, lambda x: x['content']['bbcMedia']['playlist'],
            dict)
        if bbc3_playlist:
            playlist_title = bbc3_playlist.get('title') or playlist_title
            thumbnail = bbc3_playlist.get('holdingImageURL')
            entries = []
            for bbc3_item in bbc3_playlist['items']:
                programme_id = bbc3_item.get('versionID')
                if not programme_id:
                    continue
                formats, subtitles = self._download_media_selector(programme_id)
                self._sort_formats(formats)
                entries.append({
                    'id': programme_id,
                    'title': playlist_title,
                    'thumbnail': thumbnail,
                    'timestamp': timestamp,
                    'formats': formats,
                    'subtitles': subtitles,
                })
            return self.playlist_result(
                entries, playlist_id, playlist_title, playlist_description)
    initial_data = self._parse_json(self._search_regex(
        r'window\.__INITIAL_DATA__\s*=\s*({.+?});', webpage,
        'preload state', default='{}'), playlist_id, fatal=False)
    if initial_data:
        def parse_media(media):
            # Appends one entry per playable item to the enclosing
            # `entries` list.
            if not media:
                return
            for item in (try_get(media, lambda x: x['media']['items'], list) or []):
                item_id = item.get('id')
                item_title = item.get('title')
                if not (item_id and item_title):
                    continue
                formats, subtitles = self._download_media_selector(item_id)
                self._sort_formats(formats)
                entries.append({
                    'id': item_id,
                    'title': item_title,
                    'thumbnail': item.get('holdingImageUrl'),
                    'formats': formats,
                    'subtitles': subtitles,
                })
        for resp in (initial_data.get('data') or {}).values():
            name = resp.get('name')
            if name == 'media-experience':
                parse_media(try_get(resp, lambda x: x['data']['initialItem']['mediaItem'], dict))
            elif name == 'article':
                for block in (try_get(resp, lambda x: x['data']['blocks'], list) or []):
                    if block.get('type') != 'media':
                        continue
                    parse_media(block.get('model'))
        return self.playlist_result(
            entries, playlist_id, playlist_title, playlist_description)

    def extract_all(pattern):
        # Find every JSON blob matching `pattern` and parse it,
        # dropping the ones that fail to parse.
        return list(filter(None, map(
            lambda s: self._parse_json(s, playlist_id, fatal=False),
            re.findall(pattern, webpage))))
    # Multiple video article (e.g.
    # http://www.bbc.co.uk/blogs/adamcurtis/entries/3662a707-0af9-3149-963f-47bea720b460)
    EMBED_URL = r'https?://(?:www\.)?bbc\.co\.uk/(?:[^/]+/)+%s(?:\b[^"]+)?' % self._ID_REGEX
    entries = []
    for match in extract_all(r'new\s+SMP\(({.+?})\)'):
        embed_url = match.get('playerSettings', {}).get('externalEmbedUrl')
        if embed_url and re.match(EMBED_URL, embed_url):
            entries.append(embed_url)
    entries.extend(re.findall(
        r'setPlaylist\("(%s)"\)' % EMBED_URL, webpage))
    if entries:
        return self.playlist_result(
            [self.url_result(entry_, 'BBCCoUk') for entry_ in entries],
            playlist_id, playlist_title, playlist_description)
    # Multiple video article (e.g. http://www.bbc.com/news/world-europe-32668511)
    medias = extract_all(r"data-media-meta='({[^']+})'")
    if not medias:
        # Single video article (e.g. http://www.bbc.com/news/video_and_audio/international)
        media_asset = self._search_regex(
            r'mediaAssetPage\.init\(\s*({.+?}), "/',
            webpage, 'media asset', default=None)
        if media_asset:
            media_asset_page = self._parse_json(media_asset, playlist_id, fatal=False)
            medias = []
            for video in media_asset_page.get('videos', {}).values():
                medias.extend(video.values())
    if not medias:
        # Multiple video playlist with single `now playing` entry (e.g.
        # http://www.bbc.com/news/video_and_audio/must_see/33767813)
        vxp_playlist = self._parse_json(
            self._search_regex(
                r'<script[^>]+class="vxp-playlist-data"[^>]+type="application/json"[^>]*>([^<]+)</script>',
                webpage, 'playlist data'),
            playlist_id)
        playlist_medias = []
        for item in vxp_playlist:
            media = item.get('media')
            if not media:
                continue
            playlist_medias.append(media)
            # Download single video if found media with asset id matching the video id from URL
            if item.get('advert', {}).get('assetId') == playlist_id:
                medias = [media]
                break
        # Fallback to the whole playlist
        if not medias:
            medias = playlist_medias
    entries = []
    for num, media_meta in enumerate(medias, start=1):
        formats, subtitles = self._extract_from_media_meta(media_meta, playlist_id)
        if not formats:
            continue
        self._sort_formats(formats)
        video_id = media_meta.get('externalId')
        if not video_id:
            video_id = playlist_id if len(medias) == 1 else '%s-%s' % (playlist_id, num)
        title = media_meta.get('caption')
        if not title:
            title = playlist_title if len(medias) == 1 else '%s - Video %s' % (playlist_title, num)
        duration = int_or_none(media_meta.get('durationInSeconds')) or parse_duration(media_meta.get('duration'))
        images = []
        for image in media_meta.get('images', {}).values():
            images.extend(image.values())
        if 'image' in media_meta:
            images.append(media_meta['image'])
        thumbnails = [{
            'url': image.get('href'),
            'width': int_or_none(image.get('width')),
            'height': int_or_none(image.get('height')),
        } for image in images]
        entries.append({
            'id': video_id,
            'title': title,
            'thumbnails': thumbnails,
            'duration': duration,
            'timestamp': timestamp,
            'formats': formats,
            'subtitles': subtitles,
        })
    return self.playlist_result(entries, playlist_id, playlist_title, playlist_description)
class BBCCoUkArticleIE(InfoExtractor):
    """Extract every clip embedded in a BBC programmes article page."""
    _VALID_URL = r'https?://(?:www\.)?bbc\.co\.uk/programmes/articles/(?P<id>[a-zA-Z0-9]+)'
    IE_NAME = 'bbc.co.uk:article'
    IE_DESC = 'BBC articles'
    _TEST = {
        'url': 'http://www.bbc.co.uk/programmes/articles/3jNQLTMrPlYGTBn0WV6M2MS/not-your-typical-role-model-ada-lovelace-the-19th-century-programmer',
        'info_dict': {
            'id': '3jNQLTMrPlYGTBn0WV6M2MS',
            'title': 'Calculating Ada: The Countess of Computing - Not your typical role model: Ada Lovelace the 19th century programmer - BBC Four',
            'description': 'Hannah Fry reveals some of her surprising discoveries about Ada Lovelace during filming.',
        },
        'playlist_count': 4,
        'add_ie': ['BBCCoUk'],
    }

    def _real_extract(self, url):
        playlist_id = self._match_id(url)
        webpage = self._download_webpage(url, playlist_id)
        title = self._og_search_title(webpage)
        description = self._og_search_description(webpage).strip()
        # Each embedded clip is marked up with typeof="Clip"; its
        # resource attribute carries the programme URL.
        clip_urls = re.findall(
            r'<div[^>]+typeof="Clip"[^>]+resource="([^"]+)"', webpage)
        entries = [self.url_result(clip_url) for clip_url in clip_urls]
        return self.playlist_result(entries, playlist_id, title, description)
class BBCCoUkPlaylistBaseIE(InfoExtractor):
    # Shared scraping logic for BBC playlist pages. Subclasses must define
    # _URL_TEMPLATE, _VIDEO_ID_TEMPLATE and _extract_title_and_description().

    def _entries(self, webpage, url, playlist_id):
        # Yield url_result entries for every video id on the page(s),
        # following "next page" pagination links unless the request URL
        # already pins an explicit page via a ?page=N query parameter.
        single_page = 'page' in compat_urlparse.parse_qs(
            compat_urlparse.urlparse(url).query)
        # Counting starts at 2 because the first page is already downloaded.
        for page_num in itertools.count(2):
            for video_id in re.findall(
                    self._VIDEO_ID_TEMPLATE % BBCCoUkIE._ID_REGEX, webpage):
                yield self.url_result(
                    self._URL_TEMPLATE % video_id, BBCCoUkIE.ie_key())
            if single_page:
                return
            # Look for the pagination "next" link; stop when there is none.
            next_page = self._search_regex(
                r'<li[^>]+class=(["\'])pagination_+next\1[^>]*><a[^>]+href=(["\'])(?P<url>(?:(?!\2).)+)\2',
                webpage, 'next page url', default=None, group='url')
            if not next_page:
                break
            webpage = self._download_webpage(
                compat_urlparse.urljoin(url, next_page), playlist_id,
                'Downloading page %d' % page_num, page_num)

    def _real_extract(self, url):
        # Download the first page, extract metadata, and return a lazy
        # playlist backed by the _entries generator.
        playlist_id = self._match_id(url)
        webpage = self._download_webpage(url, playlist_id)
        title, description = self._extract_title_and_description(webpage)
        return self.playlist_result(
            self._entries(webpage, url, playlist_id),
            playlist_id, title, description)
class BBCCoUkIPlayerPlaylistIE(BBCCoUkPlaylistBaseIE):
    # Playlist extractor for iPlayer "episodes" and "group" pages.
    IE_NAME = 'bbc.co.uk:iplayer:playlist'
    _VALID_URL = r'https?://(?:www\.)?bbc\.co\.uk/iplayer/(?:episodes|group)/(?P<id>%s)' % BBCCoUkIE._ID_REGEX
    _URL_TEMPLATE = 'http://www.bbc.co.uk/iplayer/episode/%s'
    # iPlayer pages carry the programme id in a data-ip-id attribute.
    _VIDEO_ID_TEMPLATE = r'data-ip-id=["\'](%s)'
    _TESTS = [{
        'url': 'http://www.bbc.co.uk/iplayer/episodes/b05rcz9v',
        'info_dict': {
            'id': 'b05rcz9v',
            'title': 'The Disappearance',
            'description': 'French thriller serial about a missing teenager.',
        },
        'playlist_mincount': 6,
        'skip': 'This programme is not currently available on BBC iPlayer',
    }, {
        # Available for over a year unlike 30 days for most other programmes
        'url': 'http://www.bbc.co.uk/iplayer/group/p02tcc32',
        'info_dict': {
            'id': 'p02tcc32',
            'title': 'Bohemian Icons',
            'description': 'md5:683e901041b2fe9ba596f2ab04c4dbe7',
        },
        'playlist_mincount': 10,
    }]

    def _extract_title_and_description(self, webpage):
        # iPlayer pages expose the title in a plain <h1> and the description
        # in a <p class="subtitle"> element; both are optional (fatal=False).
        title = self._search_regex(r'<h1>([^<]+)</h1>', webpage, 'title', fatal=False)
        description = self._search_regex(
            r'<p[^>]+class=(["\'])subtitle\1[^>]*>(?P<value>[^<]+)</p>',
            webpage, 'description', fatal=False, group='value')
        return title, description
class BBCCoUkPlaylistIE(BBCCoUkPlaylistBaseIE):
    # Playlist extractor for programme episodes/broadcasts/clips pages.
    IE_NAME = 'bbc.co.uk:playlist'
    _VALID_URL = r'https?://(?:www\.)?bbc\.co\.uk/programmes/(?P<id>%s)/(?:episodes|broadcasts|clips)' % BBCCoUkIE._ID_REGEX
    _URL_TEMPLATE = 'http://www.bbc.co.uk/programmes/%s'
    # Programme pages carry each item's id in a data-pid attribute.
    _VIDEO_ID_TEMPLATE = r'data-pid=["\'](%s)'
    _TESTS = [{
        'url': 'http://www.bbc.co.uk/programmes/b05rcz9v/clips',
        'info_dict': {
            'id': 'b05rcz9v',
            'title': 'The Disappearance - Clips - BBC Four',
            'description': 'French thriller serial about a missing teenager.',
        },
        'playlist_mincount': 7,
    }, {
        # multipage playlist, explicit page
        'url': 'http://www.bbc.co.uk/programmes/b00mfl7n/clips?page=1',
        'info_dict': {
            'id': 'b00mfl7n',
            'title': 'Frozen Planet - Clips - BBC One',
            'description': 'md5:65dcbf591ae628dafe32aa6c4a4a0d8c',
        },
        'playlist_mincount': 24,
    }, {
        # multipage playlist, all pages
        'url': 'http://www.bbc.co.uk/programmes/b00mfl7n/clips',
        'info_dict': {
            'id': 'b00mfl7n',
            'title': 'Frozen Planet - Clips - BBC One',
            'description': 'md5:65dcbf591ae628dafe32aa6c4a4a0d8c',
        },
        'playlist_mincount': 142,
    }, {
        'url': 'http://www.bbc.co.uk/programmes/b05rcz9v/broadcasts/2016/06',
        'only_matching': True,
    }, {
        'url': 'http://www.bbc.co.uk/programmes/b05rcz9v/clips',
        'only_matching': True,
    }, {
        'url': 'http://www.bbc.co.uk/programmes/b055jkys/episodes/player',
        'only_matching': True,
    }]

    def _extract_title_and_description(self, webpage):
        # Programme pages provide standard Open Graph metadata.
        title = self._og_search_title(webpage, fatal=False)
        description = self._og_search_description(webpage)
        return title, description
|
rbrito/pkg-youtube-dl
|
youtube_dl/extractor/bbc.py
|
Python
|
unlicense
| 60,023
|
[
"VisIt"
] |
a893f4a1468f765b1002f3bb560c7b7fe2881e417b9a511f5d60564ae95b9740
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import unicode_literals
import unittest
import os
from pymatgen.analysis.pourbaix.entry import PourbaixEntry, IonEntry, MultiEntry
from pymatgen.analysis.pourbaix.entry import PourbaixEntryIO
from pymatgen.analysis.phase_diagram import PDEntry
from pymatgen.core.ion import Ion
from pymatgen.core.structure import Composition
from pymatgen.core.periodic_table import Element
class TestPourbaixEntry(unittest.TestCase):
    """Exercise PourbaixEntry wrapping both a solid and an ion entry."""

    def setUp(self):
        # Solid: Mn2O3 with a fictitious energy of 49.
        self.solentry = PDEntry(Composition("Mn2O3"), 49)
        # Ion: permanganate with a fictitious energy of 25.
        self.ionentry = IonEntry(Ion.from_formula("MnO4-"), 25)
        self.PxIon = PourbaixEntry(self.ionentry)
        self.PxSol = PourbaixEntry(self.solentry)
        self.PxIon.conc = 1e-4

    def test_pourbaix_entry(self):
        # The wrappers must expose the underlying entries unchanged.
        self.assertEqual(self.PxIon.entry.energy, 25, "Wrong Energy!")
        self.assertEqual(self.PxIon.entry.name, "MnO4[-]", "Wrong Entry!")
        self.assertEqual(self.PxSol.entry.energy, 49, "Wrong Energy!")
        self.assertEqual(self.PxSol.entry.name, "Mn2O3", "Wrong Entry!")
        self.assertEqual(self.PxIon.g0, 25, "Wrong Energy!")
        self.assertEqual(self.PxSol.g0, 49, "Wrong Energy!")
        self.assertEqual(self.PxIon.conc, 1e-4, "Wrong concentration!")

    def test_calc_coeff_terms(self):
        # Expected stoichiometric coefficients (pH, potential, water) for
        # each entry.
        for entry, npH, nPhi, nH2O in [(self.PxIon, -8, -7, 4),
                                       (self.PxSol, -6, -6, 3)]:
            self.assertEqual(entry.npH, npH, "Wrong npH!")
            self.assertEqual(entry.nPhi, nPhi, "Wrong nPhi!")
            self.assertEqual(entry.nH2O, nH2O, "Wrong nH2O!")

    def test_to_from_dict(self):
        # Round-trip through the dict representation.
        round_tripped = self.PxIon.from_dict(self.PxIon.as_dict())
        self.assertEqual(round_tripped.entry.name, "MnO4[-]", "Wrong Entry!")
class MultiEntryTest(unittest.TestCase):
    """
    Test MultiEntry using fictitious entries
    """

    def setUp(self):
        # Build a mixed list of solid (PDEntry-based) and ion (IonEntry-based)
        # Pourbaix entries together with their mixing weights.
        entrylist = []
        weights = []
        for entry, weight in [
                (PDEntry(Composition("Mn2O3"), 49), 1.0),
                (IonEntry(Ion.from_formula("MnO4[-]"), 25), 0.25),
                (PDEntry(Composition("Fe2O3"), 50), 0.5),
                (IonEntry(Ion.from_formula("Fe[2+]"), 15), 2.5),
                (IonEntry(Ion.from_formula("Fe[3+]"), 20), 1.5)]:
            entrylist.append(PourbaixEntry(entry))
            weights.append(weight)
        self.weights = weights
        self.entrylist = entrylist
        self.multientry = MultiEntry(entrylist, weights)

    def test_multi_entry(self):
        # A MultiEntry's coefficients must equal the weighted sums of its
        # component entries' coefficients.
        pairs = list(zip(self.weights, self.entrylist))
        sum_g0 = sum(w * e.g0 for w, e in pairs)
        sum_npH = sum(w * e.npH for w, e in pairs)
        sum_nPhi = sum(w * e.nPhi for w, e in pairs)
        sum_nH2O = sum(w * e.nH2O for w, e in pairs)
        # BUG FIX: the message must be passed as msg=...; the third
        # positional argument of assertAlmostEqual is `places`, so the old
        # positional-string calls raised TypeError whenever the compared
        # values actually differed.
        self.assertAlmostEqual(sum_g0, self.multientry.g0,
                               msg="g0 doesn't match")
        self.assertAlmostEqual(sum_npH, self.multientry.npH,
                               msg="npH doesn't match")
        self.assertAlmostEqual(sum_nPhi, self.multientry.nPhi,
                               msg="nPhi doesn't match")
        self.assertAlmostEqual(sum_nH2O, self.multientry.nH2O,
                               msg="nH2O doesn't match")
class IonEntryTest(unittest.TestCase):
    """Exercise IonEntry with a fictitious permanganate entry."""

    def setUp(self):
        self.entry = IonEntry(Ion.from_formula("MnO4[-]"), 49)

    def test_get_energy(self):
        self.assertEqual(self.entry.energy, 49, "Wrong energy!")

    def test_get_name(self):
        self.assertEqual(self.entry.name, 'MnO4[-]', "Wrong name!")

    def test_get_composition(self):
        # The composition must match the ion the entry was built from.
        self.assertEqual(self.entry.composition,
                         Ion.from_formula('MnO4[-]'),
                         "Wrong composition!")

    def test_to_from_dict(self):
        # Round-trip through the dict representation; MnO4- has 5 atoms.
        restored = IonEntry.from_dict(self.entry.as_dict())
        self.assertEqual(restored.name, 'MnO4[-]', "Wrong name!")
        self.assertEqual(restored.energy_per_atom, 49.0 / 5)
class TestPourbaixEntryIO(unittest.TestCase):
    """
    Test Pourbaix Entry IO class
    """

    def test_read_write_csv(self):
        """Round-trip a mixed set of solid and ion entries through CSV."""
        Zn_solids = ["Zn", "ZnO", "ZnO2"]
        sol_g = [0.0, -3.338, -1.315]
        Zn_ions = ["Zn[2+]", "ZnOH[+]", "HZnO2[-]", "ZnO2[2-]", "ZnO"]
        liq_g = [-1.527, -3.415, -4.812, -4.036, -2.921]
        liq_conc = [1e-6, 1e-6, 1e-6, 1e-6, 1e-6]
        # Pair each formula with its free energy via zip instead of the
        # original repeated O(n) list.index() lookups.
        solid_entry = [PourbaixEntry(PDEntry(Composition(sol), delg))
                       for sol, delg in zip(Zn_solids, sol_g)]
        ion_entry = []
        for ion, delg, conc in zip(Zn_ions, liq_g, liq_conc):
            PoE = PourbaixEntry(IonEntry(Ion.from_formula(ion), delg))
            PoE.conc = conc
            ion_entry.append(PoE)
        entries = solid_entry + ion_entry
        PourbaixEntryIO.to_csv("pourbaix_test_entries.csv", entries)
        try:
            (elements, entries) = PourbaixEntryIO.from_csv(
                "pourbaix_test_entries.csv")
            self.assertEqual(elements,
                             [Element('Zn'), Element('H'), Element('O')],
                             "Wrong elements!")
            self.assertEqual(len(entries), 8, "Wrong number of entries!")
        finally:
            # Remove the temporary CSV even when an assertion fails, so
            # repeated runs start from a clean directory.
            os.remove("pourbaix_test_entries.csv")
if __name__ == '__main__':
    # Run the test suite when this module is executed directly.
    unittest.main()
|
setten/pymatgen
|
pymatgen/analysis/pourbaix/tests/test_entry.py
|
Python
|
mit
| 6,029
|
[
"pymatgen"
] |
9c0193fb679ac73b88a1bc45e9061c30199c9f6480c9e1b1d23ff3c60ac2af57
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views import defaults as default_views
from django.views.generic import TemplateView

from rest_framework.routers import DefaultRouter

from malepierre.characters.views import CharacterViewSet
# DRF router exposing the character API endpoints (mounted under /api/ below).
router = DefaultRouter()
router.register(r'characters', CharacterViewSet)
urlpatterns = [
    # Static landing pages rendered directly from templates.
    url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
    url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
    # Django Admin
    url(r'^admin/', include(admin.site.urls)),
    # User management
    url(r'^users/', include("malepierre.users.urls", namespace="users")),
    url(r'^accounts/', include('allauth.urls')),
    # Your stuff: custom urls includes go here
    url(r'^api/', include(router.urls)),
    # Catch-all: character pages live at the site root.
    url(r'^', include('malepierre.characters.urls')),
    # Serve user-uploaded media from MEDIA_ROOT (development convenience).
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    # Use the view callables instead of dotted-path strings: string view
    # arguments to url() are deprecated and were removed in Django 1.10.
    urlpatterns += [
        url(r'^400/$', default_views.bad_request),
        url(r'^403/$', default_views.permission_denied),
        url(r'^404/$', default_views.page_not_found),
        url(r'^500/$', default_views.server_error),
    ]
|
EliotBerriot/malepierre
|
config/urls.py
|
Python
|
bsd-3-clause
| 1,510
|
[
"VisIt"
] |
a925c30cfb30c6f55dd2f57b1c6551e44139b9b43089898de70b2892499fb6e3
|
"""
=================
:mod:`crossovers`
=================
.. Copyright 2012 Aaron Garrett
.. Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
.. The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
.. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
.. module:: crossovers
.. moduleauthor:: Aaron Garrett <garrett@inspiredintelligence.io>
"""
import copy
import functools
import math
try:
import cPickle as pickle
except ImportError:
import pickle
def crossover(cross):
    """Turn a two-parent crossover function into an inspyred variator.

    ``cross`` must accept ``(random, mom, dad, args)`` and return an
    iterable of offspring. The generated function pairs off the selected
    candidates (odd positions as moms, even as dads, dropping a trailing
    unpaired candidate), invokes ``cross`` on each pair, and flattens all
    offspring into a single list. Before each invocation the pair's index
    is stored on ``cross.index``. The original function remains available
    as the ``single_crossover`` attribute of the returned function.
    """
    @functools.wraps(cross)
    def inspyred_crossover(random, candidates, args):
        # Drop the trailing unpaired candidate, then pair the rest off.
        paired = candidates[:len(candidates) - len(candidates) % 2]
        children = []
        for index, (mom, dad) in enumerate(zip(paired[::2], paired[1::2])):
            cross.index = index
            children.extend(cross(random, mom, dad, args))
        return children
    inspyred_crossover.single_crossover = cross
    return inspyred_crossover
@crossover
def n_point_crossover(random, mom, dad, args):
    """Return the offspring of n-point crossover on the candidates.

    This function performs n-point crossover (NPX). It selects *n*
    random points without replacement at which to 'cut' the candidate
    solutions and recombine them: the segments between successive cut
    points alternate between the two parents.

    .. Arguments:
       random -- the random number generator object
       mom -- the first parent candidate
       dad -- the second parent candidate
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *crossover_rate* -- the rate at which crossover is performed
      (default 1.0)
    - *num_crossover_points* -- the number of crossover points used (default 1)
    """
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    num_crossover_points = args.setdefault('num_crossover_points', 1)
    children = []
    if random.random() < crossover_rate:
        # At most len(mom)-1 cuts are possible (never cut at index 0).
        num_cuts = min(len(mom) - 1, num_crossover_points)
        cut_points = random.sample(range(1, len(mom)), num_cuts)
        cut_points.sort()
        bro = copy.copy(dad)
        sis = copy.copy(mom)
        # Toggle the exchange state only at cut points so whole segments
        # are swapped between parents. (The previous code re-toggled the
        # flag after each swapped element, which degenerated segment
        # exchange into single-element swaps.)
        normal = True
        for i, (m, d) in enumerate(zip(mom, dad)):
            if i in cut_points:
                normal = not normal
            if not normal:
                bro[i] = m
                sis[i] = d
        children.append(bro)
        children.append(sis)
    else:
        children.append(mom)
        children.append(dad)
    return children
@crossover
def uniform_crossover(random, mom, dad, args):
    """Return the offspring of uniform crossover on the candidates.

    Performs uniform crossover (UX): for each element a biased coin flip
    (probability ``ux_bias``) decides whether the first offspring inherits
    the 'mom' or the 'dad' allele, with the second offspring receiving
    the complement.

    .. Arguments:
       random -- the random number generator object
       mom -- the first parent candidate
       dad -- the second parent candidate
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *crossover_rate* -- the rate at which crossover is performed
      (default 1.0)
    - *ux_bias* -- the bias toward the first candidate in the crossover
      (default 0.5)
    """
    ux_bias = args.setdefault('ux_bias', 0.5)
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    if random.random() >= crossover_rate:
        # No crossover this time: the offspring are the parents themselves.
        return [mom, dad]
    first_child = copy.copy(dad)
    second_child = copy.copy(mom)
    for index, (m_allele, d_allele) in enumerate(zip(mom, dad)):
        # One coin flip per position; below the bias the alleles swap.
        if random.random() < ux_bias:
            first_child[index] = m_allele
            second_child[index] = d_allele
    return [first_child, second_child]
@crossover
def partially_matched_crossover(random, mom, dad, args):
    """Return the offspring of partially matched crossover on the candidates.

    This function performs partially matched crossover (PMX). This type of
    crossover assumes that candidates are composed of discrete values that
    are permutations of a given set (typically integers). It produces offspring
    that are themselves permutations of the set.

    .. Arguments:
       random -- the random number generator object
       mom -- the first parent candidate
       dad -- the second parent candidate
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *crossover_rate* -- the rate at which crossover is performed
      (default 1.0)
    """
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    if random.random() < crossover_rate:
        size = len(mom)
        # Choose two distinct indices and order them as the segment [x, y].
        points = random.sample(range(size), 2)
        x, y = min(points), max(points)
        # Each child receives the other parent's middle segment verbatim.
        bro = copy.copy(dad)
        bro[x:y+1] = mom[x:y+1]
        sis = copy.copy(mom)
        sis[x:y+1] = dad[x:y+1]
        # Repair duplicates outside the copied segment so each child stays
        # a permutation: follow the mapping induced by the exchanged
        # segment until a position outside [x, y] is reached.
        for parent, child in zip([dad, mom], [bro, sis]):
            for i in range(x, y+1):
                if parent[i] not in child[x:y+1]:
                    spot = i
                    while x <= spot <= y:
                        spot = parent.index(child[spot])
                    child[spot] = parent[i]
        return [bro, sis]
    else:
        return [mom, dad]
@crossover
def arithmetic_crossover(random, mom, dad, args):
    """Return the offspring of arithmetic crossover on the candidates.

    Performs arithmetic crossover (AX): each offspring allele is a
    weighted average of the parent alleles, the first parent's allele
    weighted by ``ax_alpha`` and the other by ``1 - ax_alpha``. Averaging
    is applied only at the indices listed in ``ax_points`` (all indices
    when that argument is ``None``). Results are passed through the EC's
    bounder.

    .. Arguments:
       random -- the random number generator object
       mom -- the first parent candidate
       dad -- the second parent candidate
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *crossover_rate* -- the rate at which crossover is performed
      (default 1.0)
    - *ax_alpha* -- the weight for the averaging (default 0.5)
    - *ax_points* -- a list of points specifying the alleles to
      recombine (default None)
    """
    ax_alpha = args.setdefault('ax_alpha', 0.5)
    ax_points = args.setdefault('ax_points', None)
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    bounder = args['_ec'].bounder
    if random.random() >= crossover_rate:
        # No crossover this time: offspring are the parents themselves.
        return [mom, dad]
    offspring_a = copy.copy(dad)
    offspring_b = copy.copy(mom)
    points = ax_points if ax_points is not None else list(
        range(min(len(offspring_a), len(offspring_b))))
    for j in points:
        offspring_a[j] = ax_alpha * mom[j] + (1 - ax_alpha) * dad[j]
        offspring_b[j] = ax_alpha * dad[j] + (1 - ax_alpha) * mom[j]
    return [bounder(offspring_a, args), bounder(offspring_b, args)]
@crossover
def blend_crossover(random, mom, dad, args):
    """Return the offspring of blend crossover on the candidates.

    Performs blend crossover (BLX): each offspring allele is drawn
    uniformly from the interval spanned by the parent alleles, widened on
    both sides by ``blx_alpha`` times the interval width — the widening
    provides additional exploration. Only indices in ``blx_points`` are
    blended (all indices when ``None``). Results are passed through the
    EC's bounder.

    .. Arguments:
       random -- the random number generator object
       mom -- the first parent candidate
       dad -- the second parent candidate
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *crossover_rate* -- the rate at which crossover is performed
      (default 1.0)
    - *blx_alpha* -- the blending rate (default 0.1)
    - *blx_points* -- a list of points specifying the alleles to
      recombine (default None)
    """
    blx_alpha = args.setdefault('blx_alpha', 0.1)
    blx_points = args.setdefault('blx_points', None)
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    bounder = args['_ec'].bounder
    if random.random() >= crossover_rate:
        # No crossover this time: offspring are the parents themselves.
        return [mom, dad]
    child_one = copy.copy(dad)
    child_two = copy.copy(mom)
    points = blx_points if blx_points is not None else list(
        range(min(len(child_one), len(child_two))))
    for j in points:
        lo, hi = min(mom[j], dad[j]), max(mom[j], dad[j])
        spread = blx_alpha * (hi - lo)
        # One independent draw per child from [lo - spread, hi + spread].
        child_one[j] = lo - spread + random.random() * (hi - lo + 2 * spread)
        child_two[j] = lo - spread + random.random() * (hi - lo + 2 * spread)
    return [bounder(child_one, args), bounder(child_two, args)]
def heuristic_crossover(random, candidates, args):
    """Return the offspring of heuristic crossover on the candidates.

    It performs heuristic crossover (HX), which is similar to the
    update rule used in particle swarm optimization. This function
    also makes use of the bounder function as specified in the EC's
    ``evolve`` method.

    .. note::
       This function assumes that candidates can be pickled (for hashing
       as keys to a dictionary).

    .. Arguments:
       random -- the random number generator object
       candidates -- the candidate solutions
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *crossover_rate* -- the rate at which crossover is performed
      (default 1.0)
    """
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    bounder = args['_ec'].bounder
    # Drop the trailing unpaired candidate, as the @crossover decorator does.
    if len(candidates) % 2 == 1:
        candidates = candidates[:-1]
    # Since we don't have fitness information in the candidates, we need
    # to make a dictionary containing the candidate and its corresponding
    # individual in the population.
    population = list(args['_ec'].population)
    lookup = dict(zip([pickle.dumps(p.candidate, 1) for p in population], population))
    moms = candidates[::2]
    dads = candidates[1::2]
    children = []
    for mom, dad in zip(moms, dads):
        if random.random() < crossover_rate:
            bro = copy.copy(dad)
            sis = copy.copy(mom)
            # NOTE(review): this compares the looked-up Individual objects
            # directly — presumably Individual defines ordering by fitness
            # elsewhere in inspyred; confirm before relying on it.
            mom_is_better = lookup[pickle.dumps(mom, 1)] > lookup[pickle.dumps(dad, 1)]
            for i, (m, d) in enumerate(zip(mom, dad)):
                # Step from the worse parent toward the better one by a
                # random fraction of the per-element difference.
                negpos = 1 if mom_is_better else -1
                val = d if mom_is_better else m
                bro[i] = val + random.random() * negpos * (m - d)
                sis[i] = val + random.random() * negpos * (m - d)
            bro = bounder(bro, args)
            sis = bounder(sis, args)
            children.append(bro)
            children.append(sis)
        else:
            children.append(mom)
            children.append(dad)
    return children
@crossover
def simulated_binary_crossover(random, mom, dad, args):
    """Return the offspring of simulated binary crossover on the candidates.

    This function performs simulated binary crossover (SBX), following the
    implementation in NSGA-II
    `(Deb et al., ICANNGA 1999) <http://vision.ucsd.edu/~sagarwal/icannga.pdf>`_.

    .. Arguments:
       random -- the random number generator object
       mom -- the first parent candidate
       dad -- the second parent candidate
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *crossover_rate* -- the rate at which crossover is performed
      (default 1.0)
    - *sbx_distribution_index* -- the non-negative distribution index
      (default 10)

    A small value of the `sbx_distribution_index` optional argument allows
    solutions far away from parents to be created as child solutions,
    while a large value restricts only near-parent solutions to be created as
    child solutions.
    """
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    if random.random() < crossover_rate:
        di = args.setdefault('sbx_distribution_index', 10)
        # NOTE(review): assumes the bounder exposes per-element
        # lower_bound/upper_bound sequences (a Bounder, not a bare
        # callable) — confirm against the EC configuration.
        bounder = args['_ec'].bounder
        bro = copy.copy(dad)
        sis = copy.copy(mom)
        for i, (m, d, lb, ub) in enumerate(zip(mom, dad, bounder.lower_bound, bounder.upper_bound)):
            try:
                # Order the pair so m <= d; equal values raise
                # ZeroDivisionError below and leave the element untouched.
                if m > d:
                    m, d = d, m
                # Spread limited by the distance to the nearer bound.
                beta = 1.0 + 2 * min(m - lb, ub - d) / float(d - m)
                alpha = 2.0 - 1.0 / beta**(di + 1.0)
                u = random.random()
                # Sample the spread factor beta_q from the SBX distribution.
                if u <= (1.0 / alpha):
                    beta_q = (u * alpha)**(1.0 / float(di + 1.0))
                else:
                    beta_q = (1.0 / (2.0 - u * alpha))**(1.0 / float(di + 1.0))
                bro_val = 0.5 * ((m + d) - beta_q * (d - m))
                bro_val = max(min(bro_val, ub), lb)
                sis_val = 0.5 * ((m + d) + beta_q * (d - m))
                sis_val = max(min(sis_val, ub), lb)
                # Randomly swap which child gets the lower/upper value.
                if random.random() > 0.5:
                    bro_val, sis_val = sis_val, bro_val
                bro[i] = bro_val
                sis[i] = sis_val
            except ZeroDivisionError:
                # The offspring already have legitimate values for every element,
                # so no need to take any special action here.
                pass
        return [bro, sis]
    else:
        return [mom, dad]
@crossover
def laplace_crossover(random, mom, dad, args):
    """Return the offspring of Laplace crossover on the candidates.

    Performs Laplace crossover (LX) as specified in (Deep and Thakur,
    "A new crossover operator for real coded genetic algorithms," Applied
    Mathematics and Computation, Volume 188, Issue 1, May 2007,
    pp. 895--911). A Laplace-distributed factor ``beta`` scales the
    per-element parent difference; results go through the EC's bounder.

    .. Arguments:
       random -- the random number generator object
       mom -- the first parent candidate
       dad -- the second parent candidate
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *crossover_rate* -- the rate at which crossover is performed
      (default 1.0)
    - *lx_location* -- the location parameter (default 0)
    - *lx_scale* -- the scale parameter (default 0.5)

    ``lx_location`` and ``lx_scale`` play roles analogous to the mean and
    standard deviation of a Gaussian: a scale near zero keeps offspring
    near the parents, a larger scale spreads them out.
    """
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    if random.random() >= crossover_rate:
        # No crossover this time: offspring are the parents themselves.
        return [mom, dad]
    bounder = args['_ec'].bounder
    location = args.setdefault('lx_location', 0)
    scale = args.setdefault('lx_scale', 0.5)
    child_a = copy.copy(dad)
    child_b = copy.copy(mom)
    for j, (m_val, d_val) in enumerate(zip(mom, dad)):
        # Draw a Laplace(location, scale) variate via inverse transform.
        u = random.random()
        if random.random() <= 0.5:
            beta = location - scale * math.log(u)
        else:
            beta = location + scale * math.log(u)
        child_a[j] = m_val + beta * abs(m_val - d_val)
        child_b[j] = d_val + beta * abs(m_val - d_val)
    return [bounder(child_a, args), bounder(child_b, args)]
|
aarongarrett/inspyred
|
inspyred/ec/variators/crossovers.py
|
Python
|
mit
| 18,622
|
[
"Gaussian"
] |
32c6beadb296fa4e252bb048ab48e80bed33d84573a43c23d0b2ccb7a97d3263
|
#!/usr/bin/env python
# easyFig.py Written by: Mitchell Sullivan mjsull@gmail.com
# Supervisor: Dr. Scott Beatson and Dr. Nico Petty University of Queensland
# Version 2.2.3 08.11.2016
# License: GPLv3
import os
import subprocess
from math import ceil, hypot
import threading
import time
import struct
import base64
import string
from ftplib import FTP
import tarfile
import platform
import shutil
import webbrowser
import operator
import sys
def colorstr(rgb):
    """Return a 3-digit hex colour string (#rgb) for an (r, g, b) triple.

    Each 0-255 channel is reduced to a single hex digit (4-bit precision).
    """
    return "#" + "".join("%x" % int(channel / 16) for channel in rgb)
def binar(s):
    """Return the binary-string expansion of a hex string, 4 bits per digit.

    Unlike ``bin(int(s, 16))``, every input digit contributes exactly four
    output characters, so leading zeros are preserved (e.g. "0f" ->
    "00001111"). Uses format() instead of the original hand-rolled lookup
    table, which also makes uppercase hex digits work.
    """
    return "".join(format(int(digit, 16), "04b") for digit in s)
class scalableVectorGraphics:
    def __init__(self, height, width):
        """Create an SVG canvas of the given pixel height and width.

        The document is accumulated as text in self.out; drawing methods
        append elements and writesvg() closes the root tags and saves it.
        """
        self.height = height
        self.width = width
        self.out = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
height="%d"
width="%d"
id="svg2"
version="1.1"
inkscape:version="0.48.4 r9939"
sodipodi:docname="easyfig">
<metadata
id="metadata122">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title>Easyfig</dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<defs
id="defs120" />
<sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="640"
inkscape:window-height="480"
id="namedview118"
showgrid="false"
inkscape:zoom="0.0584"
inkscape:cx="2500"
inkscape:cy="75.5"
inkscape:window-x="55"
inkscape:window-y="34"
inkscape:window-maximized="0"
inkscape:current-layer="svg2" />
<title
id="title4">Easyfig</title>
<g
style="fill-opacity:1.0; stroke:black; stroke-width:1;"
id="g6">""" % (
            self.height,
            self.width,
        )
def drawLine(self, x1, y1, x2, y2, th=1, cl=(0, 0, 0)):
self.out += (
' <line x1="%d" y1="%d" x2="%d" y2="%d"\n stroke-width="%d" stroke="%s" />\n'
% (x1, y1, x2, y2, th, colorstr(cl))
)
def writesvg(self, filename):
outfile = open(filename, "w")
outfile.write(self.out + " </g>\n</svg>")
outfile.close()
    def drawRightArrow(self, x, y, wid, ht, fc, oc=(0, 0, 0), lt=1):
        """Append a right-pointing arrow polygon (feature glyph).

        (x, y) is the top-left of the bounding box, wid/ht its size, fc the
        fill colour, oc the outline colour and lt the outline width.
        """
        # Outline width cannot exceed half the arrow height.
        if lt > ht / 2:
            lt = ht / 2
        x1 = x + wid           # arrow tip x
        y1 = y + ht / 2        # vertical centre (tip y)
        x2 = x + wid - ht / 2  # x where the head begins
        ht -= 1
        if wid > ht / 2:
            # Normal case: rectangular shaft plus triangular head.
            self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
                colorstr(fc),
                colorstr(oc),
                lt,
            )
            self.out += (
                ' points="%d,%d %d,%d %d,%d %d,%d %d,%d %d,%d %d,%d" />\n'
                % (
                    x,
                    y + ht / 4,
                    x2,
                    y + ht / 4,
                    x2,
                    y,
                    x1,
                    y1,
                    x2,
                    y + ht,
                    x2,
                    y + 3 * ht / 4,
                    x,
                    y + 3 * ht / 4,
                )
            )
        else:
            # Too narrow for a shaft: draw only the triangular head.
            self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
                colorstr(fc),
                colorstr(oc),
                lt,
            )
            self.out += ' points="%d,%d %d,%d %d,%d" />\n' % (
                x,
                y,
                x,
                y + ht,
                x + wid,
                y1,
            )
    def drawLeftArrow(self, x, y, wid, ht, fc, oc=(0, 0, 0), lt=1):
        """Append a left-pointing arrow polygon (mirror of drawRightArrow).

        (x, y) is the top-left of the bounding box, wid/ht its size, fc the
        fill colour, oc the outline colour and lt the outline width.
        """
        # Outline width cannot exceed half the arrow height.
        if lt > ht / 2:
            lt = ht / 2
        x1 = x + wid       # right edge of the shaft
        y1 = y + ht / 2    # vertical centre (tip y)
        x2 = x + ht / 2    # x where the head ends (tip at x)
        ht -= 1
        if wid > ht / 2:
            # Normal case: triangular head plus rectangular shaft.
            self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
                colorstr(fc),
                colorstr(oc),
                lt,
            )
            self.out += (
                ' points="%d,%d %d,%d %d,%d %d,%d %d,%d %d,%d %d,%d" />\n'
                % (
                    x1,
                    y + ht / 4,
                    x2,
                    y + ht / 4,
                    x2,
                    y,
                    x,
                    y1,
                    x2,
                    y + ht,
                    x2,
                    y + 3 * ht / 4,
                    x1,
                    y + 3 * ht / 4,
                )
            )
        else:
            # Too narrow for a shaft: draw only the triangular head.
            self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
                colorstr(fc),
                colorstr(oc),
                lt,
            )
            self.out += ' points="%d,%d %d,%d %d,%d" />\n' % (
                x,
                y1,
                x1,
                y + ht,
                x1,
                y,
            )
def drawBlastHit(self, x1, y1, x2, y2, x3, y3, x4, y4, fill=(0, 0, 255), lt=0):
self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
colorstr(fill),
colorstr(fill),
lt,
)
self.out += ' points="%d,%d %d,%d %d,%d %d,%d" />\n' % (
x1,
y1,
x2,
y2,
x3,
y3,
x4,
y4,
)
    def drawGradient(self, x1, y1, wid, hei, minc, maxc):
        """Append a rectangle filled with a vertical colour gradient.

        maxc is the colour at the top, minc at the bottom. NOTE: the
        gradient id "MyGradient" is document-global, so a later call
        redefines it for every rect that references it.
        """
        self.out += ' <defs>\n <linearGradient id="MyGradient" x1="0%" y1="0%" x2="0%" y2="100%">\n'
        self.out += ' <stop offset="0%%" stop-color="%s" />\n' % colorstr(maxc)
        self.out += ' <stop offset="100%%" stop-color="%s" />\n' % colorstr(minc)
        self.out += " </linearGradient>\n </defs>\n"
        self.out += ' <rect fill="url(#MyGradient)" stroke-width="0"\n'
        self.out += ' x="%d" y="%d" width="%d" height="%d"/>\n' % (
            x1,
            y1,
            wid,
            hei,
        )
    def drawGradient2(self, x1, y1, wid, hei, minc, maxc):
        """Append a second gradient-filled rectangle (id "MyGradient2").

        Identical to drawGradient but uses a distinct gradient id so two
        independent gradients can coexist in the document.
        """
        self.out += ' <defs>\n <linearGradient id="MyGradient2" x1="0%" y1="0%" x2="0%" y2="100%">\n'
        self.out += ' <stop offset="0%%" stop-color="%s" />\n' % colorstr(maxc)
        self.out += ' <stop offset="100%%" stop-color="%s" />\n' % colorstr(minc)
        self.out += " </linearGradient>\n</defs>\n"
        self.out += ' <rect fill="url(#MyGradient2)" stroke-width="0"\n'
        self.out += ' x="%d" y="%d" width="%d" height="%d" />\n' % (
            x1,
            y1,
            wid,
            hei,
        )
def drawOutRect(self, x1, y1, wid, hei, fill, lt=1):
self.out += ' <rect fill="%s" stroke-width="%d"\n' % (colorstr(fill), lt)
self.out += ' x="%d" y="%d" width="%d" height="%d" />\n' % (
x1,
y1,
wid,
hei,
)
    def drawRightFrame(self, x, y, wid, ht, lt, frame, fill):
        """Append a right-pointing pentagon to the SVG output.

        *frame* (0-2) selects one of three vertical bands within the row of
        height *ht* -- presumably one per reading frame; confirm against
        the callers.
        """
        # Cap the stroke width at half the row height.
        if lt > ht / 2:
            lt = ht / 2
        # Bottom (y1), middle (y2) and top (y3) edges of the chosen band.
        if frame == 1:
            y1 = y + ht / 2
            y2 = y + ht * 3 / 8
            y3 = y + ht * 1 / 4
        elif frame == 2:
            y1 = y + ht * 3 / 8
            y2 = y + ht * 1 / 4
            y3 = y + ht * 1 / 8
        elif frame == 0:
            y1 = y + ht * 1 / 4
            y2 = y + ht * 1 / 8
            y3 = y + 1
        # NOTE(review): any other *frame* value leaves y1-y3 unbound and the
        # formatting below raises UnboundLocalError.
        x1 = x
        x2 = x + wid - ht / 8
        x3 = x + wid
        if wid > ht / 8:
            # Wide enough for a body: pentagon with a pointed right-hand end.
            self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
                colorstr(fill),
                colorstr((0, 0, 0)),
                lt,
            )
            self.out += ' points="%d,%d %d,%d %d,%d %d,%d %d,%d" />\n' % (
                x1,
                y1,
                x2,
                y1,
                x3,
                y2,
                x2,
                y3,
                x1,
                y3,
            )
        else:
            # Too narrow: collapse to a simple triangle.
            self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
                colorstr(fill),
                colorstr((0, 0, 0)),
                lt,
            )
            self.out += ' points="%d,%d %d,%d %d,%d" />\n' % (
                x1,
                y1,
                x3,
                y2,
                x1,
                y3,
            )
def drawRightFrameRect(self, x, y, wid, ht, lt, frame, fill):
if lt > ht / 2:
lt = ht / 2
if frame == 1:
y1 = y + ht / 4
elif frame == 2:
y1 = y + ht / 8
elif frame == 0:
y1 = y + 1
hei = ht / 4
x1 = x
self.out += ' <rect fill="%s" stroke-width="%d"\n' % (colorstr(fill), lt)
self.out += ' x="%d" y="%d" width="%d" height="%d" />\n' % (
x1,
y1,
wid,
hei,
)
    def drawLeftFrame(self, x, y, wid, ht, lt, frame, fill):
        """Append a left-pointing pentagon to the SVG output.

        Mirror image of drawRightFrame: *frame* (0-2) selects one of three
        vertical bands in the lower half of the row -- presumably one per
        reverse reading frame; confirm against the callers.
        """
        # Cap the stroke width at half the row height.
        if lt > ht / 2:
            lt = ht / 2
        # Bottom (y1), middle (y2) and top (y3) edges of the chosen band.
        if frame == 1:
            y1 = y + ht
            y2 = y + ht * 7 / 8
            y3 = y + ht * 3 / 4
        elif frame == 2:
            y1 = y + ht * 7 / 8
            y2 = y + ht * 3 / 4
            y3 = y + ht * 5 / 8
        elif frame == 0:
            y1 = y + ht * 3 / 4
            y2 = y + ht * 5 / 8
            y3 = y + ht / 2
        # NOTE(review): any other *frame* value leaves y1-y3 unbound and the
        # formatting below raises UnboundLocalError.
        x1 = x + wid
        x2 = x + ht / 8
        x3 = x
        if wid > ht / 8:
            # Wide enough for a body: pentagon with a pointed left-hand end.
            self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
                colorstr(fill),
                colorstr((0, 0, 0)),
                lt,
            )
            self.out += ' points="%d,%d %d,%d %d,%d %d,%d %d,%d" />\n' % (
                x1,
                y1,
                x2,
                y1,
                x3,
                y2,
                x2,
                y3,
                x1,
                y3,
            )
        else:
            # Too narrow: collapse to a simple triangle.
            self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
                colorstr(fill),
                colorstr((0, 0, 0)),
                lt,
            )
            self.out += ' points="%d,%d %d,%d %d,%d" />\n' % (
                x1,
                y1,
                x3,
                y2,
                x1,
                y3,
            )
def drawLeftFrameRect(self, x, y, wid, ht, lt, frame, fill):
if lt > ht / 2:
lt = ht / 2
if frame == 1:
y1 = y + ht * 3 / 4
elif frame == 2:
y1 = y + ht * 5 / 8
elif frame == 0:
y1 = y + ht / 2
hei = ht / 4
x1 = x
self.out += ' <rect fill="%s" stroke-width="%d"\n' % (colorstr(fill), lt)
self.out += ' x="%d" y="%d" width="%d" height="%d" />\n' % (
x1,
y1,
wid,
hei,
)
def drawPointer(self, x, y, ht, lt, fill):
x1 = x - int(round(0.577350269 * ht / 2))
x2 = x + int(round(0.577350269 * ht / 2))
y1 = y + ht / 2
y2 = y + 1
self.out += ' <polygon fill="%s" stroke="%s" stroke-width="%d"\n' % (
colorstr(fill),
colorstr((0, 0, 0)),
lt,
)
self.out += ' points="%d,%d %d,%d %d,%d" />\n' % (
x1,
y2,
x2,
y2,
x,
y1,
)
def drawDash(self, x1, y1, x2, y2, exont):
self.out += ' <line x1="%d" y1="%d" x2="%d" y2="%d"\n' % (x1, y1, x2, y2)
self.out += ' style="stroke-dasharray: 5, 3, 9, 3"\n'
self.out += ' stroke="#000" stroke-width="%d" />\n' % exont
    def writeString(
        self, thestring, x, y, size, ital=False, bold=False, rotate=0, justify="left"
    ):
        """Append a <text> element (Inkscape-flavoured SVG) to the output.

        *rotate* of 1 or -1 rotates the text 90 degrees via a transform
        matrix; *justify* "right" anchors the text at its end.
        """
        # A rotation transform swaps the axes, so swap the coordinates too.
        if rotate != 0:
            x, y = y, x
        self.out += " <text\n"
        self.out += (
            ' style="font-size:%dpx;font-style:normal;font-weight:normal\
;line-height:125%%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans"\n'
            % size
        )
        if justify == "right":
            self.out += ' text-anchor="end"\n'
        # Under the rotation matrices below the x coordinate is negated.
        if rotate == 1:
            self.out += ' x="-%d"\n' % x
        else:
            self.out += ' x="%d"\n' % x
        if rotate == -1:
            self.out += ' y="-%d"\n' % y
        else:
            self.out += ' y="%d"\n' % y
        # sodipodi:* attributes are Inkscape extensions.
        self.out += ' sodipodi:linespacing="125%"'
        if rotate == -1:
            self.out += '\n transform="matrix(0,1,-1,0,0,0)"'
        if rotate == 1:
            self.out += '\n transform="matrix(0,-1,1,0,0,0)"'
        self.out += '><tspan\n sodipodi:role="line"\n'
        # The tspan repeats the (possibly negated) coordinates.
        if rotate == 1:
            self.out += ' x="-%d"\n' % x
        else:
            self.out += ' x="%d"\n' % x
        if rotate == -1:
            self.out += ' y="-%d"' % y
        else:
            self.out += ' y="%d"' % y
        if ital and bold:
            self.out += '\nstyle="font-style:italic;font-weight:bold"'
        elif ital:
            self.out += '\nstyle="font-style:italic"'
        elif bold:
            self.out += '\nstyle="font-style:normal;font-weight:bold"'
        self.out += ">" + thestring + "</tspan></text>\n"
# class of blast hit data
class BlastHit:
    """One row of tabular BLAST output, with numeric fields coerced."""

    def __init__(
        self,
        query,
        ref,
        ident,
        length,
        mismatch,
        gaps,
        qStart,
        qEnd,
        rStart,
        rEnd,
        eValue,
        bitscore,
    ):
        # Sequence identifiers are stored verbatim.
        self.query = query
        self.ref = ref
        # Percent identity is real-valued.
        self.ident = float(ident)
        # Alignment length, counts and coordinates are integral.
        for attr, raw in (
            ("length", length),
            ("mismatch", mismatch),
            ("gaps", gaps),
            ("qStart", qStart),
            ("qEnd", qEnd),
            ("rStart", rStart),
            ("rEnd", rEnd),
        ):
            setattr(self, attr, int(raw))
        # Statistics are real-valued.
        self.eValue = float(eValue)
        self.bitscore = float(bitscore)
# class for feature data
class feature:
    """A genome feature with drawing metadata (type, strand, colour, name).

    ``start``/``stop`` may be plain ints or, for multi-part features,
    sequences of ints (one per part); ``length`` handles both cases.
    """

    def __init__(self, start, stop, type, strand, colour, name):
        self.start = start
        self.stop = stop
        self.type = type  # feature type string (parameter name kept for callers)
        self.strand = strand
        self.colour = colour
        self.name = name

    def length(self):
        """Return the feature span in bases (first start to last stop)."""
        # isinstance() is the idiomatic type check (was: type(...) == int).
        if isinstance(self.start, int):
            return self.stop - self.start
        return self.stop[-1] - self.start[0]
# method for converting base pair position into pixel position
def convertPos(length, maxlength, width, pos, aln):
    """Convert a base-pair position to a pixel x coordinate.

    *aln* chooses how a genome shorter than *maxlength* is anchored on the
    canvas: "centre", "left", "right", or "best blast" (which applies the
    module-level ``shifter`` offset).  Unknown values return None.
    """
    if aln == "centre":
        pad = (maxlength - length) * 1.0 / 2
        return int(((pad + pos) * 1.0 / maxlength) * width)
    elif aln == "left":
        return int((pos * 1.0 / maxlength) * width)
    elif aln == "right":
        pad = (maxlength - length) * 1.0
        return int(((pad + pos) * 1.0 / maxlength) * width)
    elif aln == "best blast":
        return int(((pos + shifter) * 1.0 / maxlength) * width)
# method for converting base pair position into pixel position if the genome has been reversed
def convertPosR(length, maxlength, width, pos, aln):
    """Convert a base-pair position to a pixel x coordinate for a genome
    drawn reversed (mirrored across the canvas width).

    Same *aln* modes as convertPos; unknown values return None.
    """
    if aln == "centre":
        pad = (maxlength - length) * 1.0 / 2
        return int(width - (((pad + pos) * 1.0 / maxlength) * width))
    elif aln == "left":
        pad = (maxlength - length) * 1.0
        return int(width - (((pad + pos) * 1.0 / maxlength) * width))
    elif aln == "right":
        return int(width - ((pos * 1.0 / maxlength) * width))
    elif aln == "best blast":
        pad = (maxlength - length) * 1.0
        return int(width - (((pad + pos - shifter) * 1.0 / maxlength) * width))
""" Functions and classes for the bmp module.
This section of the code uses a modified version of Paul McGuire's
(http://www.geocities.com/ptmcg/) (RIP geocities/under construction gifs)
bmp.py - module for constructing simple BMP graphics files
It is freely available under the following license
license for all code contained:
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
def shortToString(i):
    """Serialize *i* as a 2-byte little-endian string (one chr per byte)."""
    hi = (i & 0xFF00) >> 8
    lo = i & 0x00FF
    return chr(lo) + chr(hi)
def longToString(i):
    """Serialize *i* as a 4-byte little-endian string (one chr per byte).

    Bugfix: the high-word mask was 0x7FFF0000, which silently dropped
    bit 31 of any value >= 2**31; use the full 0xFFFF0000 mask so all
    32 bits round-trip.
    """
    hi = (int(i) & 0xFFFF0000) >> 16
    lo = int(i) & 0x0000FFFF
    return shortToString(lo) + shortToString(hi)
# class
class Color(object):
    """An immutable RGB colour used while drawing BitMap elements."""

    __slots__ = ["red", "grn", "blu"]
    __shade = 32  # step used by lighten()/darken()

    def __init__(self, r=0, g=0, b=0):
        self.red = r
        self.grn = g
        self.blu = b

    def __setattr__(self, name, value):
        # Each channel may be set exactly once (in __init__); any later
        # assignment is rejected, making instances effectively immutable.
        if hasattr(self, name):
            raise AttributeError("Color is immutable")
        else:
            object.__setattr__(self, name, value)

    def __str__(self):
        return "R:%d G:%d B:%d" % (self.red, self.grn, self.blu)

    def __hash__(self):
        # Pack the channels into a single 24-bit integer: 0xRRGGBB.
        return (int(self.blu)) + (int(self.grn) << 8) + (int(self.red) << 16)

    def __eq__(self, other):
        # Bugfix: the original compared the bound methods themselves
        # (self.toLong == other.toLong), which in Python 3 is essentially
        # an identity test, so equal-valued colours never compared equal
        # (violating the __eq__/__hash__ contract).  Call the methods and
        # compare their packed values instead.
        return (self is other) or (self.toLong() == other.toLong())

    def lighten(self):
        """Return a new Color brightened by the shade step (clamped at 255)."""
        return Color(
            min(self.red + Color.__shade, 255),
            min(self.grn + Color.__shade, 255),
            min(self.blu + Color.__shade, 255),
        )

    def darken(self):
        """Return a new Color dimmed by the shade step (clamped at 0)."""
        return Color(
            max(self.red - Color.__shade, 0),
            max(self.grn - Color.__shade, 0),
            max(self.blu - Color.__shade, 0),
        )

    def toLong(self):
        """Return the colour packed as a 24-bit 0xRRGGBB integer."""
        return self.__hash__()

    def fromLong(l):
        """Unpack a 24-bit 0xRRGGBB integer into a Color."""
        b = l & 0xFF
        l = l >> 8
        g = l & 0xFF
        l = l >> 8
        r = l & 0xFF
        return Color(r, g, b)

    fromLong = staticmethod(fromLong)
# Define class-level constants for the common colours.
for _name, _rgb in (
    ("BLACK", (0, 0, 0)),
    ("RED", (255, 0, 0)),
    ("GREEN", (0, 255, 0)),
    ("BLUE", (0, 0, 255)),
    ("CYAN", (0, 255, 255)),
    ("MAGENTA", (255, 0, 255)),
    ("YELLOW", (255, 255, 0)),
    ("WHITE", (255, 255, 255)),
    ("DKRED", (128, 0, 0)),
    ("DKGREEN", (0, 128, 0)),
    ("DKBLUE", (0, 0, 128)),
    ("TEAL", (0, 128, 128)),
    ("PURPLE", (128, 0, 128)),
    ("BROWN", (128, 128, 0)),
    ("GRAY", (128, 128, 128)),
):
    setattr(Color, _name, Color(*_rgb))
del _name, _rgb
class BitMap(object):
"""class for drawing and saving simple Windows bitmap files"""
LINE_SOLID = 0
LINE_DASHED = 1
LINE_DOTTED = 2
LINE_DOT_DASH = 3
_DASH_LEN = 12.0
_DOT_LEN = 6.0
_DOT_DASH_LEN = _DOT_LEN + _DASH_LEN
def __init__(self, width, height, bkgd=Color.WHITE, frgd=Color.BLACK):
self.wd = int(ceil(width))
self.ht = int(ceil(height))
self.bg = 0
self.fg = 1
self.palette = []
self.palette.append(bkgd.toLong())
self.palette.append(frgd.toLong())
self.setDefaultPenColor()
tmparray = [self.bg] * self.wd
self.bitarray = [tmparray[:] for i in range(self.ht)]
self.currentPen = 1
self.fontName = "%s-%d-%s" % ("none", 0, "none")
self.defsize = 64
self.amatrixwid = 41
self.amatrixhei = 48
self.amatrixori = 16
self.amatrix = "3ff8000001ffffc00003fffff80003fffffe0003ffffff8003ffffffe003\
fffffff801ff801ffc01ff0001fe00ff00007f80ff00003fc07f80000fe0\
3fc00007f01fc00003f80fe00001fc00000000fe000000007f000000007f\
800000003fc0000000ffe000000ffff00003fffff8001ffffffc003fffff\
fe007fffff7f00fffff83f807fff001fc07ff8000fe07fe00007f03fc000\
03f81fe00001fc1fe00000fe0fe000007f07f000003f83f800001fc1fc00\
001fe0fe00000ff07f00000ff83fc0000ffc0ff0001ffe07fc001fff01ff\
807fffc0ffffffcffe3fffffc7ff0fffffc3ff83ffff80ffc07fff007fe0\
07f8000fe"
self.bmatrixwid = 40
self.bmatrixhei = 64
self.bmatrixori = 0
self.bmatrix = "7f000000007f000000007f000000007f000000007f000000007f00000000\
7f000000007f000000007f000000007f000000007f000000007f00000000\
7f000000007f000000007f000000007f000000007f007fc0007f03fff800\
7f07fffe007f1fffff007f3fffff807f7fffffc07f7fffffe07fff00fff0\
7ffc003ff07ff8000ff87ff00007f87fe00003fc7fe00003fc7fc00001fe\
7fc00001fe7f800000fe7f800000fe7f800000fe7f000000ff7f0000007f\
7f0000007f7f0000007fff0000007fff0000007fff0000007fff0000007f\
ff0000007fff0000007fff0000007fff800000feff800000feff800000fe\
ff800000feffc00001feffc00001fcffe00003fcffe00007f8fff00007f8\
fff8000ff8fffe003ff0ffff80ffe0feffffffe0fe7fffffc0fe3fffff80\
fe1ffffe00fe0ffffc000003fff00000003f0000"
self.cmatrixwid = 37
self.cmatrixhei = 48
self.cmatrixori = 16
self.cmatrix = "1ff0000007fff00000ffffe0001fffff8001ffffff001ffffff801ffffff\
e00ffc01ff80ff8007fc0ff8001ff07f80007f87f80001fe3fc0000ff1fc\
00003f9fe00001fcfe00000fe7f00000003f80000001f80000001fc00000\
00fe00000007f00000003f80000001fc0000000fe00000007f00000003f8\
0000001fc0000000fe00000007f00000001fc0000000fe000007f7f00000\
3fbfc00003f8fe00001fc7f80000fe3fc0000ff0ff0000ff07fc000ff81f\
f000ff807fe01ffc03ffffffc00ffffffc003fffffc000fffffc0001ffff\
c00003fff8000001fc000"
self.dmatrixwid = 40
self.dmatrixhei = 64
self.dmatrixori = 0
self.dmatrix = "7f000000007f000000007f000000007f000000007f000000007f00000000\
7f000000007f000000007f000000007f000000007f000000007f00000000\
7f000000007f000000007f000000007f0003ff007f001fffc07f003ffff0\
7f00fffffc7f01fffffe7f03ffffff7f07ffffffff0fff00ffff0ffc003f\
ff1ff0000fff1fe00007ff3fe00003ff3fc00003ff7f800001ff7f800001\
ff7f000000ff7f000000ff7f000000ffff000000fffe0000007ffe000000\
7ffe0000007ffe0000007ffe0000007ffe0000007ffe0000007ffe000000\
7ffe0000007ffe0000007f7f000000ff7f000000ff7f000000ff7f000000\
ff7f800001ff3f800001ff3fc00003ff3fe00003ff1ff00007ff1ff8000f\
ff0ffc003fff07ff00ffff07ffffff7f03fffffe7f01fffffc7f00fffff8\
7f003ffff07f000fffc0000000fc0000"
self.ematrixwid = 39
self.ematrixhei = 48
self.ematrixori = 16
self.ematrix = "1ff0000001fffe00000fffff00003fffff0001ffffff0007ffffff001fff\
ffff003ff007ff00ff8003ff03fe0003fe07f80001fe0fe00003fc3f8000\
03fc7f000003f9fc000007f3f800000fe7f000000fcfc000001fff800000\
3fff0000007fffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffffff800000007f00000000fe00000001fc00000001f800000003\
f800000007f000001fcfe000003f8fe00000ff1fc00001fc3fc00007f83f\
c0001fe07fc0003fc07fe001ff007ff00ffe00fffffff800ffffffe000ff\
ffff80007ffffe00007ffff000003fff80000007f8000"
self.fmatrixwid = 20
self.fmatrixhei = 62
self.fmatrixori = 0
self.fmatrix = "7f003ff007ff00fff01fff01fff01fe003fc003f8003f8003f8003f8003f\
8003f8003f8003f8003f80ffffffffffffffffffffffffffffff03f8003f\
8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f\
8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f\
8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f\
8003f80"
self.gmatrixwid = 39
self.gmatrixhei = 65
self.gmatrixori = 16
self.gmatrix = "1ff0000000fff80fe007fffc1fc03ffffc3f80fffffc7f03fffffcfe0fff\
fffdfc1ff807fff87fc003fff1ff0003ffe3fc0003ffcff00003ff9fe000\
03ff3f800007feff000007fdfc00000ffbf800001ff7f000003fefc00000\
3fff8000007fff000000fffe000001fffc000003fff8000007fff000000f\
ffe000001fffc000003fff8000007fff000000fffe000001fefe000007fd\
fc00000ffbf800001ff7f800007fe7f00000ffcff00001ff9fe00007ff1f\
e0001ffe3fe0007ffc3fe001fff83ff00feff07fffffdfe07fffff3fc07f\
fffc7f807ffff0ff003fff81fe001ffe03f80007c007f00000000fe00000\
003fc00000007f9fc00000ff3f800001fc7f800007f87f00000ff0ff0000\
3fc1ff0000ff81ff0007fe01ff803ff803fffffff003ffffffc003fffffe\
0001fffff80000ffff8000001ff8000"
self.hmatrixwid = 35
self.hmatrixhei = 62
self.hmatrixori = 0
self.hmatrix = "3f80000007f0000000fe0000001fc0000003f80000007f0000000fe00000\
01fc0000003f80000007f0000000fe0000001fc0000003f80000007f0000\
000fe0000001fc0000003f803fe007f03fffc0fe0ffffc1fc7ffffe3f9ff\
fffe7f7fffffcfffe01ffdfff000ffbffc000ff7ff0000ffffc0001ffff0\
0001fffe00003fff800007fff00000fffe00001fff800003fff000007ffe\
00000fffc00001fff800003fff000007ffe00000fffc00001fff800003ff\
f000007ffe00000fffc00001fff800003fff000007ffe00000fffc00001f\
ff800003fff000007ffe00000fffc00001fff800003fff000007ffe00000\
fffc00001fff800003fff000007ffe00000fffc00001fff800003fff0000\
07f"
self.imatrixwid = 7
self.imatrixhei = 63
self.imatrixori = 0
self.imatrix = "1fffffffffffffffc0000000000000007fffffffffffffffffffffffffff\
fffffffffffffffffffffffffffffffffffffffffffffffffff"
self.jmatrixwid = 14
self.jmatrixhei = 80
self.jmatrixori = 0
self.jmatrix = "1fc07f01fc07f01fc07f01fc07f01fc00000000000000000000000000000\
7f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01\
fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07\
f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01fc07f01f\
c07f01fc0ff03fc1fffffbffefff3ff8ffc3f00"
self.kmatrixwid = 38
self.kmatrixhei = 62
self.kmatrixori = 0
self.kmatrix = "fe00000003f80000000fe00000003f80000000fe00000003f80000000fe0\
0000003f80000000fe00000003f80000000fe00000003f80000000fe0000\
0003f80000000fe00000003f80000000fe00000003f80001ff8fe0000ff8\
3f80007fc0fe0003fe03f8001ff00fe000ff803f8007fc00fe003fe003f8\
01ff000fe00ff8003f807fc000fe03fe0003f81ff0000fe0ff80003f87fc\
0000fe3fe00003f9ffc0000fe7ff80003fbffe0000fffffc0003fffff000\
0fffbfe0003ffc7fc000ffe0ff0003ff03fe000ff807fc003fc01ff000fe\
003fe003f8007f800fe001ff003f8003fe00fe0007f803f8001ff00fe000\
3fe03f8000ff80fe0001ff03f80003fc0fe0000ff83f80001ff0fe00003f\
c3f80000ff8fe00001fe3f800007fcfe00000ffbf800001ff"
self.lmatrixwid = 7
self.lmatrixhei = 62
self.lmatrixori = 0
self.lmatrix = "3fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
fffffffffffffffffffffffffffffffffffffffffffffffff"
self.mmatrixwid = 59
self.mmatrixhei = 46
self.mmatrixori = 16
self.mmatrix = "3fe0001ff007f03fff001fffc0fe0ffff807fffe1fc7ffff83ffffe3f9ff\
fff8fffffe7f7fffffbfffffcfffc07ffff01ffdffe003fff800ffbff800\
3ffe000ffffe0003ff8000ffff80007fe0001ffff00007fc0001fffc0000\
ff00003fff80001fe00007ffe00003f80000fffc00007f00001fff80000f\
e00003fff00001fc00007ffe00003f80000fffc00007f00001fff80000fe\
00003fff00001fc00007ffe00003f80000fffc00007f00001fff80000fe0\
0003fff00001fc00007ffe00003f80000fffc00007f00001fff80000fe00\
003fff00001fc00007ffe00003f80000fffc00007f00001fff80000fe000\
03fff00001fc00007ffe00003f80000fffc00007f00001fff80000fe0000\
3fff00001fc00007ffe00003f80000fffc00007f00001fff80000fe00003\
fff00001fc00007ffe00003f80000fffc00007f00001fff80000fe00003f\
ff00001fc00007f"
self.nmatrixwid = 35
self.nmatrixhei = 46
self.nmatrixori = 16
self.nmatrix = "1fe007f01fff80fe0ffffc1fc3ffffc3f8fffffc7f3fffffcfefe01ffdff\
f001ffbffc000ff7ff0001ffffc0001ffff00001fffe00003fff800007ff\
f00000fffe00001fff800003fff000007ffe00000fffc00001fff800003f\
ff000007ffe00000fffc00001fff800003fff000007ffe00000fffc00001\
fff800003fff000007ffe00000fffc00001fff800003fff000007ffe0000\
0fffc00001fff800003fff000007ffe00000fffc00001fff800003fff000\
007ffe00000fffc00001fff800003fff000007f"
self.omatrixwid = 40
self.omatrixhei = 48
self.omatrixori = 16
self.omatrix = "1ff8000000ffff000003ffffc0000ffffff0001ffffff8003ffffffc007f\
fffffe00fff00fff00ffc003ff01ff0000ff81fe00007f83fc00003fc3fc\
00003fc3f800001fc7f800001fe7f000000fe7f000000fe7f000000fefe0\
00000fffe0000007ffe0000007ffe0000007ffe0000007ffe0000007ffe0\
000007ffe0000007ffe0000007ffe0000007ffe0000007fff000000ff7f0\
00000fe7f000000fe7f000000fe7f800001fe3f800001fc3fc00003fc3fc\
00003fc1fe00007f81ff0000ff80ffc003ff007ff00ffe007ffffffe003f\
fffffc001ffffff8000fffffe00003ffffc00000ffff0000000ff0000"
self.pmatrixwid = 40
self.pmatrixhei = 64
self.pmatrixori = 16
self.pmatrix = "7fc000fe03fff800fe0ffffe00fe1fffff00fe3fffff80fe7fffffc0feff\
ffffe0ffff00fff0fffc003ff0fff8000ff8fff00007f8ffe00003fcffe0\
0003fcffc00001fcffc00001feff800000feff800000feff800000feff00\
0000ffff0000007fff0000007fff0000007fff0000007fff0000007fff00\
00007f7f0000007f7f0000007f7f0000007f7f0000007f7f800000ff7f80\
0000fe7f800000fe7f800000fe7fc00001fe7fc00001fc7fe00003fc7fe0\
0007fc7ff0000ff87ff8001ff87ffc003ff07fff80fff07fffffffe07f7f\
ffffc07f3fffff807f1fffff007f07fffc007f01fff0007f003f80007f00\
0000007f000000007f000000007f000000007f000000007f000000007f00\
0000007f000000007f000000007f000000007f000000007f000000007f00\
0000007f000000007f000000007f00000000"
self.qmatrixwid = 40
self.qmatrixhei = 64
self.qmatrixori = 16
self.qmatrix = "1fe0000000fffc07f003ffff07f00fffff87f01fffffe7f03ffffff7f07f\
fffff7f07ff007fff0ffc003fff1ff0000fff1fe00007ff3fe00003ff3fc\
00003ff3f800001ff7f800001ff7f000000ff7f000000ff7f000000ff7f0\
00000fffe0000007ffe0000007ffe0000007ffe0000007ffe0000007ffe0\
000007ffe0000007ffe0000007ffe0000007ffe0000007fff000000ff7f0\
00000ff7f000000ff7f000000ff7f800001ff7f800001ff3fc00003ff3fe\
00003ff1ff00007ff1ff8000fff0ffc003fff0fff00ffff07ffffff7f03f\
fffff7f01fffffe7f00fffff87f003ffff07f000fffc07f0001fc007f000\
000007f000000007f000000007f000000007f000000007f000000007f000\
000007f000000007f000000007f000000007f000000007f000000007f000\
000007f000000007f000000007f000000007f"
self.rmatrixwid = 22
self.rmatrixhei = 46
self.rmatrixori = 16
self.rmatrix = "3bf80fffe07fff87fffe3ffff9ffffe7ffffbfe0fffc03ffc00ffe003ff0\
00ff8003fe000ff0003fc000ff0003f8000fe0003f8000fe0003f8000fe0\
003f8000fe0003f8000fe0003f8000fe0003f8000fe0003f8000fe0003f8\
000fe0003f8000fe0003f8000fe0003f8000fe0003f8000fe0003f8000fe\
0003f8000"
self.smatrixwid = 36
self.smatrixhei = 48
self.smatrixori = 16
self.smatrix = "7fe000003fffc0001fffff0003fffffc007fffffe00fffffff01fffffff0\
1ff803ff83fe0007f83f80007f87f80003fc7f00001fc7f00001fc7f0000\
1fc7f00000007f00000007f80000003fc0000003ff8000003fff000001ff\
ff00000fffff00007fffff0001fffffc0007fffff0001fffff80001ffffc\
00001fffc000001ffe0000003fe0000001ff0000000ff00000007ffe0000\
07ffe000007fff000007f7f000007f7f80000fe7f80001fe7fe0007fe3ff\
801ffc3fffffff81fffffff80fffffff007fffffc001fffff80007fffc00\
0007fc000"
self.tmatrixwid = 20
self.tmatrixhei = 59
self.tmatrixori = 5
self.tmatrix = "3f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f80f\
fffffffffffffffffffffffffffff03f8003f8003f8003f8003f8003f800\
3f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f800\
3f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f8003f800\
3f8003f8003f8003f8003fc003fff03fff01fff00fff007ff001fe"
self.umatrixwid = 35
self.umatrixhei = 47
self.umatrixori = 17
self.umatrix = "1fc00001fff800003fff000007ffe00000fffc00001fff800003fff00000\
7ffe00000fffc00001fff800003fff000007ffe00000fffc00001fff8000\
03fff000007ffe00000fffc00001fff800003fff000007ffe00000fffc00\
001fff800003fff000007ffe00000fffc00001fff800003fff000007ffe0\
0000fffc00001fff800003fff000007ffe00001fffc00003fff800007fff\
00001fffe00003fffe0000ffffe0003ffbfc000fff7fe003ffeffe03fdfc\
ffffff3f8fffffc7f0fffff0fe0ffffc1fc07ffe000001fe0000"
self.vmatrixwid = 40
self.vmatrixhei = 45
self.vmatrixori = 17
self.vmatrix = "ff000000ffff000000ff7f800001fe7f800001fe7f800001fe3fc00003fc\
3fc00003fc3fc00003f81fe00007f81fe00007f81fe00007f00ff0000ff0\
0ff0000ff007f0000fe007f8001fe007f8001fc003f8001fc003fc003fc0\
03fc003f8001fc003f8001fe007f8001fe007f0000fe007f0000ff00fe00\
00ff00fe00007f00fe00007f01fc00007f81fc00003f83f800003f83f800\
003fc3f800001fc7f000001fc7f000000fe7f000000fefe000000fefe000\
0007ffc0000007ffc0000007ffc0000003ff80000003ff80000003ff8000\
0001ff00000001ff00000001fe0000"
self.wmatrixwid = 60
self.wmatrixhei = 45
self.wmatrixori = 17
self.wmatrix = "ff00003fc0000ff7f00007fe0000ff7f80007fe0001fe7f80007fe0001fe\
7f80007fe0001fe3f8000fff0001fc3fc000fff0003fc3fc000fff0003fc\
1fc000fff0003f81fc001fff8003f81fe001fff8007f81fe001fbf8007f8\
0fe001fbf8007f00fe003f9fc007f00ff003f9fc00ff007f003f1fc00fe0\
07f003f1fc00fe007f007f0fe00fe007f807f0fe01fe003f807e0fe01fc0\
03f807e07f01fc003fc0fe07f01fc001fc0fe07f03f8001fc0fc07f03f80\
01fc0fc03f83f8001fe1fc03f83f8000fe1fc03f87f0000fe1f803f87f00\
00fe1f801fc7f00007f3f801fc7e00007f3f801fcfe00007f3f001fcfe00\
007f3f000fefe00003fff000fefc00003fff000fffc00003ffe000fffc00\
001ffe0007ff800001ffe0007ff800001ffe0007ff800001ffc0007ff800\
000ffc0003ff000000ffc0003ff000000ffc0003ff0000007f80003fe000\
0007f80001fe000"
self.xmatrixwid = 39
self.xmatrixhei = 45
self.xmatrixori = 17
self.xmatrix = "3fe00000ff3fc00003fc3fc0000ff03fc0001fc07f80007f807f8001fe00\
7f8003f800ff000ff000ff003fc000ff007f0001fe01fe0001fe07f80001\
fe0fe00003fc3fc00003fcff000003fdfc000007fff0000007ffe0000007\
ff8000000ffe0000000ffc0000000ff00000001fe00000007fe0000000ff\
e0000003ffc000000fffc000003fffc000007f7f800001fe7f800007f87f\
80000fe0ff00003fc0ff0000ff00ff0001fc01fe0007f801fe001fe001fe\
003f8003fc00ff0003fc03fc0007fc0ff00007f81fe00007f87f80000ff9\
fe00000ff7fc00000ff"
self.ymatrixwid = 39
self.ymatrixhei = 64
self.ymatrixori = 17
self.ymatrix = "fe000001fffe000007f9fc00000ff3f800001fc7f800007f8ff00000ff0f\
e00001fc1fe00007f83fc0000ff03f80001fc07f80007f80ff0000ff00fe\
0001fc01fe0007f803fc000fe003f8001fc007f8007f800ff000fe000fe0\
01fc001fe007f8003fc00fe0003f803fc0007f807f8000ff00fe0000fe03\
fc0001fc07f00003fc0fe00003f83fc00007f07f00000ff0fe00000fe3fc\
00001fc7f000003fcfe000003fbfc000007f7f000000fffe000000fff800\
0001fff0000003ffe0000003ff80000007ff0000000ffe0000000ff80000\
001ff00000003fe00000003f80000000ff00000001fe00000003f8000000\
0ff00000001fc00000007f80000000ff00000001fc00000007f80000001f\
f00000007fc000003fff8000007ffe000000fff8000001ffe0000003ff80\
000007fe00000003e0000000"
self.zmatrixwid = 36
self.zmatrixhei = 45
self.zmatrixori = 17
self.zmatrix = "7fffffffe7fffffffe7fffffffe7fffffffe7fffffffe7fffffffe000000\
3fc0000007f8000000ff8000001ff0000001fe0000003fc0000007f80000\
00ff8000001ff0000003fe0000003fc0000007f8000000ff8000001ff000\
0003fe0000003fc0000007f8000000ff8000001ff0000003fe0000003fc0\
000007f8000000ff8000001ff0000003fe0000003fc0000007fc000000ff\
8000001ff0000003fe0000003fc0000007fc000000ff8000000fffffffff\
fffffffffffffffffffffffffffffffffffffffffffff"
self.Amatrixwid = 55
self.Amatrixhei = 62
self.Amatrixori = 0
self.Amatrix = "ffe00000000001ffc00000000003ffc0000000000fff80000000001fff00\
000000003fff0000000000fffe0000000001fffc0000000003fffc000000\
000ff7f8000000001feff0000000007fcff000000000ff1fe000000001fe\
3fc000000007f83fc00000000ff07f800000001fe0ff800000007f80ff00\
000000ff01fe00000001fe03fe00000007f803fc0000000ff007f8000000\
1fe00ff80000007f800ff0000000ff001fe0000003fe003fe0000007f800\
3fc000000ff0007f8000003fc000ff8000007f8000ff000000ff0001fe00\
0003fc0003fe000007f80003fc00000ff00007fc00003fc0000ff800007f\
80000ff00000ff00001ff00003ffffffffe00007ffffffffc0001fffffff\
ffc0003fffffffff80007fffffffff0001ffffffffff0003fffffffffe00\
07f8000003fc001ff0000007fc003fc000000ff8007f8000000ff001ff00\
00001ff003fc0000003fe007f80000003fc01ff00000007fc03fc0000000\
ff80ff80000000ff81ff00000001ff03fc00000003fe0ff800000003fe1f\
e000000007fc3fc00000000ff8ff800000000ff9fe000000001ff7fc0000\
00001ff"
self.Bmatrixwid = 46
self.Bmatrixhei = 62
self.Bmatrixori = 0
self.Bmatrix = "fffffffc0003ffffffff000fffffffff003ffffffffe00fffffffffc03ff\
fffffff80ffffffffff03fc00001ffe0ff000001ff83fc000003ff0ff000\
0007fc3fc000000ff0ff0000003fe3fc0000007f8ff0000001fe3fc00000\
07f8ff0000001fe3fc0000007f8ff0000001fe3fc0000007f0ff0000001f\
c3fc000000ff0ff0000003f83fc000001fe0ff000000ff83fc000007fc0f\
f00000ffe03fffffffff00fffffffff003ffffffff800fffffffff803fff\
ffffff80ffffffffff03ffffffffff0ff000001ffc3fc000001ff8ff0000\
001ff3fc0000003feff0000000ffbfc0000001feff00000007fbfc000000\
0ffff00000003fffc0000000ffff00000003fffc0000000ffff00000003f\
bfc0000000feff00000007fbfc0000001feff00000007fbfc0000003fcff\
0000001ff3fc000000ff8ff000000ffe3ffffffffff0ffffffffff83ffff\
fffffc0fffffffffe03fffffffff00fffffffff003fffffffc000"
self.Cmatrixwid = 53
self.Cmatrixhei = 65
self.Cmatrixori = -1
self.Cmatrix = "3ff8000000001ffffc00000007fffffc000000fffffff000001fffffffe0\
0001ffffffff80001ffffffffe0001fff000fff8001ffc0001ffc001ffc0\
0003ff001ff800000ffc00ff8000003fe00ff8000000ff807f80000003fc\
07fc0000001fe03fc00000007f83fc00000003fc1fe00000001fe1fe0000\
00007f8ff000000003fc7f800000000007f800000000003fc00000000001\
fe00000000000ff000000000007f800000000003f800000000003fc00000\
000001fe00000000000ff000000000007f800000000003fc00000000001f\
e00000000000ff000000000007f800000000003fc00000000001fe000000\
00000ff000000000007f800000000001fe00000000000ff000000000ff7f\
8000000007fbfc000000003fdfe000000003fc7f800000001fe3fc000000\
00ff1ff000000007f87f800000007fc3fe00000003fc0ff00000003fe07f\
c0000001ff01ff0000001ff00ffc000000ff803ff000000ff801ffc00000\
ffc007ff00000ffc001ffe0001ffc0007ffe007ffc0001ffffffffe00007\
fffffffc00001fffffffc000003ffffffc0000007fffff800000007fffe0\
000000003ff00000"
self.Dmatrixwid = 49
self.Dmatrixhei = 62
self.Dmatrixori = 0
self.Dmatrix = "3ffffffe00001ffffffff0000ffffffffe0007ffffffffc003fffffffff0\
01fffffffffe00ffffffffff807f800007ffe03fc000007ff01fe000000f\
fc0ff0000003ff07f8000000ff83fc0000003fe1fe0000000ff0ff000000\
07fc7f80000001fe3fc0000000ff9fe00000003fcff00000001fe7f80000\
0007fbfc00000003fdfe00000001feff00000000ff7f800000007fbfc000\
00001fffe00000000ffff000000007fff800000003fffc00000001fffe00\
000000ffff000000007fff800000003fffc00000001fffe00000000ffff0\
00000007fff800000003fffc00000001fffe00000000ffff00000000ff7f\
800000007fbfc00000003fdfe00000001feff00000000ff7f80000000ff3\
fc00000007f9fe00000007fcff00000003fc7f80000003fe3fc0000001fe\
1fe0000001ff0ff0000001ff07f8000001ff83fc000001ff81fe000003ff\
80ff00000fffc07fffffffffc03fffffffff801fffffffff800fffffffff\
8007ffffffff0003fffffffe0001fffffff00000"
self.Ematrixwid = 44
self.Ematrixhei = 62
self.Ematrixori = 0
self.Ematrix = "ffffffffffeffffffffffeffffffffffeffffffffffeffffffffffefffff\
fffffeffffffffffeff000000000ff000000000ff000000000ff00000000\
0ff000000000ff000000000ff000000000ff000000000ff000000000ff00\
0000000ff000000000ff000000000ff000000000ff000000000ff0000000\
00ff000000000ff000000000ff000000000ff000000000ff000000000fff\
fffffffcffffffffffcffffffffffcffffffffffcffffffffffcffffffff\
ffcffffffffffcff000000000ff000000000ff000000000ff000000000ff\
000000000ff000000000ff000000000ff000000000ff000000000ff00000\
0000ff000000000ff000000000ff000000000ff000000000ff000000000f\
f000000000ff000000000ff000000000ff000000000ff000000000ff0000\
00000fffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffffffffffffffff"
self.Fmatrixwid = 42
self.Fmatrixhei = 62
self.Fmatrixori = 0
self.Fmatrix = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
fffffffffffffffc00000000ff000000003fc00000000ff000000003fc00\
000000ff000000003fc00000000ff000000003fc00000000ff000000003f\
c00000000ff000000003fc00000000ff000000003fc00000000ff0000000\
03fc00000000ff000000003fc00000000ff000000003fffffffff0ffffff\
fffc3fffffffff0fffffffffc3fffffffff0fffffffffc3fffffffff0ff0\
00000003fc00000000ff000000003fc00000000ff000000003fc00000000\
ff000000003fc00000000ff000000003fc00000000ff000000003fc00000\
000ff000000003fc00000000ff000000003fc00000000ff000000003fc00\
000000ff000000003fc00000000ff000000003fc00000000ff000000003f\
c00000000ff000000003fc00000000ff000000003fc00000000"
self.Gmatrixwid = 56
self.Gmatrixhei = 65
self.Gmatrixori = -1
self.Gmatrix = "ffe0000000001fffff000000007fffffc0000001fffffff0000007ffffff\
fc00000ffffffffe00003fffffffff00007fff000fff8000fff80003ffc0\
00ffe00000ffe001ff8000003ff003ff0000001ff007fe0000000ff807fc\
0000000ff80ff800000007fc0ff800000003fc1ff000000003fc1fe00000\
0003fc1fe000000001fe3fc000000001fe3fc000000000003fc000000000\
007f8000000000007f8000000000007f8000000000007f8000000000007f\
000000000000ff000000000000ff000000000000ff000000000000ff0000\
03ffffffff000003ffffffff000003ffffffff000003ffffffff000003ff\
ffffff000003ffffffff000003ffffffff00000000007f7f80000000007f\
7f80000000007f7f80000000007f7f80000000007f7fc0000000007f3fc0\
000000007f3fc000000000ff3fe000000000ff1fe000000000ff1ff00000\
0001ff1ff000000001ff0ff800000003ff0ffc00000007ff07fc00000007\
ff03fe0000000fff03ff0000001fff01ffc000007fff00ffe00000ffff00\
7ffc0003ff7f003fff001ffe3f001ffffffffc3f000ffffffff83f0007ff\
ffffe03f0001ffffff801f00007fffff001f00000ffff80000000000ff00\
0000"
self.Hmatrixwid = 48
self.Hmatrixhei = 62
self.Hmatrixori = 0
self.Hmatrix = "ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffffffffffffffffffffffffffffffffffffff\
ffffffffffffffffffffffffffffffffffffffffffffffffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ff"
self.Imatrixwid = 8
self.Imatrixhei = 62
self.Imatrixori = 0
self.Imatrix = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffff"
self.Jmatrixwid = 35
self.Jmatrixhei = 64
self.Jmatrixori = 0
self.Jmatrix = "1fe0000003fc0000007f8000000ff0000001fe0000003fc0000007f80000\
00ff0000001fe0000003fc0000007f8000000ff0000001fe0000003fc000\
0007f8000000ff0000001fe0000003fc0000007f8000000ff0000001fe00\
00003fc0000007f8000000ff0000001fe0000003fc0000007f8000000ff0\
000001fe0000003fc0000007f8000000ff0000001fe0000003fc0000007f\
8000000ff0000001fe0000003fc0000007f8000000ff0000001fe0000003\
fffc00007fff80000ffff00001fffe00003fffc00007fff80000ffff0000\
1fffe00003fffc0000ffffc0001fe7f80003fcff8000ff9ff8003fe1ff80\
0ffc3ffc07ff03ffffffe03ffffff803fffffe003fffff0001ffffc0000f\
ffe000003fc000"
self.Kmatrixwid = 49
self.Kmatrixhei = 62
self.Kmatrixori = 0
self.Kmatrix = "3fc0000000ffffe0000000ffeff0000000ffe7f8000000ffe3fc000000ff\
e1fe000000ffe0ff000000ffe07f800000ffe03fc00000ffe01fe00000ff\
e00ff000007fe007f800007fe003fc00007fe001fe00007fe000ff00007f\
e0007f80007fe0003fc0007fe0001fe0007fe0000ff0007fe00007f8007f\
e00003fc007fe00001fe007fe00000ff007fe000007f807fe000003fc07f\
e000001fe07fe000000ff07ff8000007f87ffc000003fc7fff000001fe7f\
ffc00000ff7fffe000007fffcff800003fffc3fe00001fffc1ff00000fff\
c07fc00007ffc01ff00003ffc00ffc0001ffc003fe0000ffc000ff80007f\
c0007fe0003fc0001ff0001fe00007fc000ff00001ff0007f80000ff8003\
fc00003fe001fe00000ff800ff000007fe007f800001ff003fc000007fc0\
1fe000003ff00ff000000ff807f8000003fe03fc000001ff81fe0000007f\
c0ff0000001ff07f8000000ffc3fc0000003fe1fe0000000ff8ff0000000\
3fe7f80000001ffbfc00000007fdfe00000001ff"
self.Lmatrixwid = 39
self.Lmatrixhei = 62
self.Lmatrixori = 0
self.Lmatrix = "3fc00000007f80000000ff00000001fe00000003fc00000007f80000000f\
f00000001fe00000003fc00000007f80000000ff00000001fe00000003fc\
00000007f80000000ff00000001fe00000003fc00000007f80000000ff00\
000001fe00000003fc00000007f80000000ff00000001fe00000003fc000\
00007f80000000ff00000001fe00000003fc00000007f80000000ff00000\
001fe00000003fc00000007f80000000ff00000001fe00000003fc000000\
07f80000000ff00000001fe00000003fc00000007f80000000ff00000001\
fe00000003fc00000007f80000000ff00000001fe00000003fc00000007f\
80000000ff00000001fe00000003fc00000007f80000000ff00000001fff\
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
fffff"
self.Mmatrixwid = 57
self.Mmatrixhei = 62
self.Mmatrixori = 0
self.Mmatrix = "3ff800000000fffffc000000007ffffe000000003fffff800000003fffff\
c00000001fffffe00000000ffffff80000000ffffffc00000007fffffe00\
000003ffffff80000003ffffffc0000001ffffffe0000000fffffbf80000\
00fefffdfc0000007f7ffefe0000003fbfff3f8000003f9fff9fc000001f\
cfffcfe000000fe7ffe3f800000fe3fff1fc000007f1fff8fe000003f8ff\
fc3f800003f87ffe1fc00001fc3fff0fe00000fe1fff83f80000fe0fffc1\
fc00007f07ffe0fe00003f83fff03f80003f81fff81fc0001fc0fffc0fe0\
000fe07ffe03f8000fe03fff01fc0007f01fff807e0003f80fffc03f8003\
f807ffe01fc001fc03fff007e000fe01fff803f800fe00fffc01fc007f00\
7ffe007e003f803fff003f803f801fff801fc01fc00fffc007e00fe007ff\
e003f80fe003fff001fc07f001fff8007e03f800fffc003f83f8007ffe00\
1fc1fc003fff0007e0fe001fff8003f8fe000fffc001fc7f0007ffe0007e\
3f8003fff0003fbf8001fff8001fdfc000fffc0007ffe0007ffe0003ffe0\
003fff0001fff0001fff80007ff8000fffc0003ff80007ffe0001ffc0003\
fff00007fe0001fff80003fe0000fffc0001ff00007f"
self.Nmatrixwid = 48
self.Nmatrixhei = 62
self.Nmatrixori = 0
self.Nmatrix = "ff000000007fff800000007fffc00000007fffc00000007fffe00000007f\
fff00000007ffff00000007ffff80000007ffffc0000007ffffc0000007f\
fffe0000007fffff0000007fffff0000007ffeff8000007ffe7f8000007f\
fe7fc000007ffe3fe000007ffe1fe000007ffe1ff000007ffe0ff800007f\
fe07f800007ffe07fc00007ffe03fe00007ffe01fe00007ffe01ff00007f\
fe00ff80007ffe007f80007ffe007fc0007ffe003fe0007ffe003fe0007f\
fe001ff0007ffe000ff8007ffe000ff8007ffe0007fc007ffe0003fe007f\
fe0003fe007ffe0001ff007ffe0000ff007ffe0000ff807ffe00007fc07f\
fe00003fc07ffe00003fe07ffe00001ff07ffe00000ff07ffe00000ff87f\
fe000007fc7ffe000003fc7ffe000003fe7ffe000001ff7ffe000001ff7f\
fe000000fffffe0000007ffffe0000007ffffe0000003ffffe0000001fff\
fe0000001ffffe0000000ffffe00000007fffe00000007fffe00000003ff\
fe00000001fffe00000001ff"
self.Omatrixwid = 60
self.Omatrixhei = 65
self.Omatrixori = -1
self.Omatrix = "fff00000000000fffff0000000007fffffe00000000fffffff80000003ff\
fffffc000000fffffffff000001fffffffff800003fff000fffc00007ff8\
0001ffe0000ffe000007ff0001ffc000003ff8003ff0000000ffc003fe00\
000007fc007fc00000003fe00ff800000001ff00ff800000001ff01ff000\
000000ff81fe0000000007f83fe0000000007f83fe0000000007fc3fc000\
0000003fc3fc0000000003fc7f80000000001fe7f80000000001fe7f8000\
0000001fe7f80000000001fe7f00000000000feff00000000000ffff0000\
0000000ffff00000000000ffff00000000000ffff00000000000ffff0000\
0000000ffff00000000000ffff00000000000ffff00000000000ffff0000\
0000000ffff00000000000ff7f00000000000fe7f80000000001fe7f8000\
0000001fe7f80000000001fe7f80000000001fe3fc0000000003fc3fc000\
0000003fc3fe0000000007fc1fe0000000007f81ff000000000ff80ff000\
000000ff00ff800000001ff00ffc00000003ff007fc00000003fe003ff00\
00000ffc003ff8000001ff8001ffc000003ff8000fff00000fff00007ffc\
0003ffe00003fff801fffc00001fffffffff800000fffffffff0000003ff\
fffffc0000000fffffff000000003fffffc0000000007fffe00000000000\
3fe000000"
self.Pmatrixwid = 44
self.Pmatrixhei = 62
self.Pmatrixori = 0
self.Pmatrix = "fffffffe000ffffffffc00fffffffff00fffffffffc0fffffffffe0fffff\
fffff0ffffffffff8ff000007ff8ff000001ffcff0000007fcff0000003f\
eff0000003feff0000001feff0000001feff0000000ffff0000000ffff00\
00000ffff0000000ffff0000000ffff0000000ffff0000000ffff0000000\
ffff0000001ffff0000001feff0000003feff0000007feff000000ffcff0\
00001ffcff000007ff8ffffffffff8ffffffffff0fffffffffe0ffffffff\
fc0fffffffff00ffffffffc00fffffffe000ff000000000ff000000000ff\
000000000ff000000000ff000000000ff000000000ff000000000ff00000\
0000ff000000000ff000000000ff000000000ff000000000ff000000000f\
f000000000ff000000000ff000000000ff000000000ff000000000ff0000\
00000ff000000000ff000000000ff000000000ff000000000ff000000000\
ff000000000ff000000000"
self.Qmatrixwid = 60
self.Qmatrixhei = 68
self.Qmatrixori = -1
self.Qmatrix = "fff00000000000fffff0000000007fffffe00000000fffffff00000003ff\
fffffc000000fffffffff000001fffffffff800003fff000fffc00007ff8\
0001ffe0000ffe000007ff0001ffc000003ff8003ff0000000ffc003fe00\
000007fc007fc00000003fe00ff800000001ff00ff800000001ff01ff000\
000000ff81fe0000000007f83fe0000000007fc3fe0000000007fc3fc000\
0000003fc3fc0000000003fc7f80000000001fe7f80000000001fe7f8000\
0000001fe7f80000000001fe7f00000000000feff00000000000ffff0000\
0000000ffff00000000000ffff00000000000ffff00000000000ffff0000\
0000000ffff00000000000ffff00000000000ffff00000000000ffff0000\
0000000ffff00000000000ff7f00000000000fe7f80000000001fe7f8000\
0000001fe7f80000000001fe7f80000000001fe3fc0000000003fc3fc000\
0000003fc3fe0000000007fc1fe0000006007f81ff000000f00ff80ff000\
001f80ff00ff800001fc1ff00ffc00003ff3ff007fc00001ffbfe003fe00\
000ffffc003ff800007fffc001ffc00001fff8000fff00000fff00007ff8\
0003ffe00003fff801ffff00001ffffffffff80000ffffffffffe00003ff\
ffffffff00000fffffff9ff800003fffffe07fc000007ffff003fe000000\
3fe0001fc0000000000000f8000000000000070000000000000010"
self.Rmatrixwid = 50
self.Rmatrixhei = 62
self.Rmatrixori = 0
self.Rmatrix = "ffffffffc0003fffffffff000ffffffffff003fffffffffe00ffffffffff\
e03ffffffffff80fffffffffff03fc000001ffe0ff0000001ff83fc00000\
03ff0ff00000007fc3fc0000000ff0ff00000003fe3fc00000007f8ff000\
00001fe3fc00000007f8ff00000001fe3fc00000007f8ff00000001fe3fc\
00000007f8ff00000001fc3fc00000007f0ff00000003fc3fc0000000ff0\
ff00000007f83fc0000003fc0ff0000001ff03fc000003ff80ffffffffff\
c03fffffffffc00fffffffffe003fffffffff800ffffffffff803fffffff\
fff00ffffffffffe03fc000001ffc0ff0000001ff03fc0000003fe0ff000\
00007f83fc0000000fe0ff00000003fc3fc00000007f0ff00000001fc3fc\
00000007f0ff00000001fc3fc00000007f0ff00000001fc3fc00000007f0\
ff00000001fc3fc00000007f0ff00000001fc3fc00000007f0ff00000001\
fc3fc00000007f0ff00000001fc3fc00000007f8ff00000001fe3fc00000\
007f8ff00000000ff3fc00000003feff00000000ffffc00000003ff"
self.Smatrixwid = 49
self.Smatrixhei = 65
self.Smatrixori = -1
self.Smatrix = "7ff800000003ffffc0000007fffff800000fffffff00000fffffffc0001f\
fffffff8001ffffffffe001fff000fff000ffc0000ffc00ffc00003ff007\
fc000007f807fc000001fe03fc000000ff01fe0000003f81fe0000001fc0\
ff0000000ff07f80000003f83fc0000001fc1fe0000000fe0ff000000000\
07fc0000000003fe0000000000ff80000000007fe0000000003ffc000000\
000fff8000000007fff800000001ffffc00000007ffffe0000001ffffff0\
000003ffffff8000007ffffff800000fffffff0000007fffffc0000003ff\
fff80000003ffffe00000001ffff800000000fffc000000001fff0000000\
003ff80000000007fe0000000001ff00000000007fbf800000003fffc000\
00000fffe000000007fff000000003fff800000001fefe00000000ff7f00\
0000007fbfc00000007f9fe00000003fc7f80000003fe3fe0000001fe0ff\
8000001ff07fe000003ff01ffc00007ff007ffe001fff801fffffffff800\
7ffffffff8001ffffffff00003fffffff000007fffffe000000fffff8000\
00003ff80000"
self.Tmatrixwid = 48
self.Tmatrixhei = 62
self.Tmatrixori = 0
self.Tmatrix = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffffffffffffffffff00000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff0000000000ff0000000000ff0000000000ff00000\
00000ff0000000000ff00000"
self.Umatrixwid = 48
self.Umatrixhei = 64
self.Umatrixori = 0
self.Umatrix = "ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ffff00000000ffff00000000ff\
ff00000000ffff00000000ffff00000000ff7f80000001fe7f80000001fe\
7fc0000003fe7fc0000003fc3fe0000007fc3ff000000ff81ff800003ff8\
0ffe0000fff007ffc007ffe007ffffffffc003ffffffff8000ffffffff00\
007ffffffc00000ffffff0000003ffffc00000001ff80000"
self.Vmatrixwid = 52
self.Vmatrixhei = 62
self.Vmatrixori = 0
self.Vmatrix = "ff800000000ff7f800000000fe7fc00000001fe7fc00000001fe3fc00000\
001fc3fe00000003fc1fe00000003fc1fe00000003f81ff00000007f80ff\
00000007f80ff00000007f00ff8000000ff007f8000000ff007f8000000f\
e007fc000001fe003fc000001fe003fc000001fc003fe000003fc001fe00\
0003fc001fe000003f8000ff000007f8000ff000007f8000ff000007f000\
07f80000ff00007f80000ff00007fc0000fe00003fc0001fe00003fc0001\
fe00003fe0001fc00001fe0003fc00001fe0003fc00000ff0003f800000f\
f0007f800000ff0007f8000007f8007f0000007f800ff0000007f800ff00\
00003fc00fe0000003fc01fe0000003fc01fe0000001fe01fc0000001fe0\
3fc0000001fe03fc0000000ff03f80000000ff07f800000007f07f800000\
007f87f000000007f87f000000003fcff000000003fcfe000000003fcfe0\
00000001fffe000000001fffc000000001fffc000000000fff8000000000\
fff80000000007ff80000000007ff00000000007ff00000000003ff00000\
000003fe00000000003fe00000"
self.Wmatrixwid = 77
self.Wmatrixhei = 62
self.Wmatrixori = 0
self.Wmatrix = "3fe000000ff8000003ffff0000007fc000001ff7f8000003fe000000ff3f\
e000003ff800000ff9ff000001ffc000007fcff800000ffe000003fe3fc0\
0000fff000001fe1ff000007ffc00001ff0ff800003ffe00000ff87fc000\
01fff000007fc1fe00001fffc00003fc0ff80000fffe00003fe07fc00007\
f7f00001ff01fe00003fbf80000ff80ff00003f9fe00007f807f80001fc7\
f00003fc03fe0000fe3f80003fe00ff0000ff1fe0001fe007f80007f0ff0\
000ff003fc0003f83f80007f801ff0001fc1fc0007fc007f8001fe0ff000\
3fc003fc000fe03f8001fe001fe0007f01fc000ff000ff8003f80ff0007f\
8003fc003f807f8007f8001fe001fc01fc003fc000ff000fe00fe001fe00\
07f800ff007f800ff0001fe007f001fc00ff0000ff003f800fe007f80007\
f801fc007f003fc0003fc01fe003fc01fe0000ff00fe000fe00fe00007f8\
07f0007f00ff00003fc03f8003fc07f80001fe03f8001fe03fc00007f81f\
c0007f01fc00003fc0fe0003f81fe00001fe0ff0001fe0ff00000ff07f00\
007f07f800003f83f80003f83f800001fe1fc0001fe1fc00000ff1fe0000\
ff1fe000007f8fe00003f8ff000001fc7f00001fc7f000000ff3f80000ff\
3f8000007fbf800003fbfc000001fdfc00001fdfe000000fefe00000fffe\
0000007fff000007fff0000003fff000001fff8000000fff800000fffc00\
00007ffc000007ffc0000003ffe000001ffe0000001ffe000000fff00000\
007ff0000007ff80000003ff8000003ff80000001ffc000000ffc0000000\
ffc0000007fe00000003fe0000003ff00000001ff0000001ff0000"
self.Xmatrixwid = 53
self.Xmatrixhei = 62
self.Xmatrixori = 0
self.Xmatrix = "1ff80000000ffc7fe00000007fc1ff00000007fc07fc0000007fe03ff000\
0003fe00ff8000003fe003fe000003ff001ff800001ff0007fc00001ff00\
01ff00000ff8000ffc0000ff80003fe0000ff80000ff80007fc00007fe00\
07fc00001ff0007fc000007fc003fc000003ff003fe000000ff803fe0000\
003fe01fe0000001ff81ff00000007fc1ff00000001ff0ff000000007fcf\
f800000003feff800000000ffff8000000003fff8000000001fffc000000\
0007ffc0000000001ffc0000000000ffe00000000003fe00000000003ff8\
0000000003ffe0000000001fff8000000001fffc000000001ffff0000000\
01ff7fc00000000ffbfe00000000ff8ff80000000ff83fe00000007fc1ff\
00000007fc07fc0000007fc01ff0000003fe00ff8000003fe003fe000003\
fe000ff800001ff0007fe00001ff0001ff00001ff00007fc0001ff80003f\
f0000ff80000ff8000ff800003fe000ffc00001ff8007fc000007fc007fc\
000001ff007fe000000ffc03fe0000003fe03fe0000001ff83ff00000007\
fe1ff00000001ff1ff00000000ffdff800000003ff"
self.Ymatrixwid = 55
self.Ymatrixhei = 62
self.Ymatrixori = 0
self.Ymatrix = "3ff000000000ffbff000000003fe3fe00000000ffc7fe00000001ff07fe0\
0000007fc07fc0000000ff80ffc0000003fe00ff8000000ffc00ff800000\
1ff001ff8000007fc001ff000000ff8003ff000003fe0003fe000007fc00\
03fe00001ff00007fe00007fc00007fc0000ff800007fc0003fe00000ff8\
0007f800000ff8001ff000000ff8007fc000001ff000ff8000001ff003fe\
0000003fe007f80000003fe01ff00000003fe07fc00000007fc0ff000000\
007fc3fe000000007fc7f800000000ff9ff000000000ffbfc000000000ff\
ff0000000001fffe0000000001fff80000000003fff00000000003ffc000\
00000003ff000000000007fe000000000007f800000000000ff000000000\
001fe000000000003fc000000000007f800000000000ff000000000001fe\
000000000003fc000000000007f800000000000ff000000000001fe00000\
0000003fc000000000007f800000000000ff000000000001fe0000000000\
03fc000000000007f800000000000ff000000000001fe000000000003fc0\
00000000007f800000000000ff000000000001fe000000000003fc000000\
000007f800000"
self.Zmatrixwid = 48
self.Zmatrixhei = 62
self.Zmatrixori = 0
self.Zmatrix = "1ffffffffffe1ffffffffffe1ffffffffffe1ffffffffffe1ffffffffffe\
1ffffffffffe1ffffffffffe0000000007fe000000000ffc000000001ff8\
000000001ff8000000003ff0000000007fe000000000ffc000000000ffc0\
00000001ff8000000003ff0000000007fe0000000007fc000000000ffc00\
0000001ff8000000003ff0000000007fe0000000007fe000000000ffc000\
000001ff8000000003ff0000000003ff0000000007fe000000000ffc0000\
00001ff8000000001ff8000000003ff0000000007fe000000000ffc00000\
0000ff8000000001ff8000000003ff0000000007fe000000000ffc000000\
000ffc000000001ff8000000003ff0000000007fe0000000007fe0000000\
00ffc000000001ff8000000003ff0000000003ff0000000007fe00000000\
0ffc000000001ff8000000001ff0000000003ff0000000007fe000000000\
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffffffffffffffffff"
self.onematrixwid = 21
self.onematrixhei = 60
self.onematrixori = 2
self.onematrix = "f80007c0007e0003f0001f8001fc000fe000ff000ff800ffc03ffe1fffff\
fffffffffffffffffffffffff8003fc001fe000ff0007f8003fc001fe000\
ff0007f8003fc001fc000fe0007f0003f8001fc000fe0007f0003f8001fc\
000fe0007f0003f8001fc000fe0007f0003f8001fc000fe0007f0003f800\
1fc000fe0007f0003f8001fc000fe0007f0003f8001fc000fe0007f0003f\
8001fc000fe"
self.twomatrixwid = 40
self.twomatrixhei = 60
self.twomatrixori = 2
self.twomatrix = "1ffc000000ffff800003ffffe0000ffffff8001ffffffc003ffffffe007f\
ffffff007ff007ff80ffc000ffc1ff00007fc1fe00003fe1fe00001fe3fc\
00000fe3f800000fe3f800000ff3f8000007f7f8000007f7f0000007f7f0\
000007f7f0000007f7f0000007f00000000ff00000000fe00000001fe000\
00001fe00000003fc00000007fc0000000ff80000003ff00000007ff0000\
001ffe0000007ffc000000fff8000003ffe000000fffc000003fff000000\
7ffe000001fff8000003ffe000000fff8000001ffe0000003ff80000007f\
e00000007fc0000000ff80000001fe00000001fc00000003fc00000003f8\
00000007f000000007f000000007f000000007f00000000fffffffffffff\
fffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
self.threematrixwid = 40
self.threematrixhei = 62
self.threematrixori = 2
self.threematrix = "1ff8000001ffff800007ffffe0000ffffff0003ffffff8007ffffffc007f\
fffffe00ffe007ff01ff8001ff01fe0000ff81fc00007f83fc00003f83f8\
00003fc3f800001fc3f800001fc7f000001fc7f000001fc7f000001fc7f0\
00001fc00000001fc00000003f800000003f800000007f80000000ff0000\
0001ff0000001ffe00000ffffc00000ffff000000fffe000000ffff80000\
0ffffc00000fffff00000003ff80000000ffc00000003fc00000001fe000\
00001fe00000000fe00000000ff000000007f000000007f000000007ffe0\
000007ffe0000007ffe0000007ffe0000007f7f0000007f7f000000fe7f0\
00000fe7f800001fe3f800001fe3fc00003fc3fe00007fc1ff8001ff81ff\
e007ff80ffffffff007ffffffe003ffffffc001ffffff00007ffffe00001\
ffff0000001ff0000"
self.fourmatrixwid = 41
self.fourmatrixhei = 60
self.fourmatrixori = 2
self.fourmatrix = "1f000000001f800000001fc00000001fe00000000ff00000000ff8000000\
0ffc00000007fe00000007ff00000007ff80000007ffc0000003ffe00000\
03fff0000003fff8000003fdfc000001fcfe000001fc7f000001fe3f8000\
00fe1fc00000fe0fe00000ff07f00000ff03f800007f01fc00007f00fe00\
007f807f00003f803f80003f801fc0003fc00fe0003fc007f0001fc003f8\
001fc001fc001fe000fe000fe0007f000fe0003f800ff0001fc00ff0000f\
e007f00007f007f00003f803f80001fc01ffffffffffffffffffffffffff\
fffffffffffffffffffffffffffffffffffffffffffffc000001fc000000\
00fe000000007f000000003f800000001fc00000000fe000000007f00000\
0003f800000001fc00000000fe000000007f000000003f800000001fc000\
00000fe00"
self.fivematrixwid = 40
self.fivematrixhei = 62
self.fivematrixori = 2
self.fivematrix = "3fffffff803fffffff803fffffff803fffffff807fffffff807fffffff80\
7fffffff807f000000007f000000007f000000007e00000000fe00000000\
fe00000000fe00000000fe00000000fe00000000fe00000000fc00000000\
fc00000001fc00000001fc07f80001fc3fff0001fcffffe001fffffff001\
fffffff801fffffffe01fffffffe03ffe00fff03ff8003ff83fe0000ff83\
fc00007fc3f800003fc00000001fe00000001fe00000000fe00000000fe0\
0000000ff000000007f000000007f000000007f000000007f000000007f0\
00000007f000000007f000000007ffe0000007efe000000fe7f000000fe7\
f000001fe7f800001fc3f800003fc3fc00007f83fe0000ff81ff8001ff00\
ffe00ffe00fffffffe007ffffffc003ffffff8000fffffe00007ffffc000\
00fffe0000001ff0000"
self.sixmatrixwid = 39
self.sixmatrixhei = 62
self.sixmatrixori = 2
self.sixmatrix = "1ff0000001fffc00000ffffe00007ffffe0001fffffe0007fffffe001fff\
fffe007ff00ffc01ff8007fc03fc0007f80ff00007f81fe00007f07f8000\
0fe0fe00000fe1fc00001fc7f00000000fe00000001fc00000007f800000\
00fe00000001fc00000003f800000007f003f8000fe07ffe003fc3ffff00\
7f8fffff80ff3fffff81feffffff83ffffffff87fff807ff8fffc001ff1f\
fe0001ff3ff80001fe7ff00001feffc00003fdff000003fbfe000007f7fc\
00000ffff000000fffe000001fdfc000003fbf8000007f7f000000fefe00\
0001fdfc000003fbf8000007f3f800001fc7f000003f8ff000007f0fe000\
01fe1fe00007f81fe0000ff03fe0007fc07fe001ff807ff00ffe007fffff\
f8007fffffe0007fffff80007ffffe00007ffff000001fff80000003f800\
0"
self.sevenmatrixwid = 40
self.sevenmatrixhei = 60
self.sevenmatrixori = 2
self.sevenmatrix = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffff00000000fe00000000fe00000001fc00000003f800000003f8\
00000007f00000000fe00000000fc00000001fc00000003f800000003f80\
0000007f00000000fe00000000fe00000001fc00000001fc00000003f800\
000003f000000007f00000000fe00000000fe00000001fc00000001fc000\
00003f800000003f800000007f000000007f00000000ff00000000fe0000\
0001fe00000001fc00000001fc00000003f800000003f800000007f80000\
0007f000000007f00000000ff00000000fe00000000fe00000001fe00000\
001fe00000001fc00000003fc00000003fc00000003f800000003f800000\
007f800000007f800000007f800000007f00000000ff00000000ff000000"
self.eightmatrixwid = 40
self.eightmatrixhei = 62
self.eightmatrixori = 2
self.eightmatrix = "ff80000007fff000003ffffe00007fffff0000ffffff8001ffffffc003ff\
ffffe007ff007ff007fc001ff00ff8000ff80ff00007f80fe00003f81fe0\
0003fc1fc00001fc1fc00001fc1fc00001fc1fc00001fc1fc00001fc1fc0\
0001fc1fe00003fc0fe00003f80ff00007f807f8000ff007fe003ff003ff\
80ffe001ffffffc0007fffff00003ffffe00007ffffe0001ffffff8003ff\
ffffc007ff00fff00ff8001ff01ff0000ff83fc00003fc3f800001fc7f80\
0001fe7f000000fe7f000000fefe0000007ffe0000007ffe0000007ffe00\
00007ffe0000007ffe0000007ffe0000007ffe0000007f7f000000fe7f00\
0000fe7f800001fe7f800001fe3fc00003fc3fe0000ffc1ff8001ff80ffe\
00fff007ffffffe007ffffffe001ffffff8000ffffff00003ffffc00000f\
fff0000000ff0000"
self.ninematrixwid = 39
self.ninematrixhei = 62
self.ninematrixori = 2
self.ninematrix = "ffc000000ffff000007ffff80001fffff80007fffff8003ffffff8007fff\
fff801ff803ff807fe001ff80ff0000ff03fc0000ff07f00001fe1fe0000\
1fe3f800001fc7f000003f8fe000007f3f8000007f7f000000fefe000001\
fdfc000003fbf8000007f7f000000fefe000001fffc000003fdfc00000ff\
bf800001ff7f800007feff00000ffcff00003ff9ff0000fff1ff0007ffe3\
ff803fffc3ffffffff83fffffeff03fffff9fe03ffffe3fc01ffff07f800\
fffc0fe0003f801fc00000003f800000007f00000000fe00000003fc0000\
0007f00000000fe00000001fc7f000007f8fe00000fe0fe00003fc1fc000\
07f03fc0001fe07fc0007fc07f8001ff00ffc007fc00ffc03ff800ffffff\
e001ffffff8001fffffe0001fffff80000ffffc000007ffe0000001fc000\
0"
self.tenmatrixwid = 39
self.tenmatrixhei = 62
self.tenmatrixori = 2
self.tenmatrix = "7fc0000007fff000003ffff80000fffff80003fffff8000ffffff8003fff\
fff800ffe03ff801ff001ff007f8001ff00ff0001fe03fc0001fe07f0000\
1fc1fe00003fc3f800003f87f000007f0fe00000fe3f800000fe7f000001\
fcfe000003f9fc000007f3f800000fe7e000000fdfc000001fff8000003f\
ff0000007ffe000000fffc000001fff8000003fff0000007ffe000000fff\
c000001fff8000003fff0000007ffe000000fffc000001fff8000003fff0\
000007ffe000000fefc000001fdfc000007f3f800000fe7f000001fcfe00\
0003f9fc000007f3f800000fe3f800003f87f000007f0ff00000fe0fe000\
03f81fc00007f03fc0001fc03fc0007f807fc001ff007fc007fc007fe03f\
f0007fffffc000ffffff00007ffffc00007ffff000003fff8000000ff800\
0"
self._matrixwid = 51
self._matrixhei = 4
self._matrixori = 73
self._matrix = "fffffffffffffffffffffffffffffffffffffffffffffffffff"
self.minusmatrixwid = 20
self.minusmatrixhei = 6
self.minusmatrixori = 36
self.minusmatrix = "ffffffffffffffffffffffffffffff"
self.plusmatrixwid = 42
self.plusmatrixhei = 42
self.plusmatrixori = 21
self.plusmatrix = "3f000000000fc000000003f000000000fc000000003f000000000fc00000\
0003f000000000fc000000003f000000000fc000000003f000000000fc00\
0000003f000000000fc000000003f000000000fc000000003f000000000f\
c0000fffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffff00003f000000000fc000000003f000000000fc000000003f0000\
00000fc000000003f000000000fc000000003f000000000fc000000003f0\
00000000fc000000003f000000000fc000000003f000000000fc00000000\
3f000000000fc0000"
self.equalmatrixwid = 41
self.equalmatrixhei = 21
self.equalmatrixori = 32
self.equalmatrix = "1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ff8000000000000000000000000000000000000000000000000000000000\
00000000000000000000000000000000003fffffffffffffffffffffffff\
ffffffffffffffffffffffffffffffffffff"
self.exclmatrixwid = 7
self.exclmatrixhei = 62
self.exclmatrixori = 0
self.exclmatrix = "3fffffffffffffffffffffffffffffffffffffffffffffffffffffffffbe\
7cf9f3e7cf9f3e3870e1c3870000000007fffffffffffffff"
self.atmatrixwid = 78
self.atmatrixhei = 75
self.atmatrixori = -1
self.atmatrix = "3ffc000000000000001fffff80000000000003ffffffc000000000003fff\
ffffc00000000007ffffffffc0000000003fffffffffc000000003ffffff\
ffff800000003fffc000ffff80000001fff000007fff0000000fff000000\
3ffe000000fff00000003ffc000007ff000000003ff800003ff000000000\
7ff00001ff80000000007fe0000ffc0000000000ffc0007fc00000000001\
ff8001fe000000000003fe000ff0000000000007fc007fc000000000000f\
f801fe000003f800003fe00ff00000fff800007f807f80000ffff03f80ff\
01fc00007ffff1fc03fc0ff00007ffffc7f007f83f80003ff03fbfc01fe1\
fe0001ff003ffe003f87f0000ff0007ff800fe1fc0007f8000ffe003fcfe\
0001fc0003ff0007f3f8000fe00007fc001fcfc0007f80001ff0007f7f00\
01fc00007fc001fdfc000fe00001fe0007f7f0003f800007f8001fdf8001\
fe00001fe0007f7e0007f000007f0001fff8001fc00003fc0007ffe0007f\
00000ff0001fbf8003f800003f8000fefe000fe00000fe0003fbf8003f80\
0007f8000fefe000fe00001fc0007f3f8003f800007f0001fcfe000fe000\
03fc000ff3f8003f80000fe0003f8fe000fe00007f8001fe1fc003fc0001\
fe0007f07f0007f0000ff0003fc1fc001fe0007fc001fe07f8007f8003ff\
000ff00fe000ff001ffe007fc03fc003fe00fff807fe00ff0007fe0feff8\
7ff001fe000fffff3fffff8007f8001ffff8fffffc000ff0003fffc1ffff\
c0003fe0007ffc03fffc00007fc0003f8003ff800001ff80000000000000\
0003ff000000000000000007fe00000000000000000ffc00000000000000\
001ffc00000000000000003ff800000000000000007ff800000000000000\
00fff80000000000000001fffc000003c000000001ffff0001ff80000000\
03fffffffffe0000000003fffffffff80000000003fffffffff000000000\
03ffffffff800000000001fffffff0000000000000fffffc000000000000\
001ffc00000000"
self.hashmatrixwid = 45
self.hashmatrixhei = 60
self.hashmatrixori = 3
self.hashmatrix = "7e003f000007f003f800003f801fc00001f800fe00000fc007e000007e00\
3f000007f003f800003f801fc00001fc00fe00000fc007e000007e003f00\
0003f001f800003f801fc00001fc00fe00000fc007e000007e003f001fff\
fffffff8ffffffffffc7fffffffffe3ffffffffff1ffffffffff8fffffff\
fffc003f801fc00001fc00fe00000fe007f000007e003f000003f001f800\
001f800fc00001fc00fe00000fe007f000007f003f800003f001f800001f\
800fc00000fc007e00000fe007f000007f003f800003f001fc007fffffff\
ffe3ffffffffff1ffffffffff8ffffffffffc7fffffffffe3ffffffffff0\
00fc007e00000fe003f000007f003f800003f801fc00001f800fe00000fc\
007e000007e003f000007f003f800003f801fc00001f800fe00000fc007e\
000007e003f000007f003f800003f801fc00001f800fe00000fc007e0000\
07e003f0000"
self.dollarmatrixwid = 41
self.dollarmatrixhei = 77
self.dollarmatrixori = -4
self.dollarmatrix = "7c000000003e000000001f000000000f8000000007c00000000ffc000000\
7fffc00000fffff80001ffffff0003ffffffc003fffffff003ffcf8ffc01\
ff07c1fe01fe03e07f80fe01f01fc0fe00f80ff07f007c03f83f003e01fc\
3f801f007f1fc00f803f8fe007c01fc7f003e00fe3f801f00001fc00f800\
00fe007c00007f803e00001fc01f00000ff00f800007fc07c00001ff83e0\
00007ff1f000003ffff800000ffffe000003fffff00000ffffff00001fff\
ffe00003fffff800003fffff000003ffffc00000fffff000007c7ff80000\
3e0ffe00001f01ff00000f807fc00007c01fe00003e007f00001f001f800\
00f800fffc007c007ffe003e003fff001f001fff800f800fffc007c007ff\
f003e003f3f801f001f9fc00f801fcff007c00fe3f803e00fe1fe01f00ff\
0ff80f80ff03ff07c1ff80ffe3e3ff803fffffff800fffffff8003ffffff\
80007fffff00000ffffe0000007ff000000007c000000003e000000001f0\
00000000f8000000007c000000003e000000001f000000000f8000000007\
c0000"
self.percentmatrixwid = 71
self.percentmatrixhei = 61
self.percentmatrixori = 2
self.percentmatrix = "1f00000000000000007e0000003fc0000001f8000003fff0000003e00000\
0ffff800000fc000007ffff800001f000001fffff800007e000007fffff8\
0000f800000ff81ff80003f000003fc00ff00007c00000ff000ff0001f80\
0001fc000fe0003e000003f0000fc000fc00000fe0001fc003f000001f80\
001f8007e000003f00003f001f8000007e00007e003e000000fc0000fc00\
fc000001f80001f801f0000003f80007f007e0000003f0000fc00f800000\
07f0003f803f0000000ff000ff00fc0000000ff003fc01f80000000ff81f\
f007e00000001fffffe00fc00000001fffff803f000000001ffffe007c00\
0000000ffff001f80000000007ff8003e00000000001fc000fc000000000\
0000001f00000000000000007e0000000000000001f80007f00000000003\
f0007ffc000000000fc003fffe000000001f800ffffe000000007e003fff\
fe00000000f800fffffe00000003f003fe03fe00000007c007f803fc0000\
001f801fc001fc0000007e003f8003f8000000fc007e0003f0000003f001\
f80003f0000007e003f00007e000001f8007e0000fc000003f000fc0001f\
800000fc001f80003f000001f0003f00007e000007e0003f0001f800000f\
80007f0007f000003f0000fe000fe00000fc0000ff007f800001f80001ff\
01ff000007e00001fffffc00000fc00001fffff000003f000001ffffc000\
007e000001ffff000001f8000000fff8000007f00000003f800"
self.hatmatrixwid = 32
self.hatmatrixhei = 32
self.hatmatrixori = 2
self.hatmatrix = "7e000000ff000000ff000001ff000001ff800001ff800003ffc00003ffc0\
0007e7c00007e7e00007c7e0000fc3f0000fc3f0001f81f0001f81f8001f\
01f8003f00fc003f00fc007e007c007e007e007c007e00fc003f00fc003f\
01f8001f81f8001f81f0001f83f0000fc3f0000fc7e00007e7e00007efc0\
0003efc00003f"
self.ampmatrixwid = 50
self.ampmatrixhei = 62
self.ampmatrixori = 2
self.ampmatrix = "3fc0000000007ffe000000007fffc00000003ffffc0000001fffff800000\
0fffffe0000007fc07fc000001fe00ff800000fe001fe000003f8003f800\
001fc0007f000007f0001fc00001fc0007f000007f0001fc00001fc0007f\
000007f0001fc00000fe000fe000003f8003f800000ff001fe000001fe00\
ff0000007f80ffc000000ff07fe0000001fe3ff00000007ffff80000000f\
fff800000001fffc000000007ffc000000001ffe000000001fff00000000\
0fffe00000000ffffc00000007feff80000003ff1ff003f801ff07fe00fe\
00ff80ff803f807fc01ff00fe03fe003fe07f00ff0007fc1fc07f8000ff8\
7f01fc0003ff3fc07f00007fcfe03f80000ffff80fe00001fffc03f80000\
3fff00fe000007ff803f800001ffe00fe000003ff003f8000007fc00ff00\
0001ff801fc00000fff007f800007ffe01ff00003fffc03fe0001ffff00f\
fc001ffbfe01ffe03ffc7fc07ffffffe0ff80fffffff03ff01ffffff807f\
c03fffff800ff803ffffc001ff003fff8000000000ff0000000"
self.strixmatrixwid = 25
self.strixmatrixhei = 24
self.strixmatrixori = 0
self.strixmatrix = "3e00001f00000f800007c00003e00001f00100f804e07c0efe3e3fffffff\
fffffffffffff0ffff8007fc0003fe0003ff8001ffc001fbf001f8fc01f8\
3f01fc1fc0fc07e01c01c00600c0"
self.opencparmatrixwid = 18
self.opencparmatrixhei = 80
self.opencparmatrixori = 0
self.opencparmatrix = "3c001f000f8007e001f000f8003e001f000fc003f001f8007e001f000fc0\
03e001f8007e003f800fc003f001fc007e001f8007e003f800fc003f000f\
c007f001fc007f001f8007e001f800fe003f800fe003f800fe003f800fe0\
03f800fe003f800fe003f8007e001f8007e001fc007f001fc003f000fc00\
3f000fe003f8007e001f8007f000fc003f000fc001f8007e000f8003f000\
fc001f8007e000f8003f0007c000f8003e0007c001f0003e0007c001f"
self.closecparmatrixwid = 18
self.closecparmatrixhei = 80
self.closecparmatrixori = 0
self.closecparmatrix = "f0003e0007c001f8003e0007c001f0007e000fc003f0007e001f8003e000\
fc003f0007e001f8007f000fc003f000fe001f8007e001f8007f000fc003\
f000fc003f800fe003f8007e001f8007e001fc007f001fc007f001fc007f\
001fc007f001fc007f001fc007f001f8007e001f800fe003f800fe003f00\
0fc003f001fc007f001f8007e003f800fc003f000fc007e001f8007c003f\
000fc007e001f8007c003f000f8007c001f000f8003e001f000f8003e000"
self.opensparmatrixwid = 16
self.opensparmatrixhei = 80
self.opensparmatrixori = 0
self.opensparmatrix = "fffffffffffffffffffffffffe00fe00fe00fe00fe00fe00fe00fe00fe00\
fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00\
fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00\
fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00\
fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00fe00ffff\
ffffffffffffffffffff"
self.closesparmatrixwid = 16
self.closesparmatrixhei = 80
self.closesparmatrixori = 0
self.closesparmatrix = "ffffffffffffffffffffffff007f007f007f007f007f007f007f007f007f\
007f007f007f007f007f007f007f007f007f007f007f007f007f007f007f\
007f007f007f007f007f007f007f007f007f007f007f007f007f007f007f\
007f007f007f007f007f007f007f007f007f007f007f007f007f007f007f\
007f007f007f007f007f007f007f007f007f007f007f007f007f007fffff\
ffffffffffffffffffff"
self.backslashmatrixwid = 25
self.backslashmatrixhei = 63
self.backslashmatrixori = 0
self.backslashmatrix = "7c00001e00000f800007c00001e00000f800007c00003e00000f000007c0\
0003e00000f000007c00003e00000f000007c00003e00000f000007c0000\
3e00000f000007c00003e00000f000007c00003e00000f000007c00003e0\
0000f000007c00003e00001f000007800003e00001f000007800003e0000\
1f000007800003e00001f000007800003e00001f000007800003e00001f0\
00007800003e00001f000007800003e00001f000007800003e00001f0000\
0f800003c00001f00000f800003c00001f"
self.semicolmatrixwid = 9
self.semicolmatrixhei = 57
self.semicolmatrixori = 17
self.semicolmatrix = "1ffffffffffffffffffff000000000000000000000000000000000000000\
0000000000000000000001ffffffffffffffffffff0783c1e0f078383c1e\
1e7e3e1c0"
self.postmatrixwid = 8
self.postmatrixhei = 21
self.postmatrixori = 2
self.postmatrix = "ffffffffffffffffffffffff7e7e7e7e7e3c3c3c3c"
self.commamatrixwid = 9
self.commamatrixhei = 21
self.commamatrixori = 53
self.commamatrix = "1ffffffffffffffffffff0783c1e0f078383c1e1e7e3e1c0"
self.fullstopmatrixwid = 9
self.fullstopmatrixhei = 9
self.fullstopmatrixori = 53
self.fullstopmatrix = "1ffffffffffffffffffff"
self.forslashmatrixwid = 25
self.forslashmatrixhei = 63
self.forslashmatrixori = 0
self.forslashmatrix = "7c00003c00003e00001f00000f00000f800007c00003e00001e00001f000\
00f800007800007c00003e00001e00001f00000f800007800007c00003e0\
0001e00001f00000f800007800007c00003e00001e00001f00000f800007\
800007c00003e00001f00000f00000f800007c00003c00003e00001f0000\
0f00000f800007c00003c00003e00001f00000f00000f800007c00003c00\
003e00001f00000f00000f800007c00003c00003e00001f00000f8000078\
00007c00003e00001e00001f00000"
self.lesthanmatrixwid = 41
self.lesthanmatrixhei = 41
self.lesthanmatrixori = 22
self.lesthanmatrix = "10000000003800000000fc00000001fe00000007ff0000000fff8000001f\
ffc000007fff800000ffff000001fffc000007fff800000fffe000001fff\
c000007fff000000fffe000003fffc000007fff000000fffe0000007ff80\
000003ff00000001fe00000000ffe00000007ffc0000003fffc0000003ff\
f80000007fff0000000ffff0000000fffe0000001fffe0000003fffc0000\
003fff80000007fff8000000ffff0000000ffff0000001fffc0000001ffe\
00000003ff000000007f8000000007c000000000e0000000001"
self.greatthanmatrixwid = 42
self.greatthanmatrixhei = 41
self.greatthanmatrixori = 22
self.greatthanmatrix = "30000000000f0000000003f000000000ff800000003ff80000000fffc000\
0003fffc0000003fffc0000001fffe0000001fffe0000000fffe0000000f\
fff0000000ffff00000007fff00000007fff80000003fff80000003fffc0\
000003fffc0000001fff00000001ffc00000000ff00000001ffc0000001f\
ff0000003fff8000003fff8000003fff8000007fff0000007fff000000ff\
ff000000fffe000001fffe000001fffe000003fffc000003fffc000003ff\
f8000000fff80000003ff80000000ff000000003f000000000f000000000\
20000000000"
self.questionmatrixwid = 36
self.questionmatrixhei = 63
self.questionmatrixori = -1
self.questionmatrix = "7fe000003fffe0000fffff8003fffffc007fffffe00fffffff00fffffff8\
1ff801ffc3fe0007fc3fc0003fe3f80001fe7f80000fe7f00000ff7f0000\
07f7e000007ffe000007ffe000007ffe000007ffe000007ffe000007f000\
0000fe0000000fe0000001fe0000003fc0000003fc0000007f8000000ff0\
000001ff0000003fe0000007fc000000ff8000001ff0000003fc0000007f\
8000000ff0000000ff0000001fe0000001fc0000003fc0000003f8000000\
3f80000003f80000003f80000003f80000003f80000003f8000000000000\
000000000000000000000000000000000000000000000000000000000000\
0000003f80000003f80000003f80000003f80000003f80000003f8000000\
3f80000003f80000003f8000"
self.colonmatrixwid = 9
self.colonmatrixhei = 45
self.colonmatrixori = 17
self.colonmatrix = "1ffffffffffffffffffff000000000000000000000000000000000000000\
0000000000000000000001ffffffffffffffffffff"
self.quotematrixwid = 22
self.quotematrixhei = 21
self.quotematrixori = 2
self.quotematrix = "3fc0ffff03fffc0ffff03fffc0ffff03fffc0ffff03fffc0ffff03fffc0f\
ffffffdf807e7e01f9f807e7e01f9f807e3c00f0f003c3c00f0f003c"
self.opensquigmatrixwid = 19
self.opensquigmatrixhei = 80
self.opensquigmatrixori = 0
self.opensquigmatrix = "7e007fc01ff807ff00ffe03ffc07f800fe003f8007f000fe001fc003f800\
7f000fe001fc003f8007f000fe001fc003f8007f000fe001fc003f8007f0\
00fe001fc003f8007f000fe001fc007f000fe003fc00ff007fc01ff803fc\
007f000fe001fe003fe001fe001fe001fc001fc003f8003f0007e000fe00\
1fc003f8007f000fe001fc003f8007f000fe001fc003f8007f000fe001fc\
003f8007f000fe001fc003f8007f000fe001fc001fc003fc007ff807ff00\
ffe00ffc00ff8003f"
self.closesquigmatrixwid = 20
self.closesquigmatrixhei = 80
self.closesquigmatrixori = 0
self.closesquigmatrix = "fe000ffc00ffe00fff00fff00fff8007f8003f8001fc001fc001fc001fc0\
01fc001fc001fc001fc001fc001fc001fc001fc001fc001fc001fc001fc0\
01fc001fc001fc001fc001fc001fc001fc000fc000fe000fe0007f0007f8\
003fe001ff000ff0007f0007f000ff001ff003fe007f8007f000fe000fe0\
00fc001fc001fc001fc001fc001fc001fc001fc001fc001fc001fc001fc0\
01fc001fc001fc001fc001fc001fc001fc001fc001fc001fc001fc001fc0\
03f8007f80fff80fff00fff00ffe00ff800fe000"
self.barmatrixwid = 5
self.barmatrixhei = 80
self.barmatrixori = 0
self.barmatrix = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffffffffffffffffffffffffffffffffff"
self.miscmatrixwid = 46
self.miscmatrixhei = 80
self.miscmatrixori = 0
self.miscmatrix = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffff8000000007fe000000001ff8000000007fe000000001ff8000\
000007fe000000001ff8000000007fe000000001ff8000000007fe000000\
001ff8000000007fe000000001ff8000000007fe000000001ff800000000\
7fe000000001ff8000000007fe000000001ff8000000007fe000000001ff\
8000000007fe000000001ff8000000007fe000000001ff8000000007fe00\
0000001ff8000000007fe000000001ff8000000007fe000000001ff80000\
00007fe000000001ff8000000007fe000000001ff8000000007fe0000000\
01ff8000000007fe000000001ff8000000007fe000000001ff8000000007\
fe000000001ff8000000007fe000000001ff8000000007fe000000001ff8\
000000007fe000000001ff8000000007fe000000001ff8000000007fe000\
000001ff8000000007fe000000001ff8000000007fe000000001ff800000\
0007fe000000001ff8000000007fe000000001ff8000000007fe00000000\
1ff8000000007fe000000001ff8000000007fe000000001ff8000000007f\
e000000001ffffffffffffffffffffffffffffffffffffffffffffffffff\
ffffffffffffffffffff"
def writea(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.amatrix)
charstring = (
self.amatrixwid * self.amatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.amatrixwid * self.amatrixhei:
charstring = charstring[0 - self.amatrixwid * self.amatrixhei :]
for i in range(0, len(charstring), self.amatrixwid):
for j in range(i, i + self.amatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.amatrixori
- self.amatrixwid
+ j / self.amatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.amatrixori
+ j / self.amatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.amatrixori + j / self.amatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.amatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.amatrixori + j / self.amatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.amatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.amatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.amatrixori + j / self.amatrixwid)
)
* sizeratio
)
- l,
)
def writeb(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.bmatrix)
charstring = (
self.bmatrixwid * self.bmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.bmatrixwid * self.bmatrixhei:
charstring = charstring[0 - self.bmatrixwid * self.bmatrixhei :]
for i in range(0, len(charstring), self.bmatrixwid):
for j in range(i, i + self.bmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.bmatrixori
- self.bmatrixwid
+ j / self.bmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.bmatrixori
+ j / self.bmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.bmatrixori + j / self.bmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.bmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.bmatrixori + j / self.bmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.bmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.bmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.bmatrixori + j / self.bmatrixwid)
)
* sizeratio
)
- l,
)
def writec(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.cmatrix)
charstring = (
self.cmatrixwid * self.cmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.cmatrixwid * self.cmatrixhei:
charstring = charstring[0 - self.cmatrixwid * self.cmatrixhei :]
for i in range(0, len(charstring), self.cmatrixwid):
for j in range(i, i + self.cmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.cmatrixori
- self.cmatrixwid
+ j / self.cmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.cmatrixori
+ j / self.cmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.cmatrixori + j / self.cmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.cmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.cmatrixori + j / self.cmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.cmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.cmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.cmatrixori + j / self.cmatrixwid)
)
* sizeratio
)
- l,
)
def writed(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.dmatrix)
charstring = (
self.dmatrixwid * self.dmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.dmatrixwid * self.dmatrixhei:
charstring = charstring[0 - self.dmatrixwid * self.dmatrixhei :]
for i in range(0, len(charstring), self.dmatrixwid):
for j in range(i, i + self.dmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.dmatrixori
- self.dmatrixwid
+ j / self.dmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.dmatrixori
+ j / self.dmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.dmatrixori + j / self.dmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.dmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.dmatrixori + j / self.dmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.dmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.dmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.dmatrixori + j / self.dmatrixwid)
)
* sizeratio
)
- l,
)
def writee(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.ematrix)
charstring = (
self.ematrixwid * self.ematrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.ematrixwid * self.ematrixhei:
charstring = charstring[0 - self.ematrixwid * self.ematrixhei :]
for i in range(0, len(charstring), self.ematrixwid):
for j in range(i, i + self.ematrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.ematrixori
- self.ematrixwid
+ j / self.ematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.ematrixori
+ j / self.ematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.ematrixori + j / self.ematrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.ematrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.ematrixori + j / self.ematrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.ematrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.ematrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.ematrixori + j / self.ematrixwid)
)
* sizeratio
)
- l,
)
def writef(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.fmatrix)
charstring = (
self.fmatrixwid * self.fmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.fmatrixwid * self.fmatrixhei:
charstring = charstring[0 - self.fmatrixwid * self.fmatrixhei :]
for i in range(0, len(charstring), self.fmatrixwid):
for j in range(i, i + self.fmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.fmatrixori
- self.fmatrixwid
+ j / self.fmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.fmatrixori
+ j / self.fmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.fmatrixori + j / self.fmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.fmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.fmatrixori + j / self.fmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.fmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.fmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.fmatrixori + j / self.fmatrixwid)
)
* sizeratio
)
- l,
)
def writeg(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.gmatrix)
charstring = (
self.gmatrixwid * self.gmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.gmatrixwid * self.gmatrixhei:
charstring = charstring[0 - self.gmatrixwid * self.gmatrixhei :]
for i in range(0, len(charstring), self.gmatrixwid):
for j in range(i, i + self.gmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.gmatrixori
- self.gmatrixwid
+ j / self.gmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.gmatrixori
+ j / self.gmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.gmatrixori + j / self.gmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.gmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.gmatrixori + j / self.gmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.gmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.gmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.gmatrixori + j / self.gmatrixwid)
)
* sizeratio
)
- l,
)
def writeh(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.hmatrix)
charstring = (
self.hmatrixwid * self.hmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.hmatrixwid * self.hmatrixhei:
charstring = charstring[0 - self.hmatrixwid * self.hmatrixhei :]
for i in range(0, len(charstring), self.hmatrixwid):
for j in range(i, i + self.hmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.hmatrixori
- self.hmatrixwid
+ j / self.hmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.hmatrixori
+ j / self.hmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.hmatrixori + j / self.hmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.hmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.hmatrixori + j / self.hmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.hmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.hmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.hmatrixori + j / self.hmatrixwid)
)
* sizeratio
)
- l,
)
def writei(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.imatrix)
charstring = (
self.imatrixwid * self.imatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.imatrixwid * self.imatrixhei:
charstring = charstring[0 - self.imatrixwid * self.imatrixhei :]
for i in range(0, len(charstring), self.imatrixwid):
for j in range(i, i + self.imatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.imatrixori
- self.imatrixwid
+ j / self.imatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.imatrixori
+ j / self.imatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.imatrixori + j / self.imatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.imatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.imatrixori + j / self.imatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.imatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.imatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.imatrixori + j / self.imatrixwid)
)
* sizeratio
)
- l,
)
def writej(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.jmatrix)
charstring = (
self.jmatrixwid * self.jmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.jmatrixwid * self.jmatrixhei:
charstring = charstring[0 - self.jmatrixwid * self.jmatrixhei :]
for i in range(0, len(charstring), self.jmatrixwid):
for j in range(i, i + self.jmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.jmatrixori
- self.jmatrixwid
+ j / self.jmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.jmatrixori
+ j / self.jmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.jmatrixori + j / self.jmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.jmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.jmatrixori + j / self.jmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.jmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.jmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.jmatrixori + j / self.jmatrixwid)
)
* sizeratio
)
- l,
)
def writek(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.kmatrix)
charstring = (
self.kmatrixwid * self.kmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.kmatrixwid * self.kmatrixhei:
charstring = charstring[0 - self.kmatrixwid * self.kmatrixhei :]
for i in range(0, len(charstring), self.kmatrixwid):
for j in range(i, i + self.kmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.kmatrixori
- self.kmatrixwid
+ j / self.kmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.kmatrixori
+ j / self.kmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.kmatrixori + j / self.kmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.kmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.kmatrixori + j / self.kmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.kmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.kmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.kmatrixori + j / self.kmatrixwid)
)
* sizeratio
)
- l,
)
def writel(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.lmatrix)
charstring = (
self.lmatrixwid * self.lmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.lmatrixwid * self.lmatrixhei:
charstring = charstring[0 - self.lmatrixwid * self.lmatrixhei :]
for i in range(0, len(charstring), self.lmatrixwid):
for j in range(i, i + self.lmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.lmatrixori
- self.lmatrixwid
+ j / self.lmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.lmatrixori
+ j / self.lmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.lmatrixori + j / self.lmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.lmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.lmatrixori + j / self.lmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.lmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.lmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.lmatrixori + j / self.lmatrixwid)
)
* sizeratio
)
- l,
)
def writem(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.mmatrix)
charstring = (
self.mmatrixwid * self.mmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.mmatrixwid * self.mmatrixhei:
charstring = charstring[0 - self.mmatrixwid * self.mmatrixhei :]
for i in range(0, len(charstring), self.mmatrixwid):
for j in range(i, i + self.mmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.mmatrixori
- self.mmatrixwid
+ j / self.mmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.mmatrixori
+ j / self.mmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.mmatrixori + j / self.mmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.mmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.mmatrixori + j / self.mmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.mmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.mmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.mmatrixori + j / self.mmatrixwid)
)
* sizeratio
)
- l,
)
def writen(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.nmatrix)
charstring = (
self.nmatrixwid * self.nmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.nmatrixwid * self.nmatrixhei:
charstring = charstring[0 - self.nmatrixwid * self.nmatrixhei :]
for i in range(0, len(charstring), self.nmatrixwid):
for j in range(i, i + self.nmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.nmatrixori
- self.nmatrixwid
+ j / self.nmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.nmatrixori
+ j / self.nmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.nmatrixori + j / self.nmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.nmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.nmatrixori + j / self.nmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.nmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.nmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.nmatrixori + j / self.nmatrixwid)
)
* sizeratio
)
- l,
)
def writeo(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.omatrix)
charstring = (
self.omatrixwid * self.omatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.omatrixwid * self.omatrixhei:
charstring = charstring[0 - self.omatrixwid * self.omatrixhei :]
for i in range(0, len(charstring), self.omatrixwid):
for j in range(i, i + self.omatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.omatrixori
- self.omatrixwid
+ j / self.omatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.omatrixori
+ j / self.omatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.omatrixori + j / self.omatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.omatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.omatrixori + j / self.omatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.omatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.omatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.omatrixori + j / self.omatrixwid)
)
* sizeratio
)
- l,
)
def writep(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.pmatrix)
charstring = (
self.pmatrixwid * self.pmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.pmatrixwid * self.pmatrixhei:
charstring = charstring[0 - self.pmatrixwid * self.pmatrixhei :]
for i in range(0, len(charstring), self.pmatrixwid):
for j in range(i, i + self.pmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.pmatrixori
- self.pmatrixwid
+ j / self.pmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.pmatrixori
+ j / self.pmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.pmatrixori + j / self.pmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.pmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.pmatrixori + j / self.pmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.pmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.pmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.pmatrixori + j / self.pmatrixwid)
)
* sizeratio
)
- l,
)
def writeq(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.qmatrix)
charstring = (
self.qmatrixwid * self.qmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.qmatrixwid * self.qmatrixhei:
charstring = charstring[0 - self.qmatrixwid * self.qmatrixhei :]
for i in range(0, len(charstring), self.qmatrixwid):
for j in range(i, i + self.qmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.qmatrixori
- self.qmatrixwid
+ j / self.qmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.qmatrixori
+ j / self.qmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.qmatrixori + j / self.qmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.qmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.qmatrixori + j / self.qmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.qmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.qmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.qmatrixori + j / self.qmatrixwid)
)
* sizeratio
)
- l,
)
def writer(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.rmatrix)
charstring = (
self.rmatrixwid * self.rmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.rmatrixwid * self.rmatrixhei:
charstring = charstring[0 - self.rmatrixwid * self.rmatrixhei :]
for i in range(0, len(charstring), self.rmatrixwid):
for j in range(i, i + self.rmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.rmatrixori
- self.rmatrixwid
+ j / self.rmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.rmatrixori
+ j / self.rmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.rmatrixori + j / self.rmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.rmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.rmatrixori + j / self.rmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.rmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.rmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.rmatrixori + j / self.rmatrixwid)
)
* sizeratio
)
- l,
)
def writes(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.smatrix)
charstring = (
self.smatrixwid * self.smatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.smatrixwid * self.smatrixhei:
charstring = charstring[0 - self.smatrixwid * self.smatrixhei :]
for i in range(0, len(charstring), self.smatrixwid):
for j in range(i, i + self.smatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.smatrixori
- self.smatrixwid
+ j / self.smatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.smatrixori
+ j / self.smatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.smatrixori + j / self.smatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.smatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.smatrixori + j / self.smatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.smatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.smatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.smatrixori + j / self.smatrixwid)
)
* sizeratio
)
- l,
)
def writet(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.tmatrix)
charstring = (
self.tmatrixwid * self.tmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.tmatrixwid * self.tmatrixhei:
charstring = charstring[0 - self.tmatrixwid * self.tmatrixhei :]
for i in range(0, len(charstring), self.tmatrixwid):
for j in range(i, i + self.tmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.tmatrixori
- self.tmatrixwid
+ j / self.tmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.tmatrixori
+ j / self.tmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.tmatrixori + j / self.tmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.tmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.tmatrixori + j / self.tmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.tmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.tmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.tmatrixori + j / self.tmatrixwid)
)
* sizeratio
)
- l,
)
def writeu(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.umatrix)
charstring = (
self.umatrixwid * self.umatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.umatrixwid * self.umatrixhei:
charstring = charstring[0 - self.umatrixwid * self.umatrixhei :]
for i in range(0, len(charstring), self.umatrixwid):
for j in range(i, i + self.umatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.umatrixori
- self.umatrixwid
+ j / self.umatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.umatrixori
+ j / self.umatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.umatrixori + j / self.umatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.umatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.umatrixori + j / self.umatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.umatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.umatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.umatrixori + j / self.umatrixwid)
)
* sizeratio
)
- l,
)
def writev(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.vmatrix)
charstring = (
self.vmatrixwid * self.vmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.vmatrixwid * self.vmatrixhei:
charstring = charstring[0 - self.vmatrixwid * self.vmatrixhei :]
for i in range(0, len(charstring), self.vmatrixwid):
for j in range(i, i + self.vmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.vmatrixori
- self.vmatrixwid
+ j / self.vmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.vmatrixori
+ j / self.vmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.vmatrixori + j / self.vmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.vmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.vmatrixori + j / self.vmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.vmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.vmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.vmatrixori + j / self.vmatrixwid)
)
* sizeratio
)
- l,
)
def writew(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.wmatrix)
charstring = (
self.wmatrixwid * self.wmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.wmatrixwid * self.wmatrixhei:
charstring = charstring[0 - self.wmatrixwid * self.wmatrixhei :]
for i in range(0, len(charstring), self.wmatrixwid):
for j in range(i, i + self.wmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.wmatrixori
- self.wmatrixwid
+ j / self.wmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.wmatrixori
+ j / self.wmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.wmatrixori + j / self.wmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.wmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.wmatrixori + j / self.wmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.wmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.wmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.wmatrixori + j / self.wmatrixwid)
)
* sizeratio
)
- l,
)
def writex(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.xmatrix)
charstring = (
self.xmatrixwid * self.xmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.xmatrixwid * self.xmatrixhei:
charstring = charstring[0 - self.xmatrixwid * self.xmatrixhei :]
for i in range(0, len(charstring), self.xmatrixwid):
for j in range(i, i + self.xmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.xmatrixori
- self.xmatrixwid
+ j / self.xmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.xmatrixori
+ j / self.xmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.xmatrixori + j / self.xmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.xmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.xmatrixori + j / self.xmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.xmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.xmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.xmatrixori + j / self.xmatrixwid)
)
* sizeratio
)
- l,
)
def writey(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.ymatrix)
charstring = (
self.ymatrixwid * self.ymatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.ymatrixwid * self.ymatrixhei:
charstring = charstring[0 - self.ymatrixwid * self.ymatrixhei :]
for i in range(0, len(charstring), self.ymatrixwid):
for j in range(i, i + self.ymatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.ymatrixori
- self.ymatrixwid
+ j / self.ymatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.ymatrixori
+ j / self.ymatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.ymatrixori + j / self.ymatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.ymatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.ymatrixori + j / self.ymatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.ymatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.ymatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.ymatrixori + j / self.ymatrixwid)
)
* sizeratio
)
- l,
)
def writez(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.zmatrix)
charstring = (
self.zmatrixwid * self.zmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.zmatrixwid * self.zmatrixhei:
charstring = charstring[0 - self.zmatrixwid * self.zmatrixhei :]
for i in range(0, len(charstring), self.zmatrixwid):
for j in range(i, i + self.zmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.zmatrixori
- self.zmatrixwid
+ j / self.zmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.zmatrixori
+ j / self.zmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.zmatrixori + j / self.zmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.zmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.zmatrixori + j / self.zmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.zmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.zmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.zmatrixori + j / self.zmatrixwid)
)
* sizeratio
)
- l,
)
def writeA(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Amatrix)
charstring = (
self.Amatrixwid * self.Amatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Amatrixwid * self.Amatrixhei:
charstring = charstring[0 - self.Amatrixwid * self.Amatrixhei :]
for i in range(0, len(charstring), self.Amatrixwid):
for j in range(i, i + self.Amatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Amatrixori
- self.Amatrixwid
+ j / self.Amatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Amatrixori
+ j / self.Amatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Amatrixori + j / self.Amatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Amatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Amatrixori + j / self.Amatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Amatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Amatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Amatrixori + j / self.Amatrixwid)
)
* sizeratio
)
- l,
)
def writeB(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Bmatrix)
charstring = (
self.Bmatrixwid * self.Bmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Bmatrixwid * self.Bmatrixhei:
charstring = charstring[0 - self.Bmatrixwid * self.Bmatrixhei :]
for i in range(0, len(charstring), self.Bmatrixwid):
for j in range(i, i + self.Bmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Bmatrixori
- self.Bmatrixwid
+ j / self.Bmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Bmatrixori
+ j / self.Bmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Bmatrixori + j / self.Bmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Bmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Bmatrixori + j / self.Bmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Bmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Bmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Bmatrixori + j / self.Bmatrixwid)
)
* sizeratio
)
- l,
)
def writeC(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Cmatrix)
charstring = (
self.Cmatrixwid * self.Cmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Cmatrixwid * self.Cmatrixhei:
charstring = charstring[0 - self.Cmatrixwid * self.Cmatrixhei :]
for i in range(0, len(charstring), self.Cmatrixwid):
for j in range(i, i + self.Cmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Cmatrixori
- self.Cmatrixwid
+ j / self.Cmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Cmatrixori
+ j / self.Cmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Cmatrixori + j / self.Cmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Cmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Cmatrixori + j / self.Cmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Cmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Cmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Cmatrixori + j / self.Cmatrixwid)
)
* sizeratio
)
- l,
)
def writeD(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Dmatrix)
charstring = (
self.Dmatrixwid * self.Dmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Dmatrixwid * self.Dmatrixhei:
charstring = charstring[0 - self.Dmatrixwid * self.Dmatrixhei :]
for i in range(0, len(charstring), self.Dmatrixwid):
for j in range(i, i + self.Dmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Dmatrixori
- self.Dmatrixwid
+ j / self.Dmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Dmatrixori
+ j / self.Dmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Dmatrixori + j / self.Dmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Dmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Dmatrixori + j / self.Dmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Dmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Dmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Dmatrixori + j / self.Dmatrixwid)
)
* sizeratio
)
- l,
)
def writeE(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Ematrix)
charstring = (
self.Ematrixwid * self.Ematrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Ematrixwid * self.Ematrixhei:
charstring = charstring[0 - self.Ematrixwid * self.Ematrixhei :]
for i in range(0, len(charstring), self.Ematrixwid):
for j in range(i, i + self.Ematrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Ematrixori
- self.Ematrixwid
+ j / self.Ematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Ematrixori
+ j / self.Ematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Ematrixori + j / self.Ematrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Ematrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Ematrixori + j / self.Ematrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Ematrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Ematrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Ematrixori + j / self.Ematrixwid)
)
* sizeratio
)
- l,
)
def writeF(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Fmatrix)
charstring = (
self.Fmatrixwid * self.Fmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Fmatrixwid * self.Fmatrixhei:
charstring = charstring[0 - self.Fmatrixwid * self.Fmatrixhei :]
for i in range(0, len(charstring), self.Fmatrixwid):
for j in range(i, i + self.Fmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Fmatrixori
- self.Fmatrixwid
+ j / self.Fmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Fmatrixori
+ j / self.Fmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Fmatrixori + j / self.Fmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Fmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Fmatrixori + j / self.Fmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Fmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Fmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Fmatrixori + j / self.Fmatrixwid)
)
* sizeratio
)
- l,
)
def writeG(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Gmatrix)
charstring = (
self.Gmatrixwid * self.Gmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Gmatrixwid * self.Gmatrixhei:
charstring = charstring[0 - self.Gmatrixwid * self.Gmatrixhei :]
for i in range(0, len(charstring), self.Gmatrixwid):
for j in range(i, i + self.Gmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Gmatrixori
- self.Gmatrixwid
+ j / self.Gmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Gmatrixori
+ j / self.Gmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Gmatrixori + j / self.Gmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Gmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Gmatrixori + j / self.Gmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Gmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Gmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Gmatrixori + j / self.Gmatrixwid)
)
* sizeratio
)
- l,
)
def writeH(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Hmatrix)
charstring = (
self.Hmatrixwid * self.Hmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Hmatrixwid * self.Hmatrixhei:
charstring = charstring[0 - self.Hmatrixwid * self.Hmatrixhei :]
for i in range(0, len(charstring), self.Hmatrixwid):
for j in range(i, i + self.Hmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Hmatrixori
- self.Hmatrixwid
+ j / self.Hmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Hmatrixori
+ j / self.Hmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Hmatrixori + j / self.Hmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Hmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Hmatrixori + j / self.Hmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Hmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Hmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Hmatrixori + j / self.Hmatrixwid)
)
* sizeratio
)
- l,
)
def writeI(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Imatrix)
charstring = (
self.Imatrixwid * self.Imatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Imatrixwid * self.Imatrixhei:
charstring = charstring[0 - self.Imatrixwid * self.Imatrixhei :]
for i in range(0, len(charstring), self.Imatrixwid):
for j in range(i, i + self.Imatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Imatrixori
- self.Imatrixwid
+ j / self.Imatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Imatrixori
+ j / self.Imatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Imatrixori + j / self.Imatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Imatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Imatrixori + j / self.Imatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Imatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Imatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Imatrixori + j / self.Imatrixwid)
)
* sizeratio
)
- l,
)
def writeJ(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Jmatrix)
charstring = (
self.Jmatrixwid * self.Jmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Jmatrixwid * self.Jmatrixhei:
charstring = charstring[0 - self.Jmatrixwid * self.Jmatrixhei :]
for i in range(0, len(charstring), self.Jmatrixwid):
for j in range(i, i + self.Jmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Jmatrixori
- self.Jmatrixwid
+ j / self.Jmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Jmatrixori
+ j / self.Jmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Jmatrixori + j / self.Jmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Jmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Jmatrixori + j / self.Jmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Jmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Jmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Jmatrixori + j / self.Jmatrixwid)
)
* sizeratio
)
- l,
)
def writeK(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Kmatrix)
charstring = (
self.Kmatrixwid * self.Kmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Kmatrixwid * self.Kmatrixhei:
charstring = charstring[0 - self.Kmatrixwid * self.Kmatrixhei :]
for i in range(0, len(charstring), self.Kmatrixwid):
for j in range(i, i + self.Kmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Kmatrixori
- self.Kmatrixwid
+ j / self.Kmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Kmatrixori
+ j / self.Kmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Kmatrixori + j / self.Kmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Kmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Kmatrixori + j / self.Kmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Kmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Kmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Kmatrixori + j / self.Kmatrixwid)
)
* sizeratio
)
- l,
)
def writeL(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Lmatrix)
charstring = (
self.Lmatrixwid * self.Lmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Lmatrixwid * self.Lmatrixhei:
charstring = charstring[0 - self.Lmatrixwid * self.Lmatrixhei :]
for i in range(0, len(charstring), self.Lmatrixwid):
for j in range(i, i + self.Lmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Lmatrixori
- self.Lmatrixwid
+ j / self.Lmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Lmatrixori
+ j / self.Lmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Lmatrixori + j / self.Lmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Lmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Lmatrixori + j / self.Lmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Lmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Lmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Lmatrixori + j / self.Lmatrixwid)
)
* sizeratio
)
- l,
)
def writeM(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Mmatrix)
charstring = (
self.Mmatrixwid * self.Mmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Mmatrixwid * self.Mmatrixhei:
charstring = charstring[0 - self.Mmatrixwid * self.Mmatrixhei :]
for i in range(0, len(charstring), self.Mmatrixwid):
for j in range(i, i + self.Mmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Mmatrixori
- self.Mmatrixwid
+ j / self.Mmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Mmatrixori
+ j / self.Mmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Mmatrixori + j / self.Mmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Mmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Mmatrixori + j / self.Mmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Mmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Mmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Mmatrixori + j / self.Mmatrixwid)
)
* sizeratio
)
- l,
)
def writeN(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Nmatrix)
charstring = (
self.Nmatrixwid * self.Nmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Nmatrixwid * self.Nmatrixhei:
charstring = charstring[0 - self.Nmatrixwid * self.Nmatrixhei :]
for i in range(0, len(charstring), self.Nmatrixwid):
for j in range(i, i + self.Nmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Nmatrixori
- self.Nmatrixwid
+ j / self.Nmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Nmatrixori
+ j / self.Nmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Nmatrixori + j / self.Nmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Nmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Nmatrixori + j / self.Nmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Nmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Nmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Nmatrixori + j / self.Nmatrixwid)
)
* sizeratio
)
- l,
)
def writeO(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Omatrix)
charstring = (
self.Omatrixwid * self.Omatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Omatrixwid * self.Omatrixhei:
charstring = charstring[0 - self.Omatrixwid * self.Omatrixhei :]
for i in range(0, len(charstring), self.Omatrixwid):
for j in range(i, i + self.Omatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Omatrixori
- self.Omatrixwid
+ j / self.Omatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Omatrixori
+ j / self.Omatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Omatrixori + j / self.Omatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Omatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Omatrixori + j / self.Omatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Omatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Omatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Omatrixori + j / self.Omatrixwid)
)
* sizeratio
)
- l,
)
def writeP(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Pmatrix)
charstring = (
self.Pmatrixwid * self.Pmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Pmatrixwid * self.Pmatrixhei:
charstring = charstring[0 - self.Pmatrixwid * self.Pmatrixhei :]
for i in range(0, len(charstring), self.Pmatrixwid):
for j in range(i, i + self.Pmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Pmatrixori
- self.Pmatrixwid
+ j / self.Pmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Pmatrixori
+ j / self.Pmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Pmatrixori + j / self.Pmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Pmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Pmatrixori + j / self.Pmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Pmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Pmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Pmatrixori + j / self.Pmatrixwid)
)
* sizeratio
)
- l,
)
def writeQ(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Qmatrix)
charstring = (
self.Qmatrixwid * self.Qmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Qmatrixwid * self.Qmatrixhei:
charstring = charstring[0 - self.Qmatrixwid * self.Qmatrixhei :]
for i in range(0, len(charstring), self.Qmatrixwid):
for j in range(i, i + self.Qmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Qmatrixori
- self.Qmatrixwid
+ j / self.Qmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Qmatrixori
+ j / self.Qmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Qmatrixori + j / self.Qmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Qmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Qmatrixori + j / self.Qmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Qmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Qmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Qmatrixori + j / self.Qmatrixwid)
)
* sizeratio
)
- l,
)
def writeR(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Rmatrix)
charstring = (
self.Rmatrixwid * self.Rmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Rmatrixwid * self.Rmatrixhei:
charstring = charstring[0 - self.Rmatrixwid * self.Rmatrixhei :]
for i in range(0, len(charstring), self.Rmatrixwid):
for j in range(i, i + self.Rmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Rmatrixori
- self.Rmatrixwid
+ j / self.Rmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Rmatrixori
+ j / self.Rmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Rmatrixori + j / self.Rmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Rmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Rmatrixori + j / self.Rmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Rmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Rmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Rmatrixori + j / self.Rmatrixwid)
)
* sizeratio
)
- l,
)
def writeS(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Smatrix)
charstring = (
self.Smatrixwid * self.Smatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Smatrixwid * self.Smatrixhei:
charstring = charstring[0 - self.Smatrixwid * self.Smatrixhei :]
for i in range(0, len(charstring), self.Smatrixwid):
for j in range(i, i + self.Smatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Smatrixori
- self.Smatrixwid
+ j / self.Smatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Smatrixori
+ j / self.Smatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Smatrixori + j / self.Smatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Smatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Smatrixori + j / self.Smatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Smatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Smatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Smatrixori + j / self.Smatrixwid)
)
* sizeratio
)
- l,
)
def writeT(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Tmatrix)
charstring = (
self.Tmatrixwid * self.Tmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Tmatrixwid * self.Tmatrixhei:
charstring = charstring[0 - self.Tmatrixwid * self.Tmatrixhei :]
for i in range(0, len(charstring), self.Tmatrixwid):
for j in range(i, i + self.Tmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Tmatrixori
- self.Tmatrixwid
+ j / self.Tmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Tmatrixori
+ j / self.Tmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Tmatrixori + j / self.Tmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Tmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Tmatrixori + j / self.Tmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Tmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Tmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Tmatrixori + j / self.Tmatrixwid)
)
* sizeratio
)
- l,
)
def writeU(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Umatrix)
charstring = (
self.Umatrixwid * self.Umatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Umatrixwid * self.Umatrixhei:
charstring = charstring[0 - self.Umatrixwid * self.Umatrixhei :]
for i in range(0, len(charstring), self.Umatrixwid):
for j in range(i, i + self.Umatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Umatrixori
- self.Umatrixwid
+ j / self.Umatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Umatrixori
+ j / self.Umatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Umatrixori + j / self.Umatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Umatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Umatrixori + j / self.Umatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Umatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Umatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Umatrixori + j / self.Umatrixwid)
)
* sizeratio
)
- l,
)
def writeV(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Vmatrix)
charstring = (
self.Vmatrixwid * self.Vmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Vmatrixwid * self.Vmatrixhei:
charstring = charstring[0 - self.Vmatrixwid * self.Vmatrixhei :]
for i in range(0, len(charstring), self.Vmatrixwid):
for j in range(i, i + self.Vmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Vmatrixori
- self.Vmatrixwid
+ j / self.Vmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Vmatrixori
+ j / self.Vmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Vmatrixori + j / self.Vmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Vmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Vmatrixori + j / self.Vmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Vmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Vmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Vmatrixori + j / self.Vmatrixwid)
)
* sizeratio
)
- l,
)
def writeW(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Wmatrix)
charstring = (
self.Wmatrixwid * self.Wmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Wmatrixwid * self.Wmatrixhei:
charstring = charstring[0 - self.Wmatrixwid * self.Wmatrixhei :]
for i in range(0, len(charstring), self.Wmatrixwid):
for j in range(i, i + self.Wmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Wmatrixori
- self.Wmatrixwid
+ j / self.Wmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Wmatrixori
+ j / self.Wmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Wmatrixori + j / self.Wmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Wmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Wmatrixori + j / self.Wmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Wmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Wmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Wmatrixori + j / self.Wmatrixwid)
)
* sizeratio
)
- l,
)
def writeX(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Xmatrix)
charstring = (
self.Xmatrixwid * self.Xmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Xmatrixwid * self.Xmatrixhei:
charstring = charstring[0 - self.Xmatrixwid * self.Xmatrixhei :]
for i in range(0, len(charstring), self.Xmatrixwid):
for j in range(i, i + self.Xmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Xmatrixori
- self.Xmatrixwid
+ j / self.Xmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Xmatrixori
+ j / self.Xmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Xmatrixori + j / self.Xmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Xmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Xmatrixori + j / self.Xmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Xmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Xmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Xmatrixori + j / self.Xmatrixwid)
)
* sizeratio
)
- l,
)
def writeY(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Ymatrix)
charstring = (
self.Ymatrixwid * self.Ymatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Ymatrixwid * self.Ymatrixhei:
charstring = charstring[0 - self.Ymatrixwid * self.Ymatrixhei :]
for i in range(0, len(charstring), self.Ymatrixwid):
for j in range(i, i + self.Ymatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Ymatrixori
- self.Ymatrixwid
+ j / self.Ymatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Ymatrixori
+ j / self.Ymatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Ymatrixori + j / self.Ymatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Ymatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Ymatrixori + j / self.Ymatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Ymatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Ymatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Ymatrixori + j / self.Ymatrixwid)
)
* sizeratio
)
- l,
)
def writeZ(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.Zmatrix)
charstring = (
self.Zmatrixwid * self.Zmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.Zmatrixwid * self.Zmatrixhei:
charstring = charstring[0 - self.Zmatrixwid * self.Zmatrixhei :]
for i in range(0, len(charstring), self.Zmatrixwid):
for j in range(i, i + self.Zmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.Zmatrixori
- self.Zmatrixwid
+ j / self.Zmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.Zmatrixori
+ j / self.Zmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.Zmatrixori + j / self.Zmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.Zmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.Zmatrixori + j / self.Zmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.Zmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.Zmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.Zmatrixori + j / self.Zmatrixwid)
)
* sizeratio
)
- l,
)
def writeone(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.onematrix)
charstring = (
self.onematrixwid * self.onematrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.onematrixwid * self.onematrixhei:
charstring = charstring[0 - self.onematrixwid * self.onematrixhei :]
for i in range(0, len(charstring), self.onematrixwid):
for j in range(i, i + self.onematrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.onematrixori
- self.onematrixwid
+ j / self.onematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.onematrixori
+ j / self.onematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.onematrixori + j / self.onematrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.onematrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.onematrixori
+ j / self.onematrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x + int((j % self.onematrixwid) * sizeratio) - k
),
)
else:
self.plotPoint(
x + int((j % self.onematrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.onematrixori
+ j / self.onematrixwid
)
)
* sizeratio
)
- l,
)
def writetwo(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.twomatrix)
charstring = (
self.twomatrixwid * self.twomatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.twomatrixwid * self.twomatrixhei:
charstring = charstring[0 - self.twomatrixwid * self.twomatrixhei :]
for i in range(0, len(charstring), self.twomatrixwid):
for j in range(i, i + self.twomatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.twomatrixori
- self.twomatrixwid
+ j / self.twomatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.twomatrixori
+ j / self.twomatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.twomatrixori + j / self.twomatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.twomatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.twomatrixori
+ j / self.twomatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x + int((j % self.twomatrixwid) * sizeratio) - k
),
)
else:
self.plotPoint(
x + int((j % self.twomatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.twomatrixori
+ j / self.twomatrixwid
)
)
* sizeratio
)
- l,
)
def writethree(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.threematrix)
charstring = (
self.threematrixwid * self.threematrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.threematrixwid * self.threematrixhei:
charstring = charstring[0 - self.threematrixwid * self.threematrixhei :]
for i in range(0, len(charstring), self.threematrixwid):
for j in range(i, i + self.threematrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.threematrixori
- self.threematrixwid
+ j / self.threematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.threematrixori
+ j / self.threematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.threematrixori + j / self.threematrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.threematrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.threematrixori
+ j / self.threematrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.threematrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.threematrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.threematrixori
+ j / self.threematrixwid
)
)
* sizeratio
)
- l,
)
def writefour(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.fourmatrix)
charstring = (
self.fourmatrixwid * self.fourmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.fourmatrixwid * self.fourmatrixhei:
charstring = charstring[0 - self.fourmatrixwid * self.fourmatrixhei :]
for i in range(0, len(charstring), self.fourmatrixwid):
for j in range(i, i + self.fourmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.fourmatrixori
- self.fourmatrixwid
+ j / self.fourmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.fourmatrixori
+ j / self.fourmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.fourmatrixori + j / self.fourmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.fourmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.fourmatrixori
+ j / self.fourmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.fourmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.fourmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.fourmatrixori
+ j / self.fourmatrixwid
)
)
* sizeratio
)
- l,
)
def writefive(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.fivematrix)
charstring = (
self.fivematrixwid * self.fivematrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.fivematrixwid * self.fivematrixhei:
charstring = charstring[0 - self.fivematrixwid * self.fivematrixhei :]
for i in range(0, len(charstring), self.fivematrixwid):
for j in range(i, i + self.fivematrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.fivematrixori
- self.fivematrixwid
+ j / self.fivematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.fivematrixori
+ j / self.fivematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.fivematrixori + j / self.fivematrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.fivematrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.fivematrixori
+ j / self.fivematrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.fivematrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.fivematrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.fivematrixori
+ j / self.fivematrixwid
)
)
* sizeratio
)
- l,
)
def writesix(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.sixmatrix)
charstring = (
self.sixmatrixwid * self.sixmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.sixmatrixwid * self.sixmatrixhei:
charstring = charstring[0 - self.sixmatrixwid * self.sixmatrixhei :]
for i in range(0, len(charstring), self.sixmatrixwid):
for j in range(i, i + self.sixmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.sixmatrixori
- self.sixmatrixwid
+ j / self.sixmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.sixmatrixori
+ j / self.sixmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.sixmatrixori + j / self.sixmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.sixmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.sixmatrixori
+ j / self.sixmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x + int((j % self.sixmatrixwid) * sizeratio) - k
),
)
else:
self.plotPoint(
x + int((j % self.sixmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.sixmatrixori
+ j / self.sixmatrixwid
)
)
* sizeratio
)
- l,
)
def writeseven(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.sevenmatrix)
charstring = (
self.sevenmatrixwid * self.sevenmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.sevenmatrixwid * self.sevenmatrixhei:
charstring = charstring[0 - self.sevenmatrixwid * self.sevenmatrixhei :]
for i in range(0, len(charstring), self.sevenmatrixwid):
for j in range(i, i + self.sevenmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.sevenmatrixori
- self.sevenmatrixwid
+ j / self.sevenmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.sevenmatrixori
+ j / self.sevenmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.sevenmatrixori + j / self.sevenmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.sevenmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.sevenmatrixori
+ j / self.sevenmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.sevenmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.sevenmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.sevenmatrixori
+ j / self.sevenmatrixwid
)
)
* sizeratio
)
- l,
)
def writeeight(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.eightmatrix)
charstring = (
self.eightmatrixwid * self.eightmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.eightmatrixwid * self.eightmatrixhei:
charstring = charstring[0 - self.eightmatrixwid * self.eightmatrixhei :]
for i in range(0, len(charstring), self.eightmatrixwid):
for j in range(i, i + self.eightmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.eightmatrixori
- self.eightmatrixwid
+ j / self.eightmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.eightmatrixori
+ j / self.eightmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.eightmatrixori + j / self.eightmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.eightmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.eightmatrixori
+ j / self.eightmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.eightmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.eightmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.eightmatrixori
+ j / self.eightmatrixwid
)
)
* sizeratio
)
- l,
)
def writenine(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.ninematrix)
charstring = (
self.ninematrixwid * self.ninematrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.ninematrixwid * self.ninematrixhei:
charstring = charstring[0 - self.ninematrixwid * self.ninematrixhei :]
for i in range(0, len(charstring), self.ninematrixwid):
for j in range(i, i + self.ninematrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.ninematrixori
- self.ninematrixwid
+ j / self.ninematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.ninematrixori
+ j / self.ninematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.ninematrixori + j / self.ninematrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.ninematrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.ninematrixori
+ j / self.ninematrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.ninematrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.ninematrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.ninematrixori
+ j / self.ninematrixwid
)
)
* sizeratio
)
- l,
)
def writeten(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.tenmatrix)
charstring = (
self.tenmatrixwid * self.tenmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.tenmatrixwid * self.tenmatrixhei:
charstring = charstring[0 - self.tenmatrixwid * self.tenmatrixhei :]
for i in range(0, len(charstring), self.tenmatrixwid):
for j in range(i, i + self.tenmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.tenmatrixori
- self.tenmatrixwid
+ j / self.tenmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.tenmatrixori
+ j / self.tenmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.tenmatrixori + j / self.tenmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.tenmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.tenmatrixori
+ j / self.tenmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x + int((j % self.tenmatrixwid) * sizeratio) - k
),
)
else:
self.plotPoint(
x + int((j % self.tenmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.tenmatrixori
+ j / self.tenmatrixwid
)
)
* sizeratio
)
- l,
)
def write_(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self._matrix)
charstring = (
self._matrixwid * self._matrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self._matrixwid * self._matrixhei:
charstring = charstring[0 - self._matrixwid * self._matrixhei :]
for i in range(0, len(charstring), self._matrixwid):
for j in range(i, i + self._matrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self._matrixori
- self._matrixwid
+ j / self._matrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self._matrixori
+ j / self._matrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self._matrixori + j / self._matrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self._matrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self._matrixori + j / self._matrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self._matrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self._matrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self._matrixori + j / self._matrixwid)
)
* sizeratio
)
- l,
)
def writeminus(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.minusmatrix)
charstring = (
self.minusmatrixwid * self.minusmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.minusmatrixwid * self.minusmatrixhei:
charstring = charstring[0 - self.minusmatrixwid * self.minusmatrixhei :]
for i in range(0, len(charstring), self.minusmatrixwid):
for j in range(i, i + self.minusmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.minusmatrixori
- self.minusmatrixwid
+ j / self.minusmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.minusmatrixori
+ j / self.minusmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.minusmatrixori + j / self.minusmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.minusmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.minusmatrixori
+ j / self.minusmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.minusmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.minusmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.minusmatrixori
+ j / self.minusmatrixwid
)
)
* sizeratio
)
- l,
)
def writeplus(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.plusmatrix)
charstring = (
self.plusmatrixwid * self.plusmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.plusmatrixwid * self.plusmatrixhei:
charstring = charstring[0 - self.plusmatrixwid * self.plusmatrixhei :]
for i in range(0, len(charstring), self.plusmatrixwid):
for j in range(i, i + self.plusmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.plusmatrixori
- self.plusmatrixwid
+ j / self.plusmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.plusmatrixori
+ j / self.plusmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.plusmatrixori + j / self.plusmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.plusmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.plusmatrixori
+ j / self.plusmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.plusmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.plusmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.plusmatrixori
+ j / self.plusmatrixwid
)
)
* sizeratio
)
- l,
)
def writeequal(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.equalmatrix)
charstring = (
self.equalmatrixwid * self.equalmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.equalmatrixwid * self.equalmatrixhei:
charstring = charstring[0 - self.equalmatrixwid * self.equalmatrixhei :]
for i in range(0, len(charstring), self.equalmatrixwid):
for j in range(i, i + self.equalmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.equalmatrixori
- self.equalmatrixwid
+ j / self.equalmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.equalmatrixori
+ j / self.equalmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.equalmatrixori + j / self.equalmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.equalmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.equalmatrixori
+ j / self.equalmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.equalmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.equalmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.equalmatrixori
+ j / self.equalmatrixwid
)
)
* sizeratio
)
- l,
)
def writeexcl(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.exclmatrix)
charstring = (
self.exclmatrixwid * self.exclmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.exclmatrixwid * self.exclmatrixhei:
charstring = charstring[0 - self.exclmatrixwid * self.exclmatrixhei :]
for i in range(0, len(charstring), self.exclmatrixwid):
for j in range(i, i + self.exclmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.exclmatrixori
- self.exclmatrixwid
+ j / self.exclmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.exclmatrixori
+ j / self.exclmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.exclmatrixori + j / self.exclmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.exclmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.exclmatrixori
+ j / self.exclmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.exclmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.exclmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.exclmatrixori
+ j / self.exclmatrixwid
)
)
* sizeratio
)
- l,
)
def writeat(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.atmatrix)
charstring = (
self.atmatrixwid * self.atmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.atmatrixwid * self.atmatrixhei:
charstring = charstring[0 - self.atmatrixwid * self.atmatrixhei :]
for i in range(0, len(charstring), self.atmatrixwid):
for j in range(i, i + self.atmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.atmatrixori
- self.atmatrixwid
+ j / self.atmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.atmatrixori
+ j / self.atmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.atmatrixori + j / self.atmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.atmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (self.atmatrixori + j / self.atmatrixwid)
)
* sizeratio
)
- l,
2 * x
- (x + int((j % self.atmatrixwid) * sizeratio) - k),
)
else:
self.plotPoint(
x + int((j % self.atmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (self.atmatrixori + j / self.atmatrixwid)
)
* sizeratio
)
- l,
)
def writehash(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.hashmatrix)
charstring = (
self.hashmatrixwid * self.hashmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.hashmatrixwid * self.hashmatrixhei:
charstring = charstring[0 - self.hashmatrixwid * self.hashmatrixhei :]
for i in range(0, len(charstring), self.hashmatrixwid):
for j in range(i, i + self.hashmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.hashmatrixori
- self.hashmatrixwid
+ j / self.hashmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.hashmatrixori
+ j / self.hashmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.hashmatrixori + j / self.hashmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.hashmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.hashmatrixori
+ j / self.hashmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.hashmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.hashmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.hashmatrixori
+ j / self.hashmatrixwid
)
)
* sizeratio
)
- l,
)
def writedollar(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.dollarmatrix)
charstring = (
self.dollarmatrixwid * self.dollarmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.dollarmatrixwid * self.dollarmatrixhei:
charstring = charstring[0 - self.dollarmatrixwid * self.dollarmatrixhei :]
for i in range(0, len(charstring), self.dollarmatrixwid):
for j in range(i, i + self.dollarmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.dollarmatrixori
- self.dollarmatrixwid
+ j / self.dollarmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.dollarmatrixori
+ j / self.dollarmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.dollarmatrixori
+ j / self.dollarmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.dollarmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.dollarmatrixori
+ j / self.dollarmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.dollarmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.dollarmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.dollarmatrixori
+ j / self.dollarmatrixwid
)
)
* sizeratio
)
- l,
)
def writepercent(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.percentmatrix)
charstring = (
self.percentmatrixwid * self.percentmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.percentmatrixwid * self.percentmatrixhei:
charstring = charstring[0 - self.percentmatrixwid * self.percentmatrixhei :]
for i in range(0, len(charstring), self.percentmatrixwid):
for j in range(i, i + self.percentmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.percentmatrixori
- self.percentmatrixwid
+ j / self.percentmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.percentmatrixori
+ j / self.percentmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.percentmatrixori
+ j / self.percentmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.percentmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.percentmatrixori
+ j / self.percentmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.percentmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.percentmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.percentmatrixori
+ j / self.percentmatrixwid
)
)
* sizeratio
)
- l,
)
def writehat(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.hatmatrix)
charstring = (
self.hatmatrixwid * self.hatmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.hatmatrixwid * self.hatmatrixhei:
charstring = charstring[0 - self.hatmatrixwid * self.hatmatrixhei :]
for i in range(0, len(charstring), self.hatmatrixwid):
for j in range(i, i + self.hatmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.hatmatrixori
- self.hatmatrixwid
+ j / self.hatmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.hatmatrixori
+ j / self.hatmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.hatmatrixori + j / self.hatmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.hatmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.hatmatrixori
+ j / self.hatmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x + int((j % self.hatmatrixwid) * sizeratio) - k
),
)
else:
self.plotPoint(
x + int((j % self.hatmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.hatmatrixori
+ j / self.hatmatrixwid
)
)
* sizeratio
)
- l,
)
def writeamp(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.ampmatrix)
charstring = (
self.ampmatrixwid * self.ampmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.ampmatrixwid * self.ampmatrixhei:
charstring = charstring[0 - self.ampmatrixwid * self.ampmatrixhei :]
for i in range(0, len(charstring), self.ampmatrixwid):
for j in range(i, i + self.ampmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.ampmatrixori
- self.ampmatrixwid
+ j / self.ampmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.ampmatrixori
+ j / self.ampmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.ampmatrixori + j / self.ampmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.ampmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.ampmatrixori
+ j / self.ampmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x + int((j % self.ampmatrixwid) * sizeratio) - k
),
)
else:
self.plotPoint(
x + int((j % self.ampmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.ampmatrixori
+ j / self.ampmatrixwid
)
)
* sizeratio
)
- l,
)
def writestrix(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.strixmatrix)
charstring = (
self.strixmatrixwid * self.strixmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.strixmatrixwid * self.strixmatrixhei:
charstring = charstring[0 - self.strixmatrixwid * self.strixmatrixhei :]
for i in range(0, len(charstring), self.strixmatrixwid):
for j in range(i, i + self.strixmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.strixmatrixori
- self.strixmatrixwid
+ j / self.strixmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.strixmatrixori
+ j / self.strixmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.strixmatrixori + j / self.strixmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.strixmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.strixmatrixori
+ j / self.strixmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.strixmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.strixmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.strixmatrixori
+ j / self.strixmatrixwid
)
)
* sizeratio
)
- l,
)
def writeopencpar(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.opencparmatrix)
charstring = (
self.opencparmatrixwid * self.opencparmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.opencparmatrixwid * self.opencparmatrixhei:
charstring = charstring[
0 - self.opencparmatrixwid * self.opencparmatrixhei :
]
for i in range(0, len(charstring), self.opencparmatrixwid):
for j in range(i, i + self.opencparmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.opencparmatrixori
- self.opencparmatrixwid
+ j / self.opencparmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.opencparmatrixori
+ j / self.opencparmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.opencparmatrixori
+ j / self.opencparmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.opencparmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.opencparmatrixori
+ j / self.opencparmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.opencparmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.opencparmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.opencparmatrixori
+ j / self.opencparmatrixwid
)
)
* sizeratio
)
- l,
)
def writeclosecpar(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.closecparmatrix)
charstring = (
self.closecparmatrixwid * self.closecparmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.closecparmatrixwid * self.closecparmatrixhei:
charstring = charstring[
0 - self.closecparmatrixwid * self.closecparmatrixhei :
]
for i in range(0, len(charstring), self.closecparmatrixwid):
for j in range(i, i + self.closecparmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.closecparmatrixori
- self.closecparmatrixwid
+ j / self.closecparmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.closecparmatrixori
+ j / self.closecparmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.closecparmatrixori
+ j / self.closecparmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.closecparmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.closecparmatrixori
+ j / self.closecparmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.closecparmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.closecparmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.closecparmatrixori
+ j / self.closecparmatrixwid
)
)
* sizeratio
)
- l,
)
def writeopenspar(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.opensparmatrix)
charstring = (
self.opensparmatrixwid * self.opensparmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.opensparmatrixwid * self.opensparmatrixhei:
charstring = charstring[
0 - self.opensparmatrixwid * self.opensparmatrixhei :
]
for i in range(0, len(charstring), self.opensparmatrixwid):
for j in range(i, i + self.opensparmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.opensparmatrixori
- self.opensparmatrixwid
+ j / self.opensparmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.opensparmatrixori
+ j / self.opensparmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.opensparmatrixori
+ j / self.opensparmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.opensparmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.opensparmatrixori
+ j / self.opensparmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.opensparmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.opensparmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.opensparmatrixori
+ j / self.opensparmatrixwid
)
)
* sizeratio
)
- l,
)
def writeclosespar(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.closesparmatrix)
charstring = (
self.closesparmatrixwid * self.closesparmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.closesparmatrixwid * self.closesparmatrixhei:
charstring = charstring[
0 - self.closesparmatrixwid * self.closesparmatrixhei :
]
for i in range(0, len(charstring), self.closesparmatrixwid):
for j in range(i, i + self.closesparmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.closesparmatrixori
- self.closesparmatrixwid
+ j / self.closesparmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.closesparmatrixori
+ j / self.closesparmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.closesparmatrixori
+ j / self.closesparmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.closesparmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.closesparmatrixori
+ j / self.closesparmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.closesparmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.closesparmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.closesparmatrixori
+ j / self.closesparmatrixwid
)
)
* sizeratio
)
- l,
)
def writebackslash(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.backslashmatrix)
charstring = (
self.backslashmatrixwid * self.backslashmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.backslashmatrixwid * self.backslashmatrixhei:
charstring = charstring[
0 - self.backslashmatrixwid * self.backslashmatrixhei :
]
for i in range(0, len(charstring), self.backslashmatrixwid):
for j in range(i, i + self.backslashmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.backslashmatrixori
- self.backslashmatrixwid
+ j / self.backslashmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.backslashmatrixori
+ j / self.backslashmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.backslashmatrixori
+ j / self.backslashmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.backslashmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.backslashmatrixori
+ j / self.backslashmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.backslashmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.backslashmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.backslashmatrixori
+ j / self.backslashmatrixwid
)
)
* sizeratio
)
- l,
)
def writesemicol(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.semicolmatrix)
charstring = (
self.semicolmatrixwid * self.semicolmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.semicolmatrixwid * self.semicolmatrixhei:
charstring = charstring[0 - self.semicolmatrixwid * self.semicolmatrixhei :]
for i in range(0, len(charstring), self.semicolmatrixwid):
for j in range(i, i + self.semicolmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.semicolmatrixori
- self.semicolmatrixwid
+ j / self.semicolmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.semicolmatrixori
+ j / self.semicolmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.semicolmatrixori
+ j / self.semicolmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.semicolmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.semicolmatrixori
+ j / self.semicolmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.semicolmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.semicolmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.semicolmatrixori
+ j / self.semicolmatrixwid
)
)
* sizeratio
)
- l,
)
def writepost(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.postmatrix)
charstring = (
self.postmatrixwid * self.postmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.postmatrixwid * self.postmatrixhei:
charstring = charstring[0 - self.postmatrixwid * self.postmatrixhei :]
for i in range(0, len(charstring), self.postmatrixwid):
for j in range(i, i + self.postmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.postmatrixori
- self.postmatrixwid
+ j / self.postmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.postmatrixori
+ j / self.postmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.postmatrixori + j / self.postmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.postmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.postmatrixori
+ j / self.postmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.postmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.postmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.postmatrixori
+ j / self.postmatrixwid
)
)
* sizeratio
)
- l,
)
def writecomma(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.commamatrix)
charstring = (
self.commamatrixwid * self.commamatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.commamatrixwid * self.commamatrixhei:
charstring = charstring[0 - self.commamatrixwid * self.commamatrixhei :]
for i in range(0, len(charstring), self.commamatrixwid):
for j in range(i, i + self.commamatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.commamatrixori
- self.commamatrixwid
+ j / self.commamatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.commamatrixori
+ j / self.commamatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.commamatrixori + j / self.commamatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.commamatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.commamatrixori
+ j / self.commamatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.commamatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.commamatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.commamatrixori
+ j / self.commamatrixwid
)
)
* sizeratio
)
- l,
)
def writefullstop(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.fullstopmatrix)
charstring = (
self.fullstopmatrixwid * self.fullstopmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.fullstopmatrixwid * self.fullstopmatrixhei:
charstring = charstring[
0 - self.fullstopmatrixwid * self.fullstopmatrixhei :
]
for i in range(0, len(charstring), self.fullstopmatrixwid):
for j in range(i, i + self.fullstopmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.fullstopmatrixori
- self.fullstopmatrixwid
+ j / self.fullstopmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.fullstopmatrixori
+ j / self.fullstopmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.fullstopmatrixori
+ j / self.fullstopmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.fullstopmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.fullstopmatrixori
+ j / self.fullstopmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.fullstopmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.fullstopmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.fullstopmatrixori
+ j / self.fullstopmatrixwid
)
)
* sizeratio
)
- l,
)
def writeforslash(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.forslashmatrix)
charstring = (
self.forslashmatrixwid * self.forslashmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.forslashmatrixwid * self.forslashmatrixhei:
charstring = charstring[
0 - self.forslashmatrixwid * self.forslashmatrixhei :
]
for i in range(0, len(charstring), self.forslashmatrixwid):
for j in range(i, i + self.forslashmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.forslashmatrixori
- self.forslashmatrixwid
+ j / self.forslashmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.forslashmatrixori
+ j / self.forslashmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.forslashmatrixori
+ j / self.forslashmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.forslashmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.forslashmatrixori
+ j / self.forslashmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.forslashmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.forslashmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.forslashmatrixori
+ j / self.forslashmatrixwid
)
)
* sizeratio
)
- l,
)
def writelesthan(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.lesthanmatrix)
charstring = (
self.lesthanmatrixwid * self.lesthanmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.lesthanmatrixwid * self.lesthanmatrixhei:
charstring = charstring[0 - self.lesthanmatrixwid * self.lesthanmatrixhei :]
for i in range(0, len(charstring), self.lesthanmatrixwid):
for j in range(i, i + self.lesthanmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.lesthanmatrixori
- self.lesthanmatrixwid
+ j / self.lesthanmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.lesthanmatrixori
+ j / self.lesthanmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.lesthanmatrixori
+ j / self.lesthanmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.lesthanmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.lesthanmatrixori
+ j / self.lesthanmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.lesthanmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.lesthanmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.lesthanmatrixori
+ j / self.lesthanmatrixwid
)
)
* sizeratio
)
- l,
)
def writegreatthan(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.greatthanmatrix)
charstring = (
self.greatthanmatrixwid * self.greatthanmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.greatthanmatrixwid * self.greatthanmatrixhei:
charstring = charstring[
0 - self.greatthanmatrixwid * self.greatthanmatrixhei :
]
for i in range(0, len(charstring), self.greatthanmatrixwid):
for j in range(i, i + self.greatthanmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.greatthanmatrixori
- self.greatthanmatrixwid
+ j / self.greatthanmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.greatthanmatrixori
+ j / self.greatthanmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.greatthanmatrixori
+ j / self.greatthanmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.greatthanmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.greatthanmatrixori
+ j / self.greatthanmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.greatthanmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.greatthanmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.greatthanmatrixori
+ j / self.greatthanmatrixwid
)
)
* sizeratio
)
- l,
)
def writequestion(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.questionmatrix)
charstring = (
self.questionmatrixwid * self.questionmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.questionmatrixwid * self.questionmatrixhei:
charstring = charstring[
0 - self.questionmatrixwid * self.questionmatrixhei :
]
for i in range(0, len(charstring), self.questionmatrixwid):
for j in range(i, i + self.questionmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.questionmatrixori
- self.questionmatrixwid
+ j / self.questionmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.questionmatrixori
+ j / self.questionmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.questionmatrixori
+ j / self.questionmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.questionmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.questionmatrixori
+ j / self.questionmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.questionmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.questionmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.questionmatrixori
+ j / self.questionmatrixwid
)
)
* sizeratio
)
- l,
)
def writecolon(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.colonmatrix)
charstring = (
self.colonmatrixwid * self.colonmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.colonmatrixwid * self.colonmatrixhei:
charstring = charstring[0 - self.colonmatrixwid * self.colonmatrixhei :]
for i in range(0, len(charstring), self.colonmatrixwid):
for j in range(i, i + self.colonmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.colonmatrixori
- self.colonmatrixwid
+ j / self.colonmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.colonmatrixori
+ j / self.colonmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.colonmatrixori + j / self.colonmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.colonmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.colonmatrixori
+ j / self.colonmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.colonmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.colonmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.colonmatrixori
+ j / self.colonmatrixwid
)
)
* sizeratio
)
- l,
)
def writequote(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.quotematrix)
charstring = (
self.quotematrixwid * self.quotematrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.quotematrixwid * self.quotematrixhei:
charstring = charstring[0 - self.quotematrixwid * self.quotematrixhei :]
for i in range(0, len(charstring), self.quotematrixwid):
for j in range(i, i + self.quotematrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.quotematrixori
- self.quotematrixwid
+ j / self.quotematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.quotematrixori
+ j / self.quotematrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.quotematrixori + j / self.quotematrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.quotematrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.quotematrixori
+ j / self.quotematrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.quotematrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.quotematrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.quotematrixori
+ j / self.quotematrixwid
)
)
* sizeratio
)
- l,
)
def writeopensquig(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.opensquigmatrix)
charstring = (
self.opensquigmatrixwid * self.opensquigmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.opensquigmatrixwid * self.opensquigmatrixhei:
charstring = charstring[
0 - self.opensquigmatrixwid * self.opensquigmatrixhei :
]
for i in range(0, len(charstring), self.opensquigmatrixwid):
for j in range(i, i + self.opensquigmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.opensquigmatrixori
- self.opensquigmatrixwid
+ j / self.opensquigmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.opensquigmatrixori
+ j / self.opensquigmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.opensquigmatrixori
+ j / self.opensquigmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.opensquigmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.opensquigmatrixori
+ j / self.opensquigmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.opensquigmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.opensquigmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.opensquigmatrixori
+ j / self.opensquigmatrixwid
)
)
* sizeratio
)
- l,
)
def writeclosesquig(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.closesquigmatrix)
charstring = (
self.closesquigmatrixwid * self.closesquigmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.closesquigmatrixwid * self.closesquigmatrixhei:
charstring = charstring[
0 - self.closesquigmatrixwid * self.closesquigmatrixhei :
]
for i in range(0, len(charstring), self.closesquigmatrixwid):
for j in range(i, i + self.closesquigmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.closesquigmatrixori
- self.closesquigmatrixwid
+ j / self.closesquigmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.closesquigmatrixori
+ j / self.closesquigmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(
self.closesquigmatrixori
+ j / self.closesquigmatrixwid
)
* sizeratio
)
- l
- self.defsize,
x
+ int((j % self.closesquigmatrixwid) * sizeratio)
- k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.closesquigmatrixori
+ j / self.closesquigmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int(
(j % self.closesquigmatrixwid) * sizeratio
)
- k
),
)
else:
self.plotPoint(
x
+ int((j % self.closesquigmatrixwid) * sizeratio)
- k,
y
+ int(
(
self.defsize
- (
self.closesquigmatrixori
+ j / self.closesquigmatrixwid
)
)
* sizeratio
)
- l,
)
def writebar(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.barmatrix)
charstring = (
self.barmatrixwid * self.barmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.barmatrixwid * self.barmatrixhei:
charstring = charstring[0 - self.barmatrixwid * self.barmatrixhei :]
for i in range(0, len(charstring), self.barmatrixwid):
for j in range(i, i + self.barmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.barmatrixori
- self.barmatrixwid
+ j / self.barmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.barmatrixori
+ j / self.barmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.barmatrixori + j / self.barmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.barmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.barmatrixori
+ j / self.barmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x + int((j % self.barmatrixwid) * sizeratio) - k
),
)
else:
self.plotPoint(
x + int((j % self.barmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.barmatrixori
+ j / self.barmatrixwid
)
)
* sizeratio
)
- l,
)
def writemisc(self, x, y, size, ital, bold, sans, rotate):
xpos = x
sizeratio = size * 1.0 / self.defsize
sizeint = (size - 1) / self.defsize + 1
if bold:
sizeint = sizeint + 2 + int(sizeratio)
charstring = binar(self.miscmatrix)
charstring = (
self.miscmatrixwid * self.miscmatrixhei - len(charstring)
) * "0" + charstring
if len(charstring) > self.miscmatrixwid * self.miscmatrixhei:
charstring = charstring[0 - self.miscmatrixwid * self.miscmatrixhei :]
for i in range(0, len(charstring), self.miscmatrixwid):
for j in range(i, i + self.miscmatrixwid):
if charstring[j] == "1":
for k in range(sizeint):
for l in range(sizeint):
if ital and rotate == -1:
x = (
xpos
+ (
int(
(
(
self.miscmatrixori
- self.miscmatrixwid
+ j / self.miscmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
elif ital:
x = (
xpos
+ (
int(
(
self.defsize
- (
self.miscmatrixori
+ j / self.miscmatrixwid
)
)
* sizeratio
)
- l
)
/ 3
)
if rotate == 1:
self.plotPoint(
y
+ int(
(self.miscmatrixori + j / self.miscmatrixwid)
* sizeratio
)
- l
- self.defsize,
x + int((j % self.miscmatrixwid) * sizeratio) - k,
)
elif rotate == -1:
self.plotPoint(
y
+ int(
(
self.defsize
- (
self.miscmatrixori
+ j / self.miscmatrixwid
)
)
* sizeratio
)
- l,
2 * x
- (
x
+ int((j % self.miscmatrixwid) * sizeratio)
- k
),
)
else:
self.plotPoint(
x + int((j % self.miscmatrixwid) * sizeratio) - k,
y
+ int(
(
self.defsize
- (
self.miscmatrixori
+ j / self.miscmatrixwid
)
)
* sizeratio
)
- l,
)
def writeString(
self, thestring, x, y, size, ital=False, bold=False, rotate=0, justify="left"
):
xpos = x
if rotate != 0:
xpos, y = y, xpos
if justify == "right":
xpos -= self.lengthString(thestring, size)
for i in thestring:
if i == "a":
self.writea(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.amatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.amatrixwid * size / self.defsize + 1 + size / 20
elif i == "b":
self.writeb(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.bmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.bmatrixwid * size / self.defsize + 1 + size / 20
elif i == "c":
self.writec(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.cmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.cmatrixwid * size / self.defsize + 1 + size / 20
elif i == "d":
self.writed(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.dmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.dmatrixwid * size / self.defsize + 1 + size / 20
elif i == "e":
self.writee(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.ematrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.ematrixwid * size / self.defsize + 1 + size / 20
elif i == "f":
self.writef(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.fmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.fmatrixwid * size / self.defsize + 1 + size / 20
elif i == "g":
self.writeg(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.gmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.gmatrixwid * size / self.defsize + 1 + size / 20
elif i == "h":
self.writeh(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.hmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.hmatrixwid * size / self.defsize + 1 + size / 20
elif i == "i":
self.writei(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.imatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.imatrixwid * size / self.defsize + 1 + size / 20
elif i == "j":
self.writej(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.jmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.jmatrixwid * size / self.defsize + 1 + size / 20
elif i == "k":
self.writek(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.kmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.kmatrixwid * size / self.defsize + 1 + size / 20
elif i == "l":
self.writel(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.lmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.lmatrixwid * size / self.defsize + 1 + size / 20
elif i == "m":
self.writem(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.mmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.mmatrixwid * size / self.defsize + 1 + size / 20
elif i == "n":
self.writen(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.nmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.nmatrixwid * size / self.defsize + 1 + size / 20
elif i == "o":
self.writeo(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.omatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.omatrixwid * size / self.defsize + 1 + size / 20
elif i == "p":
self.writep(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.pmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.pmatrixwid * size / self.defsize + 1 + size / 20
elif i == "q":
self.writeq(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.qmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.qmatrixwid * size / self.defsize + 1 + size / 20
elif i == "r":
self.writer(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.rmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.rmatrixwid * size / self.defsize + 1 + size / 20
elif i == "s":
self.writes(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.smatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.smatrixwid * size / self.defsize + 1 + size / 20
elif i == "t":
self.writet(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.tmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.tmatrixwid * size / self.defsize + 1 + size / 20
elif i == "u":
self.writeu(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.umatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.umatrixwid * size / self.defsize + 1 + size / 20
elif i == "v":
self.writev(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.vmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.vmatrixwid * size / self.defsize + 1 + size / 20
elif i == "w":
self.writew(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.wmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.wmatrixwid * size / self.defsize + 1 + size / 20
elif i == "x":
self.writex(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.xmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.xmatrixwid * size / self.defsize + 1 + size / 20
elif i == "y":
self.writey(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.ymatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.ymatrixwid * size / self.defsize + 1 + size / 20
elif i == "z":
self.writez(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.zmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.zmatrixwid * size / self.defsize + 1 + size / 20
elif i == "A":
self.writeA(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Amatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Amatrixwid * size / self.defsize + 1 + size / 20
elif i == "B":
self.writeB(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Bmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Bmatrixwid * size / self.defsize + 1 + size / 20
elif i == "C":
self.writeC(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Cmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Cmatrixwid * size / self.defsize + 1 + size / 20
elif i == "D":
self.writeD(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Dmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Dmatrixwid * size / self.defsize + 1 + size / 20
elif i == "E":
self.writeE(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Ematrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Ematrixwid * size / self.defsize + 1 + size / 20
elif i == "F":
self.writeF(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Fmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Fmatrixwid * size / self.defsize + 1 + size / 20
elif i == "G":
self.writeG(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Gmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Gmatrixwid * size / self.defsize + 1 + size / 20
elif i == "H":
self.writeH(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Hmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Hmatrixwid * size / self.defsize + 1 + size / 20
elif i == "I":
self.writeI(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Imatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Imatrixwid * size / self.defsize + 1 + size / 20
elif i == "J":
self.writeJ(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Jmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Jmatrixwid * size / self.defsize + 1 + size / 20
elif i == "K":
self.writeK(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Kmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Kmatrixwid * size / self.defsize + 1 + size / 20
elif i == "L":
self.writeL(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Lmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Lmatrixwid * size / self.defsize + 1 + size / 20
elif i == "M":
self.writeM(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Mmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Mmatrixwid * size / self.defsize + 1 + size / 20
elif i == "N":
self.writeN(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Nmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Nmatrixwid * size / self.defsize + 1 + size / 20
elif i == "O":
self.writeO(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Omatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Omatrixwid * size / self.defsize + 1 + size / 20
elif i == "P":
self.writeP(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Pmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Pmatrixwid * size / self.defsize + 1 + size / 20
elif i == "Q":
self.writeQ(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Qmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Qmatrixwid * size / self.defsize + 1 + size / 20
elif i == "R":
self.writeR(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Rmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Rmatrixwid * size / self.defsize + 1 + size / 20
elif i == "S":
self.writeS(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Smatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Smatrixwid * size / self.defsize + 1 + size / 20
elif i == "T":
self.writeT(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Tmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Tmatrixwid * size / self.defsize + 1 + size / 20
elif i == "U":
self.writeU(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Umatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Umatrixwid * size / self.defsize + 1 + size / 20
elif i == "V":
self.writeV(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Vmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Vmatrixwid * size / self.defsize + 1 + size / 20
elif i == "W":
self.writeW(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Wmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Wmatrixwid * size / self.defsize + 1 + size / 20
elif i == "X":
self.writeX(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Xmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Xmatrixwid * size / self.defsize + 1 + size / 20
elif i == "Y":
self.writeY(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Ymatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Ymatrixwid * size / self.defsize + 1 + size / 20
elif i == "Z":
self.writeZ(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.Zmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.Zmatrixwid * size / self.defsize + 1 + size / 20
elif i == "1":
self.writeone(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.onematrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.onematrixwid * size / self.defsize + 1 + size / 20
elif i == "2":
self.writetwo(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.twomatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.twomatrixwid * size / self.defsize + 1 + size / 20
elif i == "3":
self.writethree(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.threematrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.threematrixwid * size / self.defsize + 1 + size / 20
elif i == "4":
self.writefour(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.fourmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.fourmatrixwid * size / self.defsize + 1 + size / 20
elif i == "5":
self.writefive(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.fivematrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.fivematrixwid * size / self.defsize + 1 + size / 20
elif i == "6":
self.writesix(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.sixmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.sixmatrixwid * size / self.defsize + 1 + size / 20
elif i == "7":
self.writeseven(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.sevenmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.sevenmatrixwid * size / self.defsize + 1 + size / 20
elif i == "8":
self.writeeight(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.eightmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.eightmatrixwid * size / self.defsize + 1 + size / 20
elif i == "9":
self.writenine(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.ninematrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.ninematrixwid * size / self.defsize + 1 + size / 20
elif i == "0":
self.writeten(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.tenmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.tenmatrixwid * size / self.defsize + 1 + size / 20
elif i == "_":
self.write_(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self._matrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self._matrixwid * size / self.defsize + 1 + size / 20
elif i == "-":
self.writeminus(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.minusmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.minusmatrixwid * size / self.defsize + 1 + size / 20
elif i == "+":
self.writeplus(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.plusmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.plusmatrixwid * size / self.defsize + 1 + size / 20
elif i == "=":
self.writeequal(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.equalmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.equalmatrixwid * size / self.defsize + 1 + size / 20
elif i == "!":
self.writeexcl(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.exclmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.exclmatrixwid * size / self.defsize + 1 + size / 20
elif i == "@":
self.writeat(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.atmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.atmatrixwid * size / self.defsize + 1 + size / 20
elif i == "#":
self.writehash(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.hashmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.hashmatrixwid * size / self.defsize + 1 + size / 20
elif i == "$":
self.writedollar(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.dollarmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.dollarmatrixwid * size / self.defsize + 1 + size / 20
elif i == "%":
self.writepercent(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.percentmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.percentmatrixwid * size / self.defsize + 1 + size / 20
elif i == "^":
self.writehat(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.hatmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.hatmatrixwid * size / self.defsize + 1 + size / 20
elif i == "&":
self.writeamp(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.ampmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.ampmatrixwid * size / self.defsize + 1 + size / 20
elif i == "*":
self.writestrix(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.strixmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.strixmatrixwid * size / self.defsize + 1 + size / 20
elif i == "(":
self.writeopencpar(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.opencparmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.opencparmatrixwid * size / self.defsize + 1 + size / 20
elif i == ")":
self.writeclosecpar(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= (
self.closecparmatrixwid * size / self.defsize + 1 + size / 20
)
else:
xpos += (
self.closecparmatrixwid * size / self.defsize + 1 + size / 20
)
elif i == "[":
self.writeopenspar(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.opensparmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.opensparmatrixwid * size / self.defsize + 1 + size / 20
elif i == "]":
self.writeclosespar(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= (
self.closesparmatrixwid * size / self.defsize + 1 + size / 20
)
else:
xpos += (
self.closesparmatrixwid * size / self.defsize + 1 + size / 20
)
elif i == "\\":
self.writebackslash(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= (
self.backslashmatrixwid * size / self.defsize + 1 + size / 20
)
else:
xpos += (
self.backslashmatrixwid * size / self.defsize + 1 + size / 20
)
elif i == ";":
self.writesemicol(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.semicolmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.semicolmatrixwid * size / self.defsize + 1 + size / 20
elif i == "'":
self.writepost(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.postmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.postmatrixwid * size / self.defsize + 1 + size / 20
elif i == ",":
self.writecomma(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.commamatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.commamatrixwid * size / self.defsize + 1 + size / 20
elif i == ".":
self.writefullstop(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.fullstopmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.fullstopmatrixwid * size / self.defsize + 1 + size / 20
elif i == "/":
self.writeforslash(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.forslashmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.forslashmatrixwid * size / self.defsize + 1 + size / 20
elif i == "<":
self.writelesthan(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.lesthanmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.lesthanmatrixwid * size / self.defsize + 1 + size / 20
elif i == ">":
self.writegreatthan(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= (
self.greatthanmatrixwid * size / self.defsize + 1 + size / 20
)
else:
xpos += (
self.greatthanmatrixwid * size / self.defsize + 1 + size / 20
)
elif i == "?":
self.writequestion(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.questionmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.questionmatrixwid * size / self.defsize + 1 + size / 20
elif i == ":":
self.writecolon(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.colonmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.colonmatrixwid * size / self.defsize + 1 + size / 20
elif i == '"':
self.writequote(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.quotematrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.quotematrixwid * size / self.defsize + 1 + size / 20
elif i == "{":
self.writeopensquig(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= (
self.opensquigmatrixwid * size / self.defsize + 1 + size / 20
)
else:
xpos += (
self.opensquigmatrixwid * size / self.defsize + 1 + size / 20
)
elif i == "}":
self.writeclosesquig(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= (
self.closesquigmatrixwid * size / self.defsize + 1 + size / 20
)
else:
xpos += (
self.closesquigmatrixwid * size / self.defsize + 1 + size / 20
)
elif i == "|":
self.writebar(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.barmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.barmatrixwid * size / self.defsize + 1 + size / 20
elif i == " ":
if rotate == -1:
xpos -= 24 * size / self.defsize + 1 + size / 20
else:
xpos += 24 * size / self.defsize + 1 + size / 20
else:
self.writemisc(xpos, y, size, ital, bold, True, rotate)
if rotate == -1:
xpos -= self.miscmatrixwid * size / self.defsize + 1 + size / 20
else:
xpos += self.miscmatrixwid * size / self.defsize + 1 + size / 20
def lengthString(self, theString, size):
xpos = 0
for i in theString:
if i == "a":
xpos += self.amatrixwid * size / self.defsize + 1 + size / 20
elif i == "b":
xpos += self.bmatrixwid * size / self.defsize + 1 + size / 20
elif i == "c":
xpos += self.cmatrixwid * size / self.defsize + 1 + size / 20
elif i == "d":
xpos += self.dmatrixwid * size / self.defsize + 1 + size / 20
elif i == "e":
xpos += self.ematrixwid * size / self.defsize + 1 + size / 20
elif i == "f":
xpos += self.fmatrixwid * size / self.defsize + 1 + size / 20
elif i == "g":
xpos += self.gmatrixwid * size / self.defsize + 1 + size / 20
elif i == "h":
xpos += self.hmatrixwid * size / self.defsize + 1 + size / 20
elif i == "i":
xpos += self.imatrixwid * size / self.defsize + 1 + size / 20
elif i == "j":
xpos += self.jmatrixwid * size / self.defsize + 1 + size / 20
elif i == "k":
xpos += self.kmatrixwid * size / self.defsize + 1 + size / 20
elif i == "l":
xpos += self.lmatrixwid * size / self.defsize + 1 + size / 20
elif i == "m":
xpos += self.mmatrixwid * size / self.defsize + 1 + size / 20
elif i == "n":
xpos += self.nmatrixwid * size / self.defsize + 1 + size / 20
elif i == "o":
xpos += self.omatrixwid * size / self.defsize + 1 + size / 20
elif i == "p":
xpos += self.pmatrixwid * size / self.defsize + 1 + size / 20
elif i == "q":
xpos += self.qmatrixwid * size / self.defsize + 1 + size / 20
elif i == "r":
xpos += self.rmatrixwid * size / self.defsize + 1 + size / 20
elif i == "s":
xpos += self.smatrixwid * size / self.defsize + 1 + size / 20
elif i == "t":
xpos += self.tmatrixwid * size / self.defsize + 1 + size / 20
elif i == "u":
xpos += self.umatrixwid * size / self.defsize + 1 + size / 20
elif i == "v":
xpos += self.vmatrixwid * size / self.defsize + 1 + size / 20
elif i == "w":
xpos += self.wmatrixwid * size / self.defsize + 1 + size / 20
elif i == "x":
xpos += self.xmatrixwid * size / self.defsize + 1 + size / 20
elif i == "y":
xpos += self.ymatrixwid * size / self.defsize + 1 + size / 20
elif i == "z":
xpos += self.zmatrixwid * size / self.defsize + 1 + size / 20
elif i == "A":
xpos += self.Amatrixwid * size / self.defsize + 1 + size / 20
elif i == "B":
xpos += self.Bmatrixwid * size / self.defsize + 1 + size / 20
elif i == "C":
xpos += self.Cmatrixwid * size / self.defsize + 1 + size / 20
elif i == "D":
xpos += self.Dmatrixwid * size / self.defsize + 1 + size / 20
elif i == "E":
xpos += self.Ematrixwid * size / self.defsize + 1 + size / 20
elif i == "F":
xpos += self.Fmatrixwid * size / self.defsize + 1 + size / 20
elif i == "G":
xpos += self.Gmatrixwid * size / self.defsize + 1 + size / 20
elif i == "H":
xpos += self.Hmatrixwid * size / self.defsize + 1 + size / 20
elif i == "I":
xpos += self.Imatrixwid * size / self.defsize + 1 + size / 20
elif i == "J":
xpos += self.Jmatrixwid * size / self.defsize + 1 + size / 20
elif i == "K":
xpos += self.Kmatrixwid * size / self.defsize + 1 + size / 20
elif i == "L":
xpos += self.Lmatrixwid * size / self.defsize + 1 + size / 20
elif i == "M":
xpos += self.Mmatrixwid * size / self.defsize + 1 + size / 20
elif i == "N":
xpos += self.Nmatrixwid * size / self.defsize + 1 + size / 20
elif i == "O":
xpos += self.Omatrixwid * size / self.defsize + 1 + size / 20
elif i == "P":
xpos += self.Pmatrixwid * size / self.defsize + 1 + size / 20
elif i == "Q":
xpos += self.Qmatrixwid * size / self.defsize + 1 + size / 20
elif i == "R":
xpos += self.Rmatrixwid * size / self.defsize + 1 + size / 20
elif i == "S":
xpos += self.Smatrixwid * size / self.defsize + 1 + size / 20
elif i == "T":
xpos += self.Tmatrixwid * size / self.defsize + 1 + size / 20
elif i == "U":
xpos += self.Umatrixwid * size / self.defsize + 1 + size / 20
elif i == "V":
xpos += self.Vmatrixwid * size / self.defsize + 1 + size / 20
elif i == "W":
xpos += self.Wmatrixwid * size / self.defsize + 1 + size / 20
elif i == "X":
xpos += self.Xmatrixwid * size / self.defsize + 1 + size / 20
elif i == "Y":
xpos += self.Ymatrixwid * size / self.defsize + 1 + size / 20
elif i == "Z":
xpos += self.Zmatrixwid * size / self.defsize + 1 + size / 20
elif i == "1":
xpos += self.onematrixwid * size / self.defsize + 1 + size / 20
elif i == "2":
xpos += self.twomatrixwid * size / self.defsize + 1 + size / 20
elif i == "3":
xpos += self.threematrixwid * size / self.defsize + 1 + size / 20
elif i == "4":
xpos += self.fourmatrixwid * size / self.defsize + 1 + size / 20
elif i == "5":
xpos += self.fivematrixwid * size / self.defsize + 1 + size / 20
elif i == "6":
xpos += self.sixmatrixwid * size / self.defsize + 1 + size / 20
elif i == "7":
xpos += self.sevenmatrixwid * size / self.defsize + 1 + size / 20
elif i == "8":
xpos += self.eightmatrixwid * size / self.defsize + 1 + size / 20
elif i == "9":
xpos += self.ninematrixwid * size / self.defsize + 1 + size / 20
elif i == "0":
xpos += self.tenmatrixwid * size / self.defsize + 1 + size / 20
elif i == "_":
xpos += self._matrixwid * size / self.defsize + 1 + size / 20
elif i == "-":
xpos += self.minusmatrixwid * size / self.defsize + 1 + size / 20
elif i == "+":
xpos += self.plusmatrixwid * size / self.defsize + 1 + size / 20
elif i == "=":
xpos += self.equalmatrixwid * size / self.defsize + 1 + size / 20
elif i == "!":
xpos += self.exclmatrixwid * size / self.defsize + 1 + size / 20
elif i == "@":
xpos += self.atmatrixwid * size / self.defsize + 1 + size / 20
elif i == "#":
xpos += self.hashmatrixwid * size / self.defsize + 1 + size / 20
elif i == "$":
xpos += self.dollarmatrixwid * size / self.defsize + 1 + size / 20
elif i == "%":
xpos += self.percentmatrixwid * size / self.defsize + 1 + size / 20
elif i == "^":
xpos += self.hatmatrixwid * size / self.defsize + 1 + size / 20
elif i == "&":
xpos += self.ampmatrixwid * size / self.defsize + 1 + size / 20
elif i == "*":
xpos += self.strixmatrixwid * size / self.defsize + 1 + size / 20
elif i == "(":
xpos += self.opencparmatrixwid * size / self.defsize + 1 + size / 20
elif i == ")":
xpos += self.closecparmatrixwid * size / self.defsize + 1 + size / 20
elif i == "[":
xpos += self.opensparmatrixwid * size / self.defsize + 1 + size / 20
elif i == "]":
xpos += self.closesparmatrixwid * size / self.defsize + 1 + size / 20
elif i == "\\":
xpos += self.backslashmatrixwid * size / self.defsize + 1 + size / 20
elif i == ";":
xpos += self.semicolmatrixwid * size / self.defsize + 1 + size / 20
elif i == "'":
xpos += self.postmatrixwid * size / self.defsize + 1 + size / 20
elif i == ",":
xpos += self.commamatrixwid * size / self.defsize + 1 + size / 20
elif i == ".":
xpos += self.fullstopmatrixwid * size / self.defsize + 1 + size / 20
elif i == "/":
xpos += self.forslashmatrixwid * size / self.defsize + 1 + size / 20
elif i == "<":
xpos += self.lesthanmatrixwid * size / self.defsize + 1 + size / 20
elif i == ">":
xpos += self.greatthanmatrixwid * size / self.defsize + 1 + size / 20
elif i == "?":
xpos += self.questionmatrixwid * size / self.defsize + 1 + size / 20
elif i == ":":
xpos += self.colonmatrixwid * size / self.defsize + 1 + size / 20
elif i == '"':
xpos += self.quotematrixwid * size / self.defsize + 1 + size / 20
elif i == "{":
xpos += self.opensquigmatrixwid * size / self.defsize + 1 + size / 20
elif i == "}":
xpos += self.closesquigmatrixwid * size / self.defsize + 1 + size / 20
elif i == "|":
xpos += self.barmatrixwid * size / self.defsize + 1 + size / 20
elif i == " ":
xpos += 24 * size / self.defsize + 1 + size / 20
else:
xpos += self.miscmatrixwid * size / self.defsize + 1 + size / 20
return xpos
def setDefaultPenColor(self):
self.currentPen = self.fg
def setPenColor(self, p):
oldColor = self.currentPen
# look for c in palette
pnum = p.toLong()
try:
self.currentPen = self.palette.index(pnum)
except ValueError:
if len(self.palette) < 256:
self.palette.append(pnum)
self.currentPen = len(self.palette) - 1
else:
self.currentPen = self.fg
return Color.fromLong(self.palette[oldColor])
def getPenColor(self):
return Color.fromLong(self.palette[self.currentPen])
def plotPoint(self, x, y):
if 0 <= x < self.wd and 0 <= y < self.ht:
x = int(x)
y = int(y)
self.bitarray[y][x] = self.currentPen
def drawOutRect(self, x, y, wid, ht, lt):
if wid == 1:
if lt != 0:
temp = self.getPenColor()
self.setPenColor(Color.BLACK)
self.drawLine(x, y, x, y + ht - 1)
self.setPenColor(temp)
else:
self.drawLine(x, y, x, y + ht - 1)
else:
if lt > wid / 2 and lt != 1:
lt = wid / 2
if lt > ht / 2 and lt != 1:
lt = ht / 2
self.drawRect(x, y, wid, ht, True)
temp = self.getPenColor()
self.setPenColor(Color.BLACK)
for i in range(lt):
self.drawRect(x + i, y + i, wid - i, ht - i)
self.setPenColor(temp)
def drawRect(self, x, y, wid, ht, fill=False):
x = int(x)
y = int(y)
cury = y
# subtract one for line width
wid -= 1
ht -= 1
self.drawLine(x, y, x + wid, y)
if fill:
cury = y
while cury < y + ht:
self.drawLine(x, cury, x + wid, cury)
cury += 1
else:
self.drawLine(x, y, x, y + ht)
self.drawLine(x + wid, y, x + wid, y + ht)
self.drawLine(x, y + ht, x + wid, y + ht)
def drawSquare(self, x, y, wid, fill=False):
self.drawRect(x, y, wid, wid, fill)
    def drawDash(self, x1, y1, x2, y2, wid=1, dashl=3, dashg=3):
        """Draw a dashed line from (x1, y1) to (x2, y2).

        wid is the line thickness (honoured only for horizontal/vertical
        lines), dashl the drawn length of each dash and dashg the gap
        between consecutive dashes.
        """
        currX = x1
        currY = y1
        if x1 == x2:
            # Vertical: draw 'wid' adjacent dashed columns, stepping x left.
            for i in range(wid):
                currY = y1
                # Emit full dashes while a whole dash still fits before y2,
                # walking up or down depending on the line direction.
                while (y1 < y2 and currY + dashl < y2) or (
                    y1 > y2 and currY - dashl > y2
                ):
                    if y1 < y2:
                        self.drawLine(currX, currY, currX, currY + dashl)
                        currY = currY + dashl + dashg
                    else:
                        self.drawLine(currX, currY, currX, currY - dashl)
                        currY = currY - dashl - dashg
                # Final partial dash to the end point, if anything is left.
                if (y1 < y2 and currY < y2) or (y2 < y1 and currY > y2):
                    self.drawLine(currX, currY, currX, y2)
                currX -= 1
        elif y1 == y2:
            # Horizontal: draw 'wid' adjacent dashed rows, stepping y up.
            for i in range(wid):
                currX = x1
                # NOTE(review): the right-to-left bound below compares
                # against x1 where the vertical case compares against the
                # far end (y2); 'currX - dashl > x1' looks like it should
                # be '> x2' -- confirm before changing.
                while (x1 < x2 and currX + dashl < x2) or (
                    x1 > x2 and currX - dashl > x1
                ):
                    if x1 < x2:
                        self.drawLine(currX, currY, currX + dashl, currY)
                        currX = currX + dashl + dashg
                    else:
                        self.drawLine(currX, currY, currX - dashl, currY)
                        currX = currX - dashl - dashg
                if (x1 < x2 and currX < x2) or (x2 < x1 and currX > x2):
                    self.drawLine(currX, currY, x2, currY)
                currY -= 1
        else:
            # Diagonal: scale dash/gap steps by the slope so the pattern
            # advances along both axes; wid is ignored here.
            ratio = abs(x1 - x2) * 1.0 / abs(y1 - y2)
            while (x1 < x2 and currX + dashl * min([ratio, 1]) < x2) or (
                x1 > x2 and currX - dashl * min([ratio, 1]) > x2
            ):
                if ratio > 1:
                    # Shallow slope: whole dash in x, scaled amount in y.
                    if x1 < x2:
                        nextX = currX + dashl
                        currXt = currX + dashl + dashg
                    else:
                        nextX = currX - dashl
                        currXt = currX - dashl - dashg
                    if y1 < y2:
                        nextY = currY + dashl / ratio
                        currYt = currY + (dashl + dashg) / ratio
                    else:
                        nextY = currY - dashl / ratio
                        currYt = currY - (dashl + dashg) / ratio
                else:
                    # Steep slope: whole dash in y, scaled amount in x.
                    if x1 < x2:
                        nextX = currX + dashl * ratio
                        currXt = currX + (dashl + dashg) * ratio
                    else:
                        nextX = currX - dashl * ratio
                        currXt = currX - (dashl + dashg) * ratio
                    if y1 < y2:
                        nextY = currY + dashl
                        currYt = currY + dashl + dashg
                    else:
                        nextY = currY - dashl
                        currYt = currY - (dashl + dashg)
                self.drawLine(currX, currY, nextX, nextY)
                currX = currXt
                currY = currYt
            # Trailing partial dash to the exact end point.
            # NOTE(review): this check assumes x1 < x2; a right-to-left
            # diagonal may drop its final dash -- confirm against callers.
            if currX + dashl * min([ratio, 1]) < x2:
                self.drawLine(currX, currY, x2, y2)
    def drawRightArrow(self, x, y, wid, ht, lt, outline=True):
        """Draw a right-pointing arrow filling the wid x ht box at (x, y).

        The arrow is a half-height shaft plus a triangular head; when
        outline is True a black border of thickness lt is traced around
        the perimeter.
        """
        if lt > ht / 2:
            lt = ht / 2
        x1 = x + wid           # arrow tip
        y1 = y + ht / 2        # vertical centre line
        x2 = x + wid - ht / 2  # where the head begins
        ht -= 1                # work with inclusive pixel spans below
        if wid > ht / 2:
            # Wide enough for shaft + head: fan-fill the head, then the shaft.
            for i in range(y, y + ht + 1):
                self.drawLine(x2, i, x1, y1)
            self.drawRect(x, y + ht / 4, wid - ht / 2, ht / 2, True)
            if outline:
                temp = self.getPenColor()
                self.setPenColor(Color.BLACK)
                for i in range(lt):
                    # Trace the arrow perimeter, inset by i for thickness.
                    self.drawLine(x + i, y + ht / 4 + i, x2 + i, y + ht / 4 + i)
                    self.drawLine(x2 + i, y + ht / 4 + i, x2 + i, y + i * 2)
                    self.drawLine(x2 + i, y + i * 2, x1 - i, y1)
                    self.drawLine(x1 - i, y1, x2 + i, y + ht - i * 2)
                    self.drawLine(
                        x2 + i, y + ht - i * 2, x2 + i, y + ht / 4 + ht / 2 - i
                    )
                    self.drawLine(
                        x2 + i, y + ht / 4 + ht / 2 - i, x + i, y + ht / 4 + ht / 2 - i
                    )
                    self.drawLine(x + i, y + ht / 4 + ht / 2 - i, x + i, y + ht / 4 + i)
                self.setPenColor(temp)
        else:
            # Too narrow for a shaft: draw the triangular head only.
            for i in range(y, y + ht + 1):
                self.drawLine(x, i, x1, y1)
            if outline and lt != 0:
                temp = self.getPenColor()
                self.setPenColor(Color.BLACK)
                for i in range(lt):
                    self.drawLine(x, y + ht - i, x1 - i, y1)
                    self.drawLine(x, y + i, x1 - i, y1)
                    if x1 != x:
                        # Vertical closing edge of the inset triangle.
                        crapliney1 = (y1 - y) / (x1 - x) * (x + i - x + 1) + y
                        crapliney2 = y + ht - (y1 - y) / (x1 - x) * (x + i - x + 1)
                        self.drawLine(x + i, crapliney1, x + i, crapliney2)
                self.setPenColor(temp)
    def drawLeftArrow(self, x, y, wid, ht, lt, outline=True):
        """Draw a left-pointing arrow filling the wid x ht box at (x, y).

        Mirror image of drawRightArrow: a half-height shaft plus a
        triangular head on the left, with an optional black outline of
        thickness lt.
        """
        if lt > ht / 2:
            lt = ht / 2
        y1 = y + (ht / 2)  # vertical centre / tip height
        x1 = x + wid       # right-hand end of the shaft
        x2 = x + ht / 2    # where the head meets the shaft
        ht -= 1            # inclusive pixel spans below
        if wid > ht / 2:
            # Wide enough for head + shaft: fan-fill the head, then the shaft.
            for i in range(y, y + ht + 1):
                self.drawLine(x, y1, x2, i)
            self.drawRect(x2, y + ht / 4, wid - ht / 2, ht / 2, True)
            if outline:
                temp = self.getPenColor()
                self.setPenColor(Color.BLACK)
                for i in range(lt):
                    # Trace the arrow perimeter, inset by i for thickness.
                    self.drawLine(x + i, y1, x2 - i, y + i * 2)
                    self.drawLine(x2 - i, y + i * 2, x2 - i, y + ht / 4 + i)
                    self.drawLine(x2 - i, y + ht / 4 + i, x1 - i, y + ht / 4 + i)
                    self.drawLine(
                        x1 - i, y + ht / 4 + i, x1 - i, y + ht / 4 + ht / 2 - i
                    )
                    self.drawLine(
                        x1 - i, y + ht / 4 + ht / 2 - i, x2 - i, y + ht / 4 + ht / 2 - i
                    )
                    self.drawLine(
                        x2 - i, y + ht / 4 + ht / 2 - i, x2 - i, y + ht - i * 2
                    )
                    self.drawLine(x2 - i, y + ht - i * 2, x + i, y1)
                self.setPenColor(temp)
        else:
            # Too narrow for a shaft: draw the triangular head only.
            if lt > wid / 2:
                lt = wid / 2
            for i in range(y, y + ht + 1):
                self.drawLine(x, y1, x1, i)
            if outline and lt != 0:
                temp = self.getPenColor()
                self.setPenColor(Color.BLACK)
                for i in range(lt):
                    self.drawLine(x + i, y1, x1, y + i)
                    if x1 != x:
                        # Vertical closing edge of the inset triangle.
                        crapliney1 = (y1 - y) / (x1 - x) * (x + i - x + 1) + y
                        crapliney2 = y + ht - (y1 - y) / (x1 - x) * (x + i - x + 1)
                        self.drawLine(x1 - i, crapliney1, x1 - i, crapliney2)
                    self.drawLine(x1, y + ht - i, x + i, y1)
                self.setPenColor(temp)
    def drawRightFrame(self, x, y, wid, ht, lt, frame, outline=True):
        """Draw a right-pointing frame bar (a band with a pointed right end).

        frame (0, 1 or 2) selects the vertical band within the ht-tall
        track; lt is the black outline thickness when outline is True.

        NOTE(review): a frame value outside 0-2 leaves y1/y2/y3 unset and
        raises NameError.
        """
        if lt > ht / 2:
            lt = ht / 2
        # y1/y3 bound the band; y2 is the vertical centre of the tip.
        if frame == 1:
            y1 = y + ht / 2
            y2 = y + ht * 5 / 8
            y3 = y + ht * 3 / 4
        elif frame == 2:
            y1 = y + ht * 5 / 8
            y2 = y + ht * 3 / 4
            y3 = y + ht * 7 / 8
        elif frame == 0:
            y1 = y + ht * 3 / 4
            y2 = y + ht * 7 / 8
            y3 = y + ht - 1
        x1 = x
        x2 = x + wid - ht / 8  # where the pointed tip begins
        x3 = x + wid           # tip apex
        if wid > ht / 8:
            # Wide enough for a body plus tip: fill rows, then taper to x3.
            for i in range(y1, y3 + 1):
                self.drawLine(x1, i, x2, i)
                self.drawLine(x2, i, x3, y2)
            if outline:
                temp = self.getPenColor()
                self.setPenColor(Color.BLACK)
                for i in range(lt):
                    # Trace the perimeter, inset by i for thickness.
                    self.drawLine(x1, y1 + i, x2, y1 + i)
                    self.drawLine(x2, y1 + i, x3 - i, y2)
                    self.drawLine(x3 - i, y2, x2, y3 - i)
                    self.drawLine(x2, y3 - i, x1, y3 - i)
                    self.drawLine(x1 + i, y3 - i, x1 + i, y1 + i)
                self.setPenColor(temp)
        else:
            # Too narrow: draw the pointed tip only.
            for i in range(y1, y3 + 1):
                self.drawLine(x1, i, x3, y2)
            if outline:
                temp = self.getPenColor()
                self.setPenColor(Color.BLACK)
                self.drawLine(x1, y1, x3, y2)
                self.drawLine(x3, y2, x1, y3)
                self.drawLine(x1, y1, x1, y3)
                self.setPenColor(temp)
    def drawRightFrameRect(self, x, y, wid, ht, lt, frame, outline=True):
        """Draw the rectangular (untipped) body of a right-facing frame bar.

        frame (0, 1 or 2) selects the vertical band; lt is the black
        outline thickness when outline is True.

        NOTE(review): a frame value outside 0-2 leaves y1/y3 unset and
        raises NameError.
        """
        if lt > ht / 2:
            lt = ht / 2
        # y1/y3 bound the band (same bands as drawRightFrame).
        if frame == 1:
            y1 = y + ht / 2
            y3 = y + ht * 3 / 4
        elif frame == 2:
            y1 = y + ht * 5 / 8
            y3 = y + ht * 7 / 8
        elif frame == 0:
            y1 = y + ht * 3 / 4
            y3 = y + ht - 1
        x1 = x
        x2 = x + wid
        # Fill the band row by row.
        for i in range(y1, y3 + 1):
            self.drawLine(x1, i, x2, i)
        if outline:
            temp = self.getPenColor()
            self.setPenColor(Color.BLACK)
            for i in range(lt):
                # Trace the rectangle perimeter, inset by i for thickness.
                self.drawLine(x1, y1 + i, x2, y1 + i)
                self.drawLine(x2, y1 + i, x2 - i, y3)
                self.drawLine(x2, y3 - i, x1, y3 - i)
                self.drawLine(x1 + i, y3 - i, x1 + i, y1 + i)
            self.setPenColor(temp)
    def drawLeftFrame(self, x, y, wid, ht, lt, frame, outline=True):
        """Draw a left-pointing frame bar (a band with a pointed left end).

        frame (0, 1 or 2) selects the vertical band within the ht-tall
        track; lt is the black outline thickness when outline is True.

        NOTE(review): a frame value outside 0-2 leaves y1/y2/y3 unset and
        raises NameError. The outline loop appears copy-adapted from
        drawRightFrame (x1/x3 are swapped relative to that version) --
        confirm the outline geometry before reworking it.
        """
        if lt > ht / 2:
            lt = ht / 2
        # y1/y3 bound the band; y2 is the vertical centre of the tip.
        if frame == 1:
            y1 = y
            y2 = y + ht / 8
            y3 = y + ht / 4
        elif frame == 2:
            y1 = y + ht / 8
            y2 = y + ht / 4
            y3 = y + ht * 3 / 8
        elif frame == 0:
            y1 = y + ht / 4
            y2 = y + ht * 3 / 8
            y3 = y + ht / 2
        x1 = x + wid       # right-hand end of the body
        x2 = x + ht / 8    # where the pointed tip begins
        x3 = x             # tip apex (leftmost point)
        if wid > ht / 8:
            # Wide enough for a body plus tip: fill rows, then taper to x3.
            for i in range(y1, y3 + 1):
                self.drawLine(x1, i, x2, i)
                self.drawLine(x2, i, x3, y2)
            if outline:
                temp = self.getPenColor()
                self.setPenColor(Color.BLACK)
                for i in range(lt):
                    self.drawLine(x1, y1 + i, x2, y1 + i)
                    self.drawLine(x2, y1 + i, x3 - i, y2)
                    self.drawLine(x3 - i, y2, x2, y3 - i)
                    self.drawLine(x2, y3 - i, x1, y3 - i)
                    self.drawLine(x1 + i, y3 - i, x1 + i, y1 + i)
                self.setPenColor(temp)
        else:
            # Too narrow: draw the pointed tip only.
            for i in range(y1, y3 + 1):
                self.drawLine(x1, i, x3, y2)
            if outline:
                temp = self.getPenColor()
                self.setPenColor(Color.BLACK)
                self.drawLine(x1, y1, x3, y2)
                self.drawLine(x3, y2, x1, y3)
                self.drawLine(x1, y1, x1, y3)
                self.setPenColor(temp)
    def drawLeftFrameRect(self, x, y, wid, ht, lt, frame, outline=True):
        """Draw the rectangular (untipped) body of a left-facing frame bar.

        frame (0, 1 or 2) selects the vertical band; lt is the black
        outline thickness when outline is True.

        NOTE(review): a frame value outside 0-2 leaves y1/y3 unset and
        raises NameError.
        """
        if lt > ht / 2:
            lt = ht / 2
        # y1/y3 bound the band (same bands as drawLeftFrame).
        if frame == 1:
            y1 = y
            y3 = y + ht / 4
        elif frame == 2:
            y1 = y + ht / 8
            y3 = y + ht * 3 / 8
        elif frame == 0:
            y1 = y + ht / 4
            y3 = y + ht / 2
        x1 = x + wid  # right edge
        x2 = x        # left edge
        # Fill the band row by row.
        for i in range(y1, y3 + 1):
            self.drawLine(x1, i, x2, i)
        if outline:
            temp = self.getPenColor()
            self.setPenColor(Color.BLACK)
            for i in range(lt):
                # Trace the rectangle perimeter, inset by i for thickness.
                self.drawLine(x1, y1 + i, x2, y1 + i)
                self.drawLine(x2 - i, y1, x2 - i, y3)
                self.drawLine(x2, y3 - i, x1, y3 - i)
                self.drawLine(x1 + i, y3, x1 + i, y1)
            self.setPenColor(temp)
def drawPointer(self, x, y, ht, lt, outline=True):
x1 = x - int(round(0.577350269 * ht / 2))
x2 = x + int(round(0.577350269 * ht / 2))
y1 = y + ht / 2
y2 = y + ht - 1
for i in range(x1, x2 + 1):
self.drawLine(i, y2, x, y1)
if outline:
temp = self.getPenColor()
self.setPenColor(Color.BLACK)
self.drawLine(x, y1, x1, y2)
self.drawLine(x, y1, x2, y2)
self.drawLine(x1, y2, x2, y2)
self.setPenColor(temp)
def bresLine(x, y, x2, y2):
"""Bresenham line algorithm"""
steep = 0
coords = []
dx = int(abs(x2 - x) + 0.5)
if (x2 - x) > 0:
sx = 1
else:
sx = -1
dy = int(abs(y2 - y) + 0.5)
if (y2 - y) > 0:
sy = 1
else:
sy = -1
if dy > dx:
steep = 1
x, y = y, x
dx, dy = dy, dx
sx, sy = sy, sx
dx2 = dx * 2
dy2 = dy * 2
d = dy2 - dx
for i in range(0, dx):
coords.append((x, y))
while d >= 0:
y += sy
d -= dx2
x += sx
d += dy2
if steep: # transpose x's and y's
coords = [(c[1], c[0]) for c in coords]
coords.append((x2, y2))
return coords
bresLine = staticmethod(bresLine)
def _drawLine(self, x1, y1, x2, y2):
# special checks for vert and horiz lines
if x1 == x2:
if 0 <= x1 < self.wd:
if y2 < y1:
y1, y2 = y2, y1
cury = max(y1, 0)
maxy = min(y2, self.ht - 1)
while cury <= maxy:
self.plotPoint(x1, cury)
cury += 1
return
if y1 == y2:
if 0 <= y1 < self.ht:
if x2 < x1:
x1, x2 = x2, x1
curx = max(x1, 0)
maxx = min(x2, self.wd - 1)
while curx <= maxx:
self.plotPoint(curx, y1)
curx += 1
return
for pt in BitMap.bresLine(x1, y1, x2, y2):
self.plotPoint(pt[0], pt[1])
def _drawLines(self, lineSegs):
for x1, y1, x2, y2 in lineSegs:
self._drawLine(x1, y1, x2, y2)
def drawLine(self, x1, y1, x2, y2, type=LINE_SOLID):
if type == BitMap.LINE_SOLID:
self._drawLine(x1, y1, x2, y2)
elif type == BitMap.LINE_DASHED:
# how many segs?
len = hypot(x2 - x1, y2 - y1)
numsegs = len / BitMap._DASH_LEN
dx = (x2 - x1) / numsegs
dy = (y2 - y1) / numsegs
dx2 = dx / 2.0
dy2 = dy / 2.0
if x2 < x1:
x1, x2 = x2, x1
y1, y2 = y2, y1
segs = []
curx = x1
cury = y1
for i in range(int(numsegs)):
segs.append((curx, cury, curx + dx2, cury + dy2))
curx += dx
cury += dy
if curx + dx2 > x2:
segs.append((curx, cury, x2, y2))
else:
segs.append((curx, cury, curx + dx2, cury + dy2))
self._drawLines(segs)
elif type == BitMap.LINE_DOTTED:
len = hypot(x2 - x1, y2 - y1)
numsegs = len / BitMap._DOT_LEN
dx = (x2 - x1) / numsegs
dy = (y2 - y1) / numsegs
dx2 = dx / 2.0
dy2 = dy / 2.0
if x2 < x1:
x1, x2 = x2, x1
y1, y2 = y2, y1
segs = []
curx = x1
cury = y1
for i in range(int(numsegs)):
segs.append((curx, cury, curx + dx2, cury + dy2))
curx += dx
cury += dy
if curx + dx2 > x2:
segs.append((curx, cury, x2, y2))
else:
segs.append((curx, cury, curx + dx2, cury + dy2))
self._drawLines(segs)
elif type == BitMap.LINE_DOT_DASH:
len = hypot(x2 - x1, y2 - y1)
numsegs = len / BitMap._DOT_DASH_LEN
dx = (x2 - x1) / numsegs
dy = (y2 - y1) / numsegs
dx3 = dx / 3.0
dy3 = dy / 3.0
dx23 = 0.62 * dx
dy23 = 0.62 * dy
dx56 = 0.78 * dx
dy56 = 0.78 * dy
if x2 < x1:
x1, x2 = x2, x1
y1, y2 = y2, y1
segs = []
curx = x1
cury = y1
for i in range(int(numsegs)):
segs.append((curx, cury, curx + dx3, cury + dy3))
segs.append((curx + dx23, cury + dy23, curx + dx56, cury + dy56))
curx += dx
cury += dy
if curx + dx3 > x2:
segs.append((curx, cury, x2, y2))
else:
segs.append((curx, cury, curx + dx3, cury + dy3))
if curx + dx23 < x2:
if curx + dx56 > x2:
segs.append((curx + dx23, cury + dy23, x2, y2))
else:
segs.append(
(curx + dx23, cury + dy23, curx + dx56, cury + dy56)
)
else:
pass # segs.append( ( curx, cury, curx + dx3, cury + dy3 ) )
segs.append((curx, cury, x2, y2))
self._drawLines(segs)
    def drawCircle(self, cx, cy, r, fill=False):
        """Draw a circle of radius r centred at (cx, cy).

        Uses the midpoint circle algorithm; with fill=True the interior
        is painted with symmetric scanlines, otherwise only the
        eight-way symmetric perimeter points are plotted.
        """
        x = 0
        y = r
        d = 1 - r  # decision variable: negative while midpoint is inside
        # Top and bottom extremes.
        self.plotPoint(cx, cy + y)
        self.plotPoint(cx, cy - y)
        if fill:
            self.drawLine(cx - y, cy, cx + y, cy)
        else:
            self.plotPoint(cx + y, cy)
            self.plotPoint(cx - y, cy)
        while y > x:
            if d < 0:
                d += 2 * x + 3
            else:
                d += 2 * (x - y) + 5
                y -= 1
            x += 1
            if fill:
                # Fill all four symmetric scanlines for this step.
                self.drawLine(cx + x - 1, cy + y, cx - x + 1, cy + y)
                self.drawLine(cx - x + 1, cy - y, cx + x - 1, cy - y)
                self.drawLine(cx + y - 1, cy + x, cx - y + 1, cy + x)
                self.drawLine(cx - y + 1, cy - x, cx + y - 1, cy - x)
            else:
                # Eight-way symmetric perimeter points.
                self.plotPoint(cx + x, cy + y)
                self.plotPoint(cx + y, cy + x)
                self.plotPoint(cx - x, cy - y)
                self.plotPoint(cx - y, cy - x)
                self.plotPoint(cx + x, cy - y)
                self.plotPoint(cx - y, cy + x)
                self.plotPoint(cx - x, cy + y)
                self.plotPoint(cx + y, cy - x)
# method for creating gif string
    def createGIFString(self, oneistone):
        """Return the bitmap as a base64-encoded GIF89a byte string.

        When oneistone is False and the image is wider than 1000 pixels
        it is downsampled to 1000 wide by skipping rows/columns whose
        scaled index does not advance; otherwise one bitmap cell maps to
        one GIF pixel.

        Python 2 only: the GIF is assembled in a str via chr() and
        struct.pack.
        """
        if not oneistone:
            if self.wd > 1000:
                modifier = 1000.0 / self.wd  # downsampling ratio
                self.wd = 1000
                self.ht = int(self.ht * modifier)
            else:
                oneistone = True  # small enough: no scaling needed
        binarystring = "GIF89a"  # header
        # Logical screen descriptor (flag byte 0b10010110: global colour
        # table present, 128 entries).
        binarystring += (
            struct.pack("h", self.wd)
            + struct.pack("h", self.ht)
            + chr(int("10010110", 2))
            + "\x00\x00"
        )
        if len(self.palette) <= 128:
            # Palette fits: emit it, padded to 128 entries with white.
            for i in self.palette:
                colour = Color.fromLong(i)
                binarystring += chr(colour.red) + chr(colour.grn) + chr(colour.blu)
            for i in range(128 - len(self.palette)):
                binarystring += chr(255) + chr(255) + chr(255)
        else:
            # Too many colours: fall back to a fixed 8x4x4 colour cube.
            for i in range(8):  # 128 colour table
                for j in range(4):
                    for k in range(4):
                        binarystring += chr(int(i * 36.5)) + chr(j * 85) + chr(k * 85)
        binarystring += (
            ",\x00\x00\x00\x00"
            + struct.pack("h", self.wd)
            + struct.pack("h", self.ht)
            + "\x00"
        )  # image descriptor
        binarystring += "\x07"  # LZW Minimum code size
        pixstring = ""
        self.bitarray.reverse()  # rows are stored bottom-up
        for a, i in enumerate(self.bitarray):
            for b, j in enumerate(i):
                if len(self.palette) <= 128:
                    if oneistone:
                        pixstring += chr(j)
                    else:
                        # Keep only cells whose scaled index advances.
                        if (
                            int(a * modifier) != int((a - 1) * modifier) or a == 0
                        ) and (int(b * modifier) != int((b - 1) * modifier) or b == 0):
                            pixstring += chr(j)
                else:
                    # Map the pixel's RGB into the fixed 128-colour cube
                    # (3 bits red, 2 bits green, 2 bits blue).
                    colourhash = self.palette[j]
                    colour = Color.fromLong(colourhash)
                    colbin = (
                        "{0:b}".format(colour.red / 32).zfill(3)
                        + "{0:b}".format(colour.grn / 64).zfill(2)
                        + "{0:b}".format(colour.blu / 64).zfill(2)
                    )
                    if oneistone:
                        pixstring += chr(int(colbin, 2))
                    else:
                        if (
                            int(a * modifier) != int((a - 1) * modifier) or a == 0
                        ) and (int(b * modifier) != int((b - 1) * modifier) or b == 0):
                            pixstring += chr(int(colbin, 2))
        # Emit pixels in 9-byte sub-blocks, each starting with 0x80 (which
        # appears to be the clear code for a 7-bit minimum code size) so
        # no real LZW compression is needed.
        for i in range(0, len(pixstring), 8):
            binarystring += "\x09\x80" + pixstring[i : i + 8]
        binarystring += "\x01\x81"  # end-of-information code
        binarystring += "\x00;"  # block terminator + GIF trailer
        return base64.b64encode(binarystring)
    def _saveBitMapNoCompression(self, filename):
        """Write the bitmap to filename as an uncompressed 8-bit BMP.

        Python 2 only: uses the file() builtin and chr()-built byte
        strings. longToString/shortToString are little-endian packing
        helpers defined elsewhere in this module.
        """
        # open file
        f = file(filename, "wb")
        # write bitmap header
        f.write("BM")
        f.write(
            longToString(54 + 256 * 4 + self.ht * self.wd)
        )  # DWORD size in bytes of the file
        f.write(longToString(0))  # DWORD 0
        f.write(longToString(54 + 256 * 4))  # DWORD offset to the data
        f.write(longToString(40))  # DWORD header size = 40
        f.write(longToString(self.wd))  # DWORD image width
        f.write(longToString(self.ht))  # DWORD image height
        f.write(shortToString(1))  # WORD planes = 1
        f.write(shortToString(8))  # WORD bits per pixel = 8
        f.write(longToString(0))  # DWORD compression = 0
        f.write(
            longToString(self.wd * self.ht)
        )  # DWORD sizeimage = size in bytes of the bitmap = width * height
        f.write(longToString(0))  # DWORD horiz pixels per meter (?)
        f.write(longToString(0))  # DWORD ver pixels per meter (?)
        f.write(longToString(256))  # DWORD number of colours used = 256
        f.write(
            longToString(len(self.palette))
        )  # DWORD number of important colours = len(self.palette)
        # write bitmap palette, padded to 256 entries
        for clr in self.palette:
            f.write(longToString(clr))
        for i in range(len(self.palette), 256):
            f.write(longToString(0))
        # write pixels, padding each row to a multiple of 4 bytes as the
        # BMP format requires
        for row in self.bitarray:
            for pixel in row:
                f.write(chr(pixel))
            padding = (4 - len(row) % 4) % 4
            for i in range(padding):
                f.write(chr(0))
        # close file
        f.close()
    def _saveBitMapWithCompression(self, filename):
        """Write the bitmap to filename as an RLE8-compressed 8-bit BMP.

        Python 2 only: uses the file() builtin and chr()-built byte
        strings. longToString/shortToString are little-endian packing
        helpers defined elsewhere in this module. The size fields in the
        header are patched in afterwards, once the compressed pixel data
        size is known.
        """
        # open file
        f = file(filename, "wb")
        # write bitmap header
        f.write("BM")
        f.write(
            longToString(54 + 256 * 4 + self.ht * self.wd)
        )  # DWORD size in bytes of the file (patched below)
        f.write(longToString(0))  # DWORD 0
        f.write(longToString(54 + 256 * 4))  # DWORD offset to the data
        f.write(longToString(40))  # DWORD header size = 40
        f.write(longToString(self.wd))  # DWORD image width
        f.write(longToString(self.ht))  # DWORD image height
        f.write(shortToString(1))  # WORD planes = 1
        f.write(shortToString(8))  # WORD bits per pixel = 8
        f.write(longToString(1))  # DWORD compression = 1=RLE8
        f.write(
            longToString(self.wd * self.ht)
        )  # DWORD sizeimage = size in bytes of the bitmap = width * height
        f.write(longToString(0))  # DWORD horiz pixels per meter (?)
        f.write(longToString(0))  # DWORD ver pixels per meter (?)
        f.write(longToString(256))  # DWORD number of colours used = 256
        f.write(
            longToString(len(self.palette))
        )  # DWORD number of important colours = len(self.palette)
        # write bitmap palette, padded to 256 entries
        for clr in self.palette:
            f.write(longToString(clr))
        for i in range(len(self.palette), 256):
            f.write(longToString(0))
        # write pixels as RLE8 (count, value) pairs; runs are capped at 255
        pixelBytes = 0
        for row in self.bitarray:
            rleStart = 0
            curPixel = rleStart + 1
            while curPixel < len(row):
                if row[curPixel] != row[rleStart] or curPixel - rleStart == 255:
                    # write out from rleStart thru curPixel-1
                    f.write(chr(curPixel - rleStart))
                    f.write(chr(row[rleStart]))
                    pixelBytes += 2
                    rleStart = curPixel
                else:
                    pass
                curPixel += 1
            # write out last run of pixels
            f.write(chr(curPixel - rleStart))
            f.write(chr(row[rleStart]))
            pixelBytes += 2
            # end of line code
            f.write(chr(0))
            f.write(chr(0))
            pixelBytes += 2
        # end of bitmap code
        f.write(chr(0))
        f.write(chr(1))
        pixelBytes += 2
        # now fix sizes in header
        f.seek(2)
        f.write(
            longToString(54 + 256 * 4 + pixelBytes)
        )  # DWORD size in bytes of the file
        f.seek(34)
        f.write(longToString(pixelBytes))  # DWORD compressed pixel data size
        # close file
        f.close()
def saveFile(self, filename, compress=True):
if compress:
self._saveBitMapWithCompression(filename)
else:
self._saveBitMapNoCompression(filename)
# For converting arttemis colour codes to an RGB value
def artColourTable(x):
    """Map an Artemis colour code (0-17) to an (r, g, b) tuple.

    Raises ValueError for codes outside the table; the old elif chain
    raised an opaque UnboundLocalError instead (callers wrap calls in a
    bare except, so they are unaffected).
    """
    table = {
        0: (255, 255, 255),   # white
        1: (100, 100, 100),   # dark grey
        2: (255, 0, 0),       # red
        3: (0, 255, 0),       # green
        4: (0, 0, 255),       # blue
        5: (0, 255, 255),     # cyan
        6: (255, 0, 255),     # magenta
        7: (255, 255, 0),     # yellow
        8: (152, 255, 152),   # pale green
        9: (135, 206, 250),   # light sky blue
        10: (255, 165, 0),    # orange
        11: (200, 150, 100),  # brown
        12: (255, 200, 200),  # pink
        13: (170, 170, 170),  # light grey
        14: (0, 0, 0),        # black
        15: (255, 63, 63),    # light red
        16: (255, 127, 127),  # lighter red
        17: (255, 191, 191),  # lightest red
    }
    try:
        return table[x]
    except (KeyError, TypeError):
        raise ValueError("unknown Artemis colour code: %r" % (x,))
# For reading in genbank or EMBL files
def getArrows(filename, legname):
    """Parse a GenBank, EMBL or (multi-)FASTA file into drawable features.

    legname selects the qualifier used as the feature label: "gene",
    "product", "locus_tag" or "note". Returns (length, outlist) where
    length is the sequence length (taken from the source feature when
    present, otherwise from the raw sequence) and outlist is a list of
    'feature' instances (class defined elsewhere in this module).
    """
    length = None
    theseq = ""
    genbank = open(filename)
    outlist = []
    getFeats = False       # currently inside the feature table
    emblcheck = False      # file uses EMBL-style "FT" lines
    getColour = False      # NOTE(review): never used; kept for compatibility
    getemblseq = False     # inside an EMBL sequence block
    getgenseq = False      # inside a GenBank ORIGIN block
    getmultifasta = False  # input turned out to be (multi-)FASTA
    for line in genbank:
        if "\t" in line:
            sys.stderr.write(
                "Tab found in genbank file, this may cause some Features to be missed, please remove tabs."
            )
        # --- work out which section of the file we are in ---
        if line.startswith("FEATURES") or line.startswith("     source"):
            getFeats = True
        elif line.startswith("FT"):
            getFeats = True
            emblcheck = True
        elif line.startswith(">") and not getFeats:
            # FASTA header; multiple records are joined with a literal
            # 'qqq' marker replaced by an N-spacer after the loop.
            if getmultifasta:
                theseq += "qqq"
            else:
                getmultifasta = True
        elif getmultifasta:
            theseq += line.rstrip()
        elif not line.startswith(" ") and not emblcheck:
            getFeats = False
        elif emblcheck:
            getFeats = False
        # --- feature key lines (key in column 6 of the table) ---
        if line[2:].startswith("   ") and line[5] != " " and getFeats:
            feat, loc = line[2:].split()
            if "join(" in loc or "order(" in loc:
                # Compound location: collect all start/stop pairs.
                if loc.startswith("join("):
                    temp2 = loc[5:-1].split(",")
                    strand = "+"
                elif loc.startswith("complement("):
                    temp2 = loc[11:-1].split(",")
                    strand = "-"
                elif loc.startswith("order("):
                    # NOTE(review): this branch never sets 'strand'; a file
                    # whose first compound location is order(...) would
                    # raise NameError below -- confirm.
                    temp2 = loc[6:-1].split(",")
                temp = [[], []]
                gotit = True
                for i in temp2:
                    # Strip accession prefixes and partial-location markers.
                    if ":" in i:
                        i = i.split(":")[1]
                    if "<" in i:
                        i = i.replace("<", "")
                    if ">" in i:
                        i = i.replace(">", "")
                    if ")" in i:
                        i = i.replace(")", "")
                    if i.startswith("complement("):
                        strand = "-"
                        i = i[11:]
                    if i.startswith("join("):
                        i = i[5:]
                    if i.startswith("order("):
                        i = i[6:]
                    # Location forms: x..y, x^y, x.y or a single base.
                    if ".." in i:
                        try:
                            start, stop = i.split("..")
                        except:
                            start, stop = "x", "y"
                    elif "^" in i:
                        try:
                            start, stop = i.split("^")
                        except:
                            start, stop = "x", "y"
                    elif "." in i:
                        try:
                            start, stop = i.split(".")
                        except:
                            start, stop = "x", "y"
                    else:
                        # Single-base location; gap(...) entries are skipped.
                        if i.startswith("gap("):
                            start = None
                        else:
                            start = i
                        stop = i
                    try:
                        if start != None:
                            temp[0].append(int(start))
                            temp[1].append(int(stop))
                    except:
                        # Non-numeric location: report this line only once.
                        if gotit:
                            print ("feature could not be processed:\n" + line)
                        gotit = False
                if gotit:
                    aninstance = feature(temp[0], temp[1], feat, strand, None, None)
                    outlist.append(aninstance)
                    if feat == "source":
                        # The source feature spans the whole sequence.
                        try:
                            lengtht = max([max(temp[0]), max(temp[1])])
                            if lengtht > length:
                                length = lengtht
                        except:
                            pass
            else:
                # Simple location, optionally wrapped in complement(...).
                if loc.startswith("complement("):
                    strand = "-"
                    loc = loc[11:-1]
                else:
                    strand = "+"
                if ":" in loc:
                    loc = loc.split(":")[1]
                if "<" in loc:
                    loc = loc.replace("<", "")
                if ">" in loc:
                    loc = loc.replace(">", "")
                if ".." in loc:
                    try:
                        start, stop = loc.split("..")
                    except:
                        start, stop = "x", "y"
                elif "^" in loc:
                    try:
                        start, stop = loc.split("^")
                    except:
                        start, stop = "x", "y"
                elif "." in loc:
                    try:
                        start, stop = loc.split(".")
                    except:
                        start, stop = "x", "y"
                else:
                    start = loc
                    stop = loc
                try:
                    aninstance = feature(
                        int(start), int(stop), feat, strand, None, None
                    )
                    outlist.append(aninstance)
                except:
                    print ("feature could not be processed:\n" + line)
                if feat == "source":
                    try:
                        lengtht = max([int(start), int(stop)])
                        if lengtht > length:
                            length = lengtht
                    except:
                        pass
        # --- qualifier lines: colour/name for the most recent feature ---
        # Two indentation variants are handled for colour qualifiers
        # (value starting at column 27 vs column 29).
        elif line[2:].startswith("                 /colour=") and getFeats:
            temp = line[27:-1]
            temp = temp.replace('"', "")
            temp = temp.replace("'", "")
            artColourF = temp.split()
            try:
                if len(artColourF) == 1:
                    # Single number: an Artemis colour code.
                    artColour = artColourTable(int(artColourF[0]))
                else:
                    artColour = (
                        int(artColourF[0]),
                        int(artColourF[1]),
                        int(artColourF[2]),
                    )
                outlist[-1].colour = artColour
            except:
                print ("Colour could not be processed:\n" + line)
        elif line[2:].startswith("                 /color=") and getFeats:
            temp = line[26:-1]
            temp = temp.replace('"', "")
            temp = temp.replace("'", "")
            artColourF = temp.split()
            try:
                if len(artColourF) == 1:
                    artColour = artColourTable(int(artColourF[0]))
                else:
                    artColour = (
                        int(artColourF[0]),
                        int(artColourF[1]),
                        int(artColourF[2]),
                    )
                outlist[-1].colour = artColour
            except:
                print ("Colour could not be processed:\n" + line)
        elif line[2:].startswith("                   /colour=") and getFeats:
            temp = line[29:-1]
            temp = temp.replace('"', "")
            temp = temp.replace("'", "")
            artColourF = temp.split()
            try:
                if len(artColourF) == 1:
                    artColour = artColourTable(int(artColourF[0]))
                else:
                    artColour = (
                        int(artColourF[0]),
                        int(artColourF[1]),
                        int(artColourF[2]),
                    )
                outlist[-1].colour = artColour
            except:
                print ("Colour could not be processed:\n" + line)
        elif line[2:].startswith("                   /color=") and getFeats:
            temp = line[28:-1]
            temp = temp.replace('"', "")
            temp = temp.replace("'", "")
            try:
                artColourF = temp.split()
                if len(artColourF) == 1:
                    artColour = artColourTable(int(artColourF[0]))
                else:
                    artColour = (
                        int(artColourF[0]),
                        int(artColourF[1]),
                        int(artColourF[2]),
                    )
            except:
                print ("Colour could not be processed:\n" + line)
            # NOTE(review): unlike the other colour branches this assignment
            # runs even when parsing failed (stale/unset artColour).
            outlist[-1].colour = artColour
        elif (
            line[2:].startswith("                   /gene=")
            and getFeats
            and legname == "gene"
        ):
            outlist[-1].name = line.rstrip()[27:].replace('"', "")
        elif (
            line[2:].startswith("                   /product=")
            and getFeats
            and legname == "product"
        ):
            outlist[-1].name = line.rstrip()[30:].replace('"', "")
        elif (
            line[2:].startswith("                   /locus_tag=")
            and getFeats
            and legname == "locus_tag"
        ):
            outlist[-1].name = line.rstrip()[32:].replace('"', "")
        elif (
            line[2:].startswith("                   /note=")
            and getFeats
            and legname == "note"
        ):
            outlist[-1].name = line.rstrip()[27:].replace('"', "")
        elif line.startswith("ORIGIN") and length == None:
            getgenseq = True
        elif line.startswith("SQ   Sequence ") and length == None:
            getemblseq = True
        elif line.startswith("//"):
            # End of record: fall back to the sequence length if no
            # source feature provided one.
            getemblseq = False
            getgenseq = False
            if length == None:
                length = len(theseq)
        elif getemblseq:
            # EMBL sequence lines end with a base-count column.
            theseq += "".join(line.split()[:-1])
        elif getgenseq:
            # GenBank sequence lines start with a position column.
            theseq += "".join(line.split()[1:])
    if getmultifasta:
        # Lay contigs end to end, separated by 1/500th of the total length,
        # and record each as a 'contig' feature.
        insertSize = len(theseq) / 500
        multifastapos = 1
        for i in theseq.split("qqq"):
            aninstance = feature(
                multifastapos, multifastapos + len(i) - 1, "contig", "+", None, None
            )
            outlist.append(aninstance)
            multifastapos += len(i) - 1 + insertSize
        theseq = theseq.replace("qqq", "n" * int(len(theseq) / 500))
    if length == None and theseq != "":
        length = len(theseq)
    return length, outlist
# detects whether blast+ is in your path
def isNewBlastDB():
    """Return True if the BLAST+ database builder is available on the PATH.

    Scans every directory in the PATH environment variable for either the
    Windows executable (``makeblastdb.exe``) or the unix binary
    (``makeblastdb``) and stops at the first match.
    """
    for directory in os.environ["PATH"].split(os.pathsep):
        # os.path.join is portable; the old "dir + '/'" concat broke style on Windows
        if os.path.exists(os.path.join(directory, "makeblastdb.exe")) or os.path.exists(
            os.path.join(directory, "makeblastdb")
        ):
            return True
    return False
def isNewBlastn():
    """Return True if the BLAST+ nucleotide search tool is on the PATH.

    Scans every directory in the PATH environment variable for either
    ``blastn.exe`` (Windows) or ``blastn`` (unix) and stops at the first match.
    """
    for directory in os.environ["PATH"].split(os.pathsep):
        # os.path.join is portable; the old "dir + '/'" concat broke style on Windows
        if os.path.exists(os.path.join(directory, "blastn.exe")) or os.path.exists(
            os.path.join(directory, "blastn")
        ):
            return True
    return False
def isNewTblastx():
    """Return True if the BLAST+ translated search tool is on the PATH.

    Scans every directory in the PATH environment variable for either
    ``tblastx.exe`` (Windows) or ``tblastx`` (unix) and stops at the first
    match.
    """
    for directory in os.environ["PATH"].split(os.pathsep):
        # os.path.join is portable; the old "dir + '/'" concat broke style on Windows
        if os.path.exists(os.path.join(directory, "tblastx.exe")) or os.path.exists(
            os.path.join(directory, "tblastx")
    ):
            return True
    return False
# detects legacy blast in your path
def isLegBlastDB():
    """Return True if the legacy BLAST database formatter is on the PATH.

    Scans every directory in the PATH environment variable for either
    ``formatdb.exe`` (Windows) or ``formatdb`` (unix) and stops at the first
    match.
    """
    for directory in os.environ["PATH"].split(os.pathsep):
        # os.path.join is portable; the old "dir + '/'" concat broke style on Windows
        if os.path.exists(os.path.join(directory, "formatdb.exe")) or os.path.exists(
            os.path.join(directory, "formatdb")
        ):
            return True
    return False
def isLegBlastall():
    """Return True if the legacy BLAST search driver is on the PATH.

    Scans every directory in the PATH environment variable for either
    ``blastall.exe`` (Windows) or ``blastall`` (unix) and stops at the first
    match.
    """
    for directory in os.environ["PATH"].split(os.pathsep):
        # os.path.join is portable; the old "dir + '/'" concat broke style on Windows
        if os.path.exists(os.path.join(directory, "blastall.exe")) or os.path.exists(
            os.path.join(directory, "blastall")
        ):
            return True
    return False
# gets all blast hits length >minlength and e value < mineval and identity > minident
def getBlast(filename, minlength, mineval, minident):
    """Parse a BLAST comparison file and return hits passing the cutoffs.

    Supports two formats: standard 12-column tabular BLAST output
    (``-outfmt 6``) and the 8-column ACT "crunch" format. The format is
    sniffed from the first line: if it does not parse as 12 tabular columns
    it is treated as crunch.

    :param filename: path to the comparison file; "" returns an empty list.
    :param minlength: minimum alignment length to keep a hit.
    :param mineval: maximum e-value to keep a hit (crunch rows have no
        e-value and are given 0, so they always pass this filter).
    :param minident: minimum percent identity to keep a hit.
    :return: list of (qStart, qEnd, rStart, rEnd, identity) tuples.
    """
    if filename == "":
        return []
    # Sniff the format from the first line; ValueError covers both a failed
    # 12-way unpack and failed numeric conversions.
    with open(filename) as blast:
        testline = blast.readline()
    try:
        (
            query,
            subject,
            ident,
            length,
            mismatch,
            indel,
            qStart,
            qEnd,
            rStart,
            rEnd,
            eVal,
            bitScore,
        ) = testline.split()
        float(ident)
        int(length)
        int(mismatch)
        int(indel)
        int(qStart)
        int(qEnd)
        int(rStart)
        int(rEnd)
        float(eVal)
        float(bitScore)
        crunch = False
    except ValueError:
        crunch = True
    outlist = []
    # 'with' guarantees the handle is closed (the old code leaked the second
    # open of the file).
    with open(filename) as blast:
        for line in blast:
            if crunch:
                score, ident, qStart, qEnd, query, rStart, rEnd, subject = (
                    line.split()[:8]
                )
                # Bug fix: these were left as strings, so "ident >= minident"
                # raised TypeError on Python 3 and rStart/rEnd broke the
                # drawing arithmetic downstream.
                ident = float(ident)
                qStart = int(qStart)
                qEnd = int(qEnd)
                rStart = int(rStart)
                rEnd = int(rEnd)
                eVal = 0
                length = abs(qStart - qEnd)
            else:
                (
                    query,
                    subject,
                    ident,
                    length,
                    mismatch,
                    indel,
                    qStart,
                    qEnd,
                    rStart,
                    rEnd,
                    eVal,
                    bitScore,
                ) = line.split()
                ident = float(ident)
                length = int(length)
                qStart = int(qStart)
                qEnd = int(qEnd)
                rStart = int(rStart)
                rEnd = int(rEnd)
                eVal = float(eVal)
            if length >= minlength and eVal <= mineval and ident >= minident:
                outlist.append((qStart, qEnd, rStart, rEnd, ident))
    return outlist
# draws the image
def draw(
    filename,
    minlength,
    mineval,
    minIdent,
    inputlist,
    width,
    height1,
    height2,
    minblastc,
    maxblastc,
    minblastci,
    maxblastci,
    drawfig1,
    drawfig2,
    drawfig3,
    compress,
    reverseList,
    featDict,
    glt,
    exont,
    genet,
    featlengths,
    aln,
    graphit,
    blastoutline,
    minmaxlist,
    autodetect,
    legend,
    legname,
    writebmp=0,
):
    """Render the genome-comparison figure.

    ``inputlist`` alternates annotation files (even indices) and BLAST
    comparison files (odd indices).  Each genome row is drawn with its
    features, the BLAST hits between consecutive genomes are drawn as shaded
    trapezoids, and optional legends, scale bar, identity gradient and data
    graphs are added around the rows.

    Returns ``minident`` (the lowest identity drawn; 101 signals the run was
    aborted) when ``writebmp`` is 0 (image saved to ``filename``), or a
    ``(gif_string, minident, width, height)`` style tuple when ``writebmp``
    is 1 or 2.
    """
    # global variable for stopping script midway
    global abortCaptain
    secondlist = []
    maxlength = 0
    totalheight = 0
    # returning a minident value of 101 means the script has been aborted
    minident = 101
    # gets feature file and blast information
    for i in range(0, len(inputlist)):
        if i % 2 == 0:
            temp = getArrows(inputlist[i], legname)
            thirdlist = []
            # minmaxopt: 0 = draw whole genome, 1 = draw a simple subrange,
            # 2 = subrange that wraps around the origin (min > max)
            if minmaxlist[int(i / 2)][1] == "Max":
                if temp[0] == None:
                    maxcut = featlengths[int(i / 2)]
                else:
                    maxcut = temp[0]
                if minmaxlist[int(i / 2)][0] == 1:
                    minmaxopt = 0
                else:
                    minmaxopt = 1
                    mincut = minmaxlist[int(i / 2)][0]
            else:
                mincut = minmaxlist[int(i / 2)][0]
                maxcut = minmaxlist[int(i / 2)][1]
                if minmaxlist[int(i / 2)][0] < minmaxlist[int(i / 2)][1]:
                    minmaxopt = 1
                else:
                    minmaxopt = 2
            for j in temp[1]:
                if j.type in featDict:
                    if j.colour == None:
                        j.colour = featDict[j.type][1]
                    if minmaxopt == 0:
                        thirdlist.append(j)
                    elif minmaxopt == 1:
                        # j.start is an int for plain features, a list for
                        # multi-exon (joined) features
                        if type(j.start) == int:
                            if j.start >= mincut and j.stop <= maxcut:
                                aninstance = feature(
                                    j.start - mincut + 1,
                                    j.stop - mincut + 1,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                        else:
                            if j.start[0] >= mincut and j.stop[-1] <= maxcut:
                                tempstart = []
                                for k in j.start:
                                    tempstart.append(k - mincut + 1)
                                tempstop = []
                                for k in j.stop:
                                    tempstop.append(k - mincut + 1)
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                    elif minmaxopt == 2:
                        if temp[0] == None:
                            templength = featlengths[int(i / 2)]
                        else:
                            templength = temp[0]
                        if type(j.start) == int:
                            if j.stop <= maxcut:
                                tempstart = j.start + templength - mincut + 1
                                tempstop = j.stop + templength - mincut + 1
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                            elif j.start >= mincut:
                                tempstart = j.start - mincut + 1
                                tempstop = j.stop - mincut + 1
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                        else:
                            if j.stop[-1] <= maxcut:
                                tempstart = []
                                for k in j.start:
                                    tempstart.append(k + templength - mincut + 1)
                                tempstop = []
                                for k in j.stop:
                                    tempstop.append(k + templength - mincut + 1)
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                            elif j.start[0] >= mincut:
                                tempstart = []
                                for k in j.start:
                                    tempstart.append(k - mincut + 1)
                                tempstop = []
                                for k in j.stop:
                                    tempstop.append(k - mincut + 1)
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
            # draw longest features first so shorter ones stay visible on top
            thirdlist.sort(key=lambda ii: ii.length(), reverse=True)
            if minmaxopt == 0:
                if temp[0] == None:
                    secondlist.append((featlengths[int(i / 2)], thirdlist))
                    if featlengths[int(i / 2)] > maxlength:
                        maxlength = featlengths[int(i / 2)]
                else:
                    secondlist.append((temp[0], thirdlist))
                    if temp[0] > maxlength:
                        maxlength = temp[0]
            elif minmaxopt == 1:
                secondlist.append((maxcut - mincut + 1, thirdlist))
                if maxcut - mincut + 1 > maxlength:
                    maxlength = maxcut - mincut + 1
            elif minmaxopt == 2:
                if temp[0] == None:
                    templength = featlengths[int(i / 2)]
                else:
                    templength = temp[0]
                secondlist.append((templength - mincut + maxcut + 1, thirdlist))
                if templength - mincut + maxcut + 1 > maxlength:
                    # NOTE(review): "maxlength" on the right-hand side looks
                    # like a typo for "maxcut" (compare the condition above) --
                    # verify before relying on wrap-around subranges
                    maxlength = templength - mincut + maxlength + 1
            totalheight += height1
        else:
            totalheight += height2
            temp = getBlast(inputlist[i], minlength, mineval, minIdent)
            for j in temp:
                if j[4] < minident:
                    minident = j[4]
            secondlist.append(temp)
    # calculates offsets for genomes if best blast alignment is selected
    if autodetect and maxlength > 100000:
        # for large figures drop features/hits that would render narrower
        # than a few pixels
        tempsecond = []
        minident = 101
        for i in range(len(secondlist)):
            temp = []
            if i % 2 == 0:
                for j in secondlist[i][1]:
                    if type(j.start) == int:
                        if (j.stop - j.start) * 1.0 / maxlength * width > 4:
                            temp.append(j)
                    else:
                        if (j.stop[0] - j.start[0]) * 1.0 / maxlength * width > 4:
                            temp.append(j)
                tempsecond.append((secondlist[i][0], temp))
            else:
                for j in secondlist[i]:
                    if (j[1] - j[0]) * 1.0 / maxlength * width > 3:
                        temp.append(j)
                        if j[4] < minident:
                            minident = j[4]
                tempsecond.append(temp)
        secondlist = tempsecond
    # a user-supplied identity cutoff overrides the autodetected minimum
    if minIdent != 0:
        minident = minIdent
    if aln == "best blast":
        # blastmatch[k] is the horizontal offset applied to genome k so that
        # its strongest (longest) hit lines up with the previous genome
        blastmatch = [0]
        for i in range(1, len(secondlist), 2):
            maxbitscore = 0
            for j in secondlist[i]:
                if j[1] - j[0] > maxbitscore:
                    qstart, qend, rstart, rend = j[0], j[1], j[2], j[3]
                    maxbitscore = j[1] - j[0]
            if len(secondlist[i]) == 0:
                theQstart = 0
            elif reverseList[int(i / 2)]:
                theQstart = secondlist[i - 1][0] - qend
            else:
                theQstart = qstart
            if reverseList[int((i + 1) / 2)]:
                if len(secondlist[i]) == 0:
                    theRstart = 0
                elif rstart < rend:
                    theRstart = secondlist[i + 1][0] - rend
                else:
                    theRstart = secondlist[i + 1][0] - rstart
            else:
                if len(secondlist[i]) == 0:
                    theRstart = 0
                elif rstart < rend:
                    theRstart = rstart
                else:
                    theRstart = rend
            blastmatch.append(blastmatch[-1] + theQstart - theRstart)
        # shift all offsets so the leftmost genome starts at 0
        theminblast = min(blastmatch)
        templist = []
        for i in blastmatch:
            templist.append(i - theminblast)
        blastmatch = templist
        for i in range(0, len(secondlist) + 1, 2):
            if secondlist[i][0] + blastmatch[int(i / 2)] > maxlength:
                maxlength = secondlist[i][0] + blastmatch[int(i / 2)]
    leghei = 0
    # collect (name, colour, shape, position) tuples for the side legends,
    # deduplicated across all genomes
    if legend == "Single column" or legend == "Two columns":
        legendArrows = set()
        legendList = []
        for i in range(len(secondlist)):
            if i % 2 == 0:
                legendList.append([])
                for j in secondlist[i][1]:
                    if (
                        j.name != None
                        and (j.name, j.colour, featDict[j.type][0]) not in legendArrows
                    ):
                        legendArrows.add((j.name, j.colour, featDict[j.type][0]))
                        if type(j.start) == int:
                            tempjstart = j.start
                        else:
                            tempjstart = j.start[0]
                        legendList[int(i / 2)].append(
                            (j.name, j.colour, featDict[j.type][0], tempjstart)
                        )
        if legend == "Single column":
            leghei += min([5000, len(legendArrows) * 90])
        elif legend == "Two columns":
            leghei = min([5000, (len(legendArrows) + 1) / 2 * 90])
    global shifter
    # lay out vertical name labels above the first genome, avoiding overlaps;
    # therung = None means the label is skipped (no free rung)
    if legend == "Top" or legend == "Top & Bottom":
        toplegpos = [0, 0, 0, set(), set(), set()]
        legendTop = []
        testbmp = BitMap(10, 10)
        if aln == "best blast":
            shifter = blastmatch[0]
        genrev1 = reverseList[0]
        for j in secondlist[0][1]:
            if j.name != None:
                if type(j.start) == int:
                    firstleg = True
                    secondleg = True
                    thirdleg = True
                    if genrev1:
                        legpos = convertPosR(
                            secondlist[0][0],
                            maxlength,
                            width,
                            (j.start + j.stop) / 2,
                            aln,
                        )
                    else:
                        legpos = convertPos(
                            secondlist[0][0],
                            maxlength,
                            width,
                            (j.start + j.stop) / 2,
                            aln,
                        )
                    for q in range(legpos - 40, legpos + 50):
                        if q in toplegpos[3]:
                            firstleg = False
                        if q in toplegpos[4]:
                            secondleg = False
                        if q in toplegpos[5]:
                            thirdleg = False
                    if firstleg:
                        therung = 1
                        if testbmp.lengthString(j.name[:10], 64) > toplegpos[0]:
                            toplegpos[0] = testbmp.lengthString(j.name[:10], 64)
                        for q in range(legpos - 40, legpos + 50):
                            toplegpos[3].add(q)
                    # elif secondleg:
                    #     therung = 2
                    #     if testbmp.lengthString(j.name[:10], 64) > toplegpos[1]:
                    #         toplegpos[1] = testbmp.lengthString(j.name[:10], 64)
                    #     for q in range(legpos - 40, legpos + 50):
                    #         toplegpos[4].add(q)
                    # elif thirdleg:
                    #     therung = 3
                    #     if testbmp.lengthString(j.name[:10], 64) > toplegpos[2]:
                    #         toplegpos[2] = testbmp.lengthString(j.name[:10], 64)
                    #     for q in range(legpos - 40, legpos + 50):
                    #         toplegpos[5].add(q)
                    else:
                        therung = None
                    legendTop.append((j.name[:10], legpos, therung))
                else:
                    firstleg = True
                    secondleg = True
                    thirdleg = True
                    if genrev1:
                        legpos = convertPosR(
                            secondlist[0][0],
                            maxlength,
                            width,
                            (j.start[0] + j.stop[0]) / 2,
                            aln,
                        )
                    else:
                        legpos = convertPos(
                            secondlist[0][0],
                            maxlength,
                            width,
                            (j.start[0] + j.stop[0]) / 2,
                            aln,
                        )
                    for q in range(legpos - 40, legpos + 50):
                        if q in toplegpos[3]:
                            firstleg = False
                        if q in toplegpos[4]:
                            secondleg = False
                        if q in toplegpos[5]:
                            thirdleg = False
                    if firstleg:
                        therung = 1
                        if testbmp.lengthString(j.name[:10], 64) > toplegpos[0]:
                            toplegpos[0] = testbmp.lengthString(j.name[:10], 64)
                        for q in range(legpos - 40, legpos + 50):
                            toplegpos[3].add(q)
                    # elif secondleg:
                    #     therung = 2
                    #     if testbmp.lengthString(j.name[:10], 64) > toplegpos[1]:
                    #         toplegpos[1] = testbmp.lengthString(j.name[:10], 64)
                    #     for q in range(legpos - 40, legpos + 50):
                    #         toplegpos[4].add(q)
                    # elif thirdleg:
                    #     therung = 3
                    #     if testbmp.lengthString(j.name[:10], 64) > toplegpos[2]:
                    #         toplegpos[2] = testbmp.lengthString(j.name[:10], 64)
                    #     for q in range(legpos - 40, legpos + 50):
                    #         toplegpos[5].add(q)
                    else:
                        therung = None
                    legendTop.append((j.name[:10], legpos, therung))
        totalheight += sum(toplegpos[:3]) + 40
    # same layout pass for labels below the last genome
    if legend == "Bottom" or legend == "Top & Bottom":
        botlegpos = [0, 0, 0, set(), set(), set()]
        legendBot = []
        testbmp = BitMap(10, 10)
        if aln == "best blast":
            shifter = blastmatch[-1]
        genrev1 = reverseList[-1]
        for j in secondlist[-1][1]:
            if j.name != None:
                if type(j.start) == int:
                    firstleg = True
                    secondleg = True
                    thirdleg = True
                    if genrev1:
                        legpos = convertPosR(
                            secondlist[-1][0],
                            maxlength,
                            width,
                            (j.start + j.stop) / 2,
                            aln,
                        )
                    else:
                        legpos = convertPos(
                            secondlist[-1][0],
                            maxlength,
                            width,
                            (j.start + j.stop) / 2,
                            aln,
                        )
                    for q in range(legpos - 40, legpos + 50):
                        if q in botlegpos[3]:
                            firstleg = False
                        if q in botlegpos[4]:
                            secondleg = False
                        if q in botlegpos[5]:
                            thirdleg = False
                    if firstleg:
                        therung = 1
                        if testbmp.lengthString(j.name[:10], 64) > botlegpos[0]:
                            botlegpos[0] = testbmp.lengthString(j.name[:10], 64)
                        for q in range(legpos - 40, legpos + 50):
                            botlegpos[3].add(q)
                    # elif secondleg:
                    #     therung = 2
                    #     if testbmp.lengthString(j.name[:10], 64) > botlegpos[1]:
                    #         botlegpos[1] = testbmp.lengthString(j.name[:10], 64)
                    #     for q in range(legpos - 40, legpos + 50):
                    #         botlegpos[4].add(q)
                    # elif thirdleg:
                    #     therung = 3
                    #     if testbmp.lengthString(j.name[:10], 64) > botlegpos[2]:
                    #         botlegpos[2] = testbmp.lengthString(j.name[:10], 64)
                    #     for q in range(legpos - 40, legpos + 50):
                    #         botlegpos[5].add(q)
                    else:
                        therung = None
                    legendBot.append((j.name[:10], legpos, therung))
                else:
                    firstleg = True
                    secondleg = True
                    thirdleg = True
                    if genrev1:
                        legpos = convertPosR(
                            secondlist[-1][0],
                            maxlength,
                            width,
                            (j.start[0] + j.stop[0]) / 2,
                            aln,
                        )
                    else:
                        legpos = convertPos(
                            secondlist[-1][0],
                            maxlength,
                            width,
                            (j.start[0] + j.stop[0]) / 2,
                            aln,
                        )
                    # NOTE(review): this branch checks raw sequence positions
                    # against botlegpos, whereas the int branch above uses the
                    # converted pixel position (legpos) -- looks inconsistent,
                    # verify intended behaviour
                    for q in range(
                        (j.start[0] + j.stop[0]) / 2 - 40,
                        (j.start[0] + j.stop[0]) / 2 + 50,
                    ):
                        if q in botlegpos[3]:
                            firstleg = False
                        if q in botlegpos[4]:
                            secondleg = False
                        if q in botlegpos[5]:
                            thirdleg = False
                    if firstleg:
                        therung = 1
                        if testbmp.lengthString(j.name[:10], 64) > botlegpos[0]:
                            botlegpos[0] = testbmp.lengthString(j.name[:10], 64)
                        for q in range(
                            (j.start[0] + j.stop[0]) / 2 - 40,
                            (j.start[0] + j.stop[0]) / 2 + 50,
                        ):
                            botlegpos[3].add(q)
                    # elif secondleg:
                    #     therung = 2
                    #     if testbmp.lengthString(j.name[:10], 64) > botlegpos[1]:
                    #         botlegpos[1] = testbmp.lengthString(j.name[:10], 64)
                    #     for q in range((j.start[0] + j.stop[0])/2 - 40, (j.start[0] + j.stop[0])/2 + 50):
                    #         botlegpos[4].add(q)
                    # elif thirdleg:
                    #     therung = 3
                    #     if testbmp.lengthString(j.name[:10], 64) > botlegpos[2]:
                    #         botlegpos[2] = testbmp.lengthString(j.name[:10], 64)
                    #     for q in range((j.start[0] + j.stop[0])/2 - 40, (j.start[0] + j.stop[0])/2 + 50):
                    #         botlegpos[5].add(q)
                    else:
                        therung = None
                    legendBot.append(
                        (j.name[:10], (j.start[0] + j.stop[0]) / 2, therung)
                    )
        totalheight += sum(botlegpos[:3]) + 40
    # creates extra width for blast identity legend
    drawfig1hei = 0
    if drawfig1 and minident != 101:
        drawfig1hei = 500
    extraheight = 0
    # creates extra height for scale legend
    drawfig2hei = 0
    if drawfig2:
        drawfig2hei = height1 + 70
    # creates extra height for graph
    totalheight += max([leghei, drawfig1hei, drawfig2hei])
    hei = totalheight
    if graphit != None:
        hei += graphit[3] * len(graphit[0]) + 2 * graphit[7] * len(graphit[0])
        extraheight = (graphit[3] + 20) * len(graphit[0])
    bmp = BitMap(width, hei + 1)
    # draws the scale figure
    columnhei = max([leghei, drawfig1hei, drawfig2hei])
    if legend == "Single column":
        index = 0
        legendArrows = []
        for i in range(len(legendList)):
            x = legendList[i]
            x.sort(key=operator.itemgetter(3))
            if reverseList[i]:
                x.reverse()
            legendArrows += x
        for i in range(columnhei - 74, 10, -90):
            # print len(legendArrows), legendArrows[index][2]
            if index < len(legendArrows) and legendArrows[index][2] == "rect":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawOutRect(5, i, 96, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "arrow":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawRightArrow(5, i, 96, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "frame":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawRightFrame(5, i - 48, 96, 128, genet, 1)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "pointer":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawPointer(34, i, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0], 106, i, 64)
            else:
                print ("wang")
            index += 1
    elif legend == "Two columns":
        index = 0
        legendArrows = []
        for i in range(len(legendList)):
            x = legendList[i]
            x.sort(key=operator.itemgetter(3))
            if reverseList[i]:
                x.reverse()
            legendArrows += x
        # first column
        for i in range(columnhei - 74, 10, -90):
            if index < len(legendArrows) and legendArrows[index][2] == "rect":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawOutRect(5, i, 96, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0][:45], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "arrow":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawRightArrow(5, i, 96, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0][:45], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "frame":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawRightFrame(5, i - 48, 96, 128, genet, 1)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0][:45], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "pointer":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawPointer(34, i, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0][:45], 106, i, 64)
            index += 1
        # second column, offset a third of the figure width to the right
        for i in range(columnhei - 74, 10, -90):
            if index < len(legendArrows) and legendArrows[index][2] == "rect":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawOutRect(5 + width / 3, i, 96, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0][:45], 106 + width / 3, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "arrow":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawRightArrow(5 + width / 3, i, 96, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0][:45], 106 + width / 3, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "frame":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawRightFrame(5 + width / 3, i - 48, 96, 128, genet, 1)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0][:45], 106 + width / 3, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "pointer":
                theColor = Color(
                    legendArrows[index][1][0],
                    legendArrows[index][1][1],
                    legendArrows[index][1][2],
                )
                bmp.setPenColor(theColor)
                bmp.drawPointer(34 + width / 3, i, 64, genet)
                bmp.setPenColor(Color.BLACK)
                bmp.writeString(legendArrows[index][0][:45], 106 + width / 3, i, 64)
            index += 1
    # write the vertical top/bottom labels on their assigned rungs
    if legend == "Top" or legend == "Top & Bottom":
        rung1 = totalheight - sum(toplegpos[:3]) - 30
        rung2 = rung1 + toplegpos[0] + 10
        rung3 = rung2 + toplegpos[1] + 10
        for i in legendTop:
            if i[0][0].lower() == i[0][0]:
                xpos = i[1] + 24
            else:
                xpos = i[1] + 32
            if i[2] == 1:
                bmp.writeString(i[0], xpos, rung1, 64, False, False, 1)
            elif i[2] == 2:
                bmp.writeString(i[0], xpos, rung2, 64, False, False, 1)
            elif i[2] == 3:
                bmp.writeString(i[0], xpos, rung3, 64, False, False, 1)
    if legend == "Bottom" or legend == "Top & Bottom":
        rung1 = sum(botlegpos[:3]) + 30 + columnhei
        rung2 = rung1 - botlegpos[0] - 10
        rung3 = rung2 - botlegpos[1] - 10
        for i in legendBot:
            if i[0][-1].lower() == i[0][-1]:
                xpos = i[1] + 24
            else:
                xpos = i[1] + 32
            if i[2] == 1:
                bmp.writeString(i[0], xpos, rung1, 64, False, False, 1, "right")
            elif i[2] == 2:
                bmp.writeString(i[0], xpos, rung2, 64, False, False, 1, "right")
            elif i[2] == 3:
                bmp.writeString(i[0], xpos, rung3, 64, False, False, 1, "right")
    # draw the scale bar (drawfig2 holds its length in bases when enabled)
    if drawfig2 != False:
        bmp.setPenColor(Color.BLACK)
        x1 = width - 600 - drawfig2 * 1.0 / maxlength * width
        x2 = width - 600
        bmp.drawLine(x1, columnhei - height1 / 2 - 74, x2, columnhei - height1 / 2 - 74)
        bmp.drawLine(
            x1, columnhei - height1 / 2 - 1 - 74, x2, columnhei - height1 / 2 - 1 - 74
        )
        bmp.drawLine(
            x1, columnhei - height1 / 2 + 1 - 74, x2, columnhei - height1 / 2 + 1 - 74
        )
        bmp.drawLine(
            x1 - 1,
            columnhei - height1 / 4 - 74,
            x1 - 1,
            columnhei - height1 / 4 * 3 - 74,
        )
        bmp.drawLine(
            x1 + 1,
            columnhei - height1 / 4 - 74,
            x1 + 1,
            columnhei - height1 / 4 * 3 - 74,
        )
        bmp.drawLine(
            x1, columnhei - height1 / 4 - 74, x1, columnhei - height1 / 4 * 3 - 74
        )
        bmp.drawLine(
            x2, columnhei - height1 / 4 - 74, x2, columnhei - height1 / 4 * 3 - 74
        )
        bmp.drawLine(
            x2 + 1,
            columnhei - height1 / 4 - 74,
            x2 + 1,
            columnhei - height1 / 4 * 3 - 74,
        )
        bmp.drawLine(
            x2 - 1,
            columnhei - height1 / 4 - 74,
            x2 - 1,
            columnhei - height1 / 4 * 3 - 74,
        )
        # label the bar in Mbp/Kbp when the length is a round number
        strfig2 = str(drawfig2)
        if strfig2[-6:] == "000000":
            strfig2 = strfig2[:-6] + " Mbp"
        elif strfig2[-3:] == "000":
            strfig2 = strfig2[:-3] + " Kbp"
        testbmp = BitMap(10, 10)
        bmp.writeString(
            strfig2,
            (x1 + x2) / 2 - testbmp.lengthString(strfig2, 64) / 2,
            columnhei - height1 / 4 - 59,
            64,
        )
    # draws the graph
    if graphit != None:
        thearray, maxgc, mingc, gheight, glinet, gtype, gmaxy, ggap = graphit
        widthpixellist = []
        leftpixellist = []
        rightpixellist = []
        for i in range(len(thearray)):
            if aln == "best blast":
                shifter = blastmatch[i]
            if reverseList[i]:
                rightpixel = convertPosR(secondlist[i * 2][0], maxlength, width, 0, aln)
                leftpixel = convertPosR(
                    secondlist[i * 2][0], maxlength, width, secondlist[i * 2][0], aln
                )
                thearray[i].reverse()
            else:
                leftpixel = convertPos(secondlist[i * 2][0], maxlength, width, 0, aln)
                rightpixel = convertPos(
                    secondlist[i * 2][0], maxlength, width, secondlist[i * 2][0], aln
                )
            widthpixel = rightpixel - leftpixel + 1
            widthpixellist.append(widthpixel)
            leftpixellist.append(leftpixel)
            rightpixellist.append(rightpixel)
        neg = False
        # "Auto" y-limit: scan the binned values for the largest magnitude
        if gmaxy == "Auto":
            gmaxy = 0
            for i in range(0, len(thearray)):
                if min(thearray[i]) < 0:
                    neg = True
                for j in range(0, widthpixellist[i]):
                    aa = int(j * (len(thearray[i]) * 1.0 / widthpixellist[i]))
                    bb = int((j + 1) * (len(thearray[i]) * 1.0 / widthpixellist[i]))
                    if aa == bb:
                        bb += 1
                    temparr = thearray[i][aa:bb]
                    gyval = abs(sum(temparr) * 1.0 / len(temparr))
                    if gyval > gmaxy:
                        gmaxy = gyval
        else:
            gmaxy = float(gmaxy)
            for i in range(0, len(thearray)):
                if min(thearray[i]) < 0:
                    neg = True
        # centre the axis when negative values exist, otherwise sit it at the base
        if neg:
            axispos = hei - ggap - gheight / 2 - glinet / 2
        else:
            axispos = hei - ggap - gheight - glinet
        gc1, gc2, gc3 = maxgc
        maxgcColour = Color(gc1, gc2, gc3)
        bmp.setPenColor(maxgcColour)
        gc1, gc2, gc3 = mingc
        mingcColour = Color(gc1, gc2, gc3)
        for qq in range(len(thearray)):
            bmp.setPenColor(Color.BLACK)
            lastgypos = None
            for i in range(axispos, axispos + glinet):
                bmp.drawLine(leftpixellist[qq], i, rightpixellist[qq], i)
            bmp.setPenColor(maxgcColour)
            for i in range(0, widthpixellist[qq]):
                # bin the data array down to one value per pixel column
                aa = int(i * (len(thearray[qq]) * 1.0 / widthpixellist[qq]))
                bb = int((i + 1) * (len(thearray[qq]) * 1.0 / widthpixellist[qq]))
                if aa == bb:
                    bb += 1
                temparr = thearray[qq][aa:bb]
                gyval = sum(temparr) * 1.0 / len(temparr)
                yvalpixratio = gyval / gmaxy
                if yvalpixratio > 1:
                    yvalpixratio = 1
                if yvalpixratio < -1:
                    yvalpixratio = -1
                if neg:
                    if yvalpixratio < 0:
                        gc1, gc2, gc3 = mingc
                        bmp.setPenColor(mingcColour)
                        yvalpix = round(yvalpixratio * (gheight / 2 - glinet / 2))
                        if gtype == "Line":
                            if lastgypos != None:
                                bmp.drawLine(
                                    leftpixellist[qq] + i - 1,
                                    lastgypos,
                                    leftpixellist[qq] + i,
                                    axispos + yvalpix,
                                )
                            lastgypos = axispos + yvalpix
                        elif gtype == "Histogram":
                            bmp.drawLine(
                                leftpixellist[qq] + i,
                                axispos - 1,
                                leftpixellist[qq] + i,
                                axispos + yvalpix,
                            )
                    else:
                        gc1, gc2, gc3 = maxgc
                        yvalpix = round(
                            yvalpixratio * (gheight / 2 - (glinet - glinet / 2))
                        )
                        bmp.setPenColor(maxgcColour)
                        if gtype == "Line":
                            if lastgypos != None:
                                bmp.drawLine(
                                    leftpixellist[qq] + i - 1,
                                    lastgypos,
                                    leftpixellist[qq] + i,
                                    axispos + glinet + yvalpix,
                                )
                            lastgypos = axispos + glinet + yvalpix
                        elif gtype == "Histogram" and round(yvalpix) != 0.0:
                            bmp.drawLine(
                                leftpixellist[qq] + i,
                                axispos + glinet,
                                leftpixellist[qq] + i,
                                axispos + yvalpix,
                            )
                else:
                    yvalpix = round(yvalpixratio * (gheight - glinet))
                    if gtype == "Line":
                        # NOTE(review): the x/y arguments here look swapped
                        # relative to the other Line branches (i vs
                        # leftpixellist[qq] + ...) -- verify
                        if lastgypos != None:
                            bmp.drawLine(
                                leftpixellist[qq] + i - 1,
                                lastgypos,
                                i,
                                leftpixellist[qq] + axispos + glinet + yvalpix,
                            )
                        lastgypos = axispos + glinet + 1 + yvalpix
                    elif gtype == "Histogram" and round(yvalpix) != 0.0:
                        bmp.drawLine(
                            leftpixellist[qq] + i,
                            axispos + glinet,
                            leftpixellist[qq] + i,
                            axispos + glinet + yvalpix,
                        )
            axispos -= gheight + 2 * ggap + height1 + height2
        modfig1 = (graphit[3] + 2 * ggap) * len(graphit[0])
    else:
        modfig1 = 0
    # draws the blast gradient legend
    if drawfig1 and minident != 101:
        bmp.setPenColor(Color.BLACK)
        bmp.writeString(
            str(int(round(minident))) + "%", width - 300, columnhei - 480, 64
        )
        bmp.writeString("100%", width - 300, columnhei - 84, 64)
        # left half: normal-hit gradient; right half: inverted-hit gradient
        for i in range(columnhei - 480, columnhei - 20):
            ratio = round((i - (columnhei - 480) * 1.0) / 460, 2)
            r1 = int(minblastc[0] * (1 - ratio) + maxblastc[0] * ratio)
            r2 = int(minblastc[1] * (1 - ratio) + maxblastc[1] * ratio)
            r3 = int(minblastc[2] * (1 - ratio) + maxblastc[2] * ratio)
            theColor = Color(r1, r2, r3)
            bmp.setPenColor(theColor)
            bmp.drawLine(width - 400, i, width - 360, i)
            r1 = int(minblastci[0] * (1 - ratio) + maxblastci[0] * ratio)
            r2 = int(minblastci[1] * (1 - ratio) + maxblastci[1] * ratio)
            r3 = int(minblastci[2] * (1 - ratio) + maxblastci[2] * ratio)
            theColor = Color(r1, r2, r3)
            bmp.setPenColor(theColor)
            bmp.drawLine(width - 360, i, width - 320, i)
    # draws feature and blast figures
    for i in range(0, len(secondlist)):
        # draws the blast figure
        if i % 2 == 0:
            if aln == "best blast":
                shifter = blastmatch[int(i / 2)]
            genrev1 = reverseList[int(i / 2)]
            ymod = totalheight - (height1 * i / 2 + height2 * i / 2) - height1
            if graphit != None and len(thearray) > 1:
                ymod += (gheight + 2 * ggap) * (len(thearray) - i / 2 - 1)
            if legend == "Top" or legend == "Top & Bottom":
                ymod -= sum(toplegpos[:3]) + 40
            length = secondlist[i][0]
            bmp.setPenColor(Color.BLACK)
            # central genome line, glt pixels thick
            jj = height1 / 2 + glt / 2
            for j in range(glt):
                bmp.drawLine(
                    convertPos(length, maxlength, width, 0, aln),
                    (ymod + jj),
                    convertPos(length, maxlength, width, length, aln),
                    (ymod + jj),
                )
                jj -= 1
            bmp.setPenColor(Color.RED)
            for j in secondlist[i][1]:
                if abortCaptain:
                    return None
                # effective strand after applying the per-genome reverse flag
                if (j.strand == "+" and not genrev1) or (j.strand == "-" and genrev1):
                    theColor = Color(j.colour[0], j.colour[1], j.colour[2])
                    bmp.setPenColor(theColor)
                    if type(j.start) == int:
                        if genrev1:
                            x2 = convertPosR(length, maxlength, width, j.start, aln)
                            x1 = convertPosR(length, maxlength, width, j.stop, aln)
                        else:
                            x1 = convertPos(length, maxlength, width, j.start, aln)
                            x2 = convertPos(length, maxlength, width, j.stop, aln)
                        if featDict[j.type][0] == "rect":
                            bmp.drawOutRect(x1, ymod, x2 - x1, height1, genet)
                        elif featDict[j.type][0] == "arrow":
                            bmp.drawRightArrow(x1, ymod, x2 - x1, height1, genet)
                        elif featDict[j.type][0] == "frame":
                            bmp.drawRightFrame(
                                x1, ymod, x2 - x1, height1, genet, j.start % 3
                            )
                        elif featDict[j.type][0] == "pointer":
                            bmp.drawPointer(x1, ymod, height1, genet)
                    else:
                        # multi-exon feature: draw the last exon full height,
                        # then earlier exons joined by dashed intron lines
                        if genrev1:
                            x2 = convertPosR(length, maxlength, width, j.start[-1], aln)
                            x1 = convertPosR(length, maxlength, width, j.stop[-1], aln)
                        else:
                            x1 = convertPos(length, maxlength, width, j.start[-1], aln)
                            x2 = convertPos(length, maxlength, width, j.stop[-1], aln)
                        if featDict[j.type][0] == "rect":
                            bmp.drawOutRect(x1, ymod, x2 - x1, height1, genet)
                        elif featDict[j.type][0] == "arrow":
                            bmp.drawRightArrow(x1, ymod, x2 - x1, height1, genet)
                        elif featDict[j.type][0] == "frame":
                            bmp.drawRightFrame(
                                x1, ymod, x2 - x1, height1, genet, j.start[-1] % 3
                            )
                        elif featDict[j.type][0] == "pointer":
                            bmp.drawPointer(x1, ymod, height1, genet)
                        for k in range(2, len(j.start) + 1):
                            if genrev1:
                                x4 = convertPosR(
                                    length, maxlength, width, j.start[-k], aln
                                )
                                x3 = convertPosR(
                                    length, maxlength, width, j.stop[-k], aln
                                )
                            else:
                                x3 = convertPos(
                                    length, maxlength, width, j.start[-k], aln
                                )
                                x4 = convertPos(
                                    length, maxlength, width, j.stop[-k], aln
                                )
                            if (
                                featDict[j.type][0] == "arrow"
                                or featDict[j.type][0] == "rect"
                            ):
                                if x1 - x4 > 2:
                                    bmp.setPenColor(Color.BLACK)
                                    bmp.drawDash(
                                        x4,
                                        ymod + 3 * height1 / 4,
                                        x4,
                                        ymod + height1,
                                        exont,
                                    )
                                    bmp.drawDash(
                                        x4, ymod + height1, x1, ymod + height1, exont
                                    )
                                    bmp.drawDash(
                                        x1,
                                        ymod + height1,
                                        x1,
                                        ymod + 3 * height1 / 4,
                                        exont,
                                    )
                                    bmp.setPenColor(theColor)
                                bmp.drawOutRect(
                                    x3, ymod + height1 / 4, x4 - x3, height1 / 2, genet
                                )
                            elif featDict[j.type][0] == "frame":
                                if x1 - x4 > 2:
                                    bmp.setPenColor(Color.BLACK)
                                    bmp.drawDash(
                                        x4,
                                        ymod + 3 * height1 / 4,
                                        x4,
                                        ymod + height1,
                                        exont,
                                    )
                                    bmp.drawDash(
                                        x4, ymod + height1, x1, ymod + height1, exont
                                    )
                                    bmp.drawDash(
                                        x1,
                                        ymod + height1,
                                        x1,
                                        ymod + 3 * height1 / 4,
                                        exont,
                                    )
                                    bmp.setPenColor(theColor)
                                bmp.drawRightFrameRect(
                                    x3, ymod, x4 - x3, height1, genet, j.start[-k] % 3
                                )
                            x1, x2 = x3, x4
                else:
                    theColor = Color(j.colour[0], j.colour[1], j.colour[2])
                    bmp.setPenColor(theColor)
                    if type(j.start) == int:
                        if genrev1:
                            x2 = convertPosR(length, maxlength, width, j.start, aln)
                            x1 = convertPosR(length, maxlength, width, j.stop, aln)
                        else:
                            x1 = convertPos(length, maxlength, width, j.start, aln)
                            x2 = convertPos(length, maxlength, width, j.stop, aln)
                        if featDict[j.type][0] == "rect":
                            bmp.drawOutRect(x1, ymod, x2 - x1, height1, genet)
                        elif featDict[j.type][0] == "arrow":
                            bmp.drawLeftArrow(x1, ymod, x2 - x1, height1, genet)
                        elif featDict[j.type][0] == "frame":
                            bmp.drawLeftFrame(
                                x1, ymod, x2 - x1, height1, genet, j.stop % 3
                            )
                        elif featDict[j.type][0] == "pointer":
                            bmp.drawPointer(x2, ymod, height1, genet)
                    else:
                        if genrev1:
                            x2 = convertPosR(length, maxlength, width, j.start[0], aln)
                            x1 = convertPosR(length, maxlength, width, j.stop[0], aln)
                        else:
                            x1 = convertPos(length, maxlength, width, j.start[0], aln)
                            x2 = convertPos(length, maxlength, width, j.stop[0], aln)
                        if featDict[j.type][0] == "rect":
                            bmp.drawOutRect(x1, ymod, x2 - x1, height1, genet)
                        elif featDict[j.type][0] == "arrow":
                            bmp.drawLeftArrow(x1, ymod, x2 - x1, height1, genet)
                        elif featDict[j.type][0] == "frame":
                            bmp.drawLeftFrame(
                                x1, ymod, x2 - x1, height1, genet, j.stop[0] % 3
                            )
                        elif featDict[j.type][0] == "pointer":
                            bmp.drawPointer(x2, ymod, height1, genet)
                        for k in range(1, len(j.start)):
                            if genrev1:
                                x4 = convertPosR(
                                    length, maxlength, width, j.start[k], aln
                                )
                                x3 = convertPosR(
                                    length, maxlength, width, j.stop[k], aln
                                )
                            else:
                                x3 = convertPos(
                                    length, maxlength, width, j.start[k], aln
                                )
                                x4 = convertPos(
                                    length, maxlength, width, j.stop[k], aln
                                )
                            if (
                                featDict[j.type][0] == "rect"
                                or featDict[j.type][0] == "arrow"
                            ):
                                if x3 - x2 > 2:
                                    bmp.setPenColor(Color.BLACK)
                                    bmp.drawDash(
                                        x2,
                                        ymod + 3 * height1 / 4,
                                        x2,
                                        ymod + height1,
                                        exont,
                                    )
                                    bmp.drawDash(
                                        x2, ymod + height1, x3, ymod + height1, exont
                                    )
                                    bmp.drawDash(
                                        x3,
                                        ymod + height1,
                                        x3,
                                        ymod + 3 * height1 / 4,
                                        exont,
                                    )
                                    bmp.setPenColor(theColor)
                                bmp.drawOutRect(
                                    x3, ymod + height1 / 4, x4 - x3, height1 / 2, genet
                                )
                            elif featDict[j.type][0] == "frame":
                                if x3 - x2 > 2:
                                    bmp.setPenColor(Color.BLACK)
                                    bmp.drawDash(
                                        x2, ymod + height1 / 4, x2, ymod, exont
                                    )
                                    bmp.drawDash(x2, ymod, x3, ymod, exont)
                                    bmp.drawDash(
                                        x3, ymod, x3, ymod + height1 / 4, exont
                                    )
                                    bmp.setPenColor(theColor)
                                bmp.drawLeftFrameRect(
                                    x3, ymod, x4 - x3, height1, genet, j.stop[k] % 3
                                )
                            x1, x2 = x3, x4
        else:
            # draws the blast hits
            genrev2 = reverseList[int((i + 1) / 2)]
            length1 = secondlist[i - 1][0]
            length2 = secondlist[i + 1][0]
            ymod = (
                totalheight
                - (height1 * (i - 1) / 2 + height2 * (i - 1) / 2)
                - height1
                - 1
            )
            if graphit != None and len(thearray) > 1:
                ymod += (gheight + 2 * ggap) * (len(thearray) - i / 2 - 1)
            if legend == "Top" or legend == "Top & Bottom":
                ymod -= sum(toplegpos[:3]) + 40
            y1 = ymod
            y2 = y1 - height2 + 1
            for j in secondlist[i]:
                if abortCaptain:
                    return None
                qStart, qEnd, rStart, rEnd, ident = j
                # is the blast hit inverted
                if (
                    (rStart < rEnd and not genrev1 and not genrev2)
                    or (rStart > rEnd and not genrev1 and genrev2)
                    or (rStart < rEnd and genrev1 and genrev2)
                    or (rStart > rEnd and genrev1 and not genrev2)
                ):
                    crisscross = False
                else:
                    crisscross = True
                # shade by identity; guard against minident == 100 (zero division)
                try:
                    ratio = round((ident - minident) / (100 - minident), 2)
                except:
                    ratio = 1
                if crisscross:
                    r1 = int(minblastci[0] * (1 - ratio) + maxblastci[0] * ratio)
                    r2 = int(minblastci[1] * (1 - ratio) + maxblastci[1] * ratio)
                    r3 = int(minblastci[2] * (1 - ratio) + maxblastci[2] * ratio)
                else:
                    r1 = int(minblastc[0] * (1 - ratio) + maxblastc[0] * ratio)
                    r2 = int(minblastc[1] * (1 - ratio) + maxblastc[1] * ratio)
                    r3 = int(minblastc[2] * (1 - ratio) + maxblastc[2] * ratio)
                theColor = Color(r1, r2, r3)
                bmp.setPenColor(theColor)
                if aln == "best blast":
                    shifter = blastmatch[int(i / 2)]
                if genrev1:
                    x1e = convertPosR(length1, maxlength, width, qStart, aln)
                    x1s = convertPosR(length1, maxlength, width, qEnd, aln)
                else:
                    x1s = convertPos(length1, maxlength, width, qStart, aln)
                    x1e = convertPos(length1, maxlength, width, qEnd, aln)
                if aln == "best blast":
                    shifter = blastmatch[int((i + 1) / 2)]
                if genrev2 and rStart < rEnd:
                    x2e = convertPosR(length2, maxlength, width, rStart, aln)
                    x2s = convertPosR(length2, maxlength, width, rEnd, aln)
                elif genrev2 and rStart >= rEnd:
                    x2s = convertPosR(length2, maxlength, width, rStart, aln)
                    x2e = convertPosR(length2, maxlength, width, rEnd, aln)
                elif not genrev2 and rStart < rEnd:
                    x2s = convertPos(length2, maxlength, width, rStart, aln)
                    x2e = convertPos(length2, maxlength, width, rEnd, aln)
                else:
                    x2e = convertPos(length2, maxlength, width, rStart, aln)
                    x2s = convertPos(length2, maxlength, width, rEnd, aln)
                # fill the trapezoid by sweeping lines from the wider edge
                if crisscross:
                    if x1e - x1s >= x2e - x2s:
                        for k in range(x1s, x1e):
                            try:
                                x2 = x2e - (k - x1s) * 1.0 / (x1e - x1s) * (x2e - x2s)
                                bmp.drawLine(k, y1, x2, y2)
                            except:
                                pass
                    else:
                        for k in range(x2s, x2e):
                            x1 = x1e - (k - x2s) * 1.0 / (x2e - x2s) * (x1e - x1s)
                            bmp.drawLine(x1, y1, k, y2)
                    if blastoutline:
                        bmp.setPenColor(Color.BLACK)
                        bmp.drawLine(x1s, y1, x2e, y2)
                        bmp.drawLine(x1e, y1, x2s, y2)
                else:
                    if x1e - x1s >= x2e - x2s:
                        for k in range(x1s, x1e):
                            try:
                                x2 = (k - x1s) * 1.0 / (x1e - x1s) * (x2e - x2s) + x2s
                                bmp.drawLine(k, y1, x2, y2)
                                bmp.drawLine(k + 1, y1, x2, y2)
                            except:
                                pass
                    else:
                        for k in range(x2s, x2e):
                            x1 = (k - x2s) * 1.0 / (x2e - x2s) * (x1e - x1s) + x1s
                            bmp.drawLine(x1, y1, k, y2)
                            bmp.drawLine(x1, y1, k + 1, y2)
                    if blastoutline:
                        bmp.setPenColor(Color.BLACK)
                        bmp.drawLine(x1s, y1, x2s, y2)
                        bmp.drawLine(x1e, y1, x2e, y2)
    # writebmp: 0 = save to file, 1/2 = return an in-memory GIF string
    if writebmp == 0:
        bmp.saveFile(filename, compress)
        return minident
    elif writebmp == 1:
        return bmp.createGIFString(True), minident, bmp.wd, bmp.ht
    elif writebmp == 2:
        return bmp.createGIFString(False), minident, bmp.wd, bmp.ht
def drawsvg(
    filename,
    minlength,
    mineval,
    minIdent,
    inputlist,
    width,
    height1,
    height2,
    minblastc,
    maxblastc,
    minblastci,
    maxblastci,
    drawfig1,
    drawfig2,
    drawfig3,
    compress,
    reverseList,
    featDict,
    glt,
    exont,
    genet,
    featlengths,
    aln,
    graphit,
    blastoutline,
    minmaxlist,
    autodetect,
    legend,
    legname,
):
    """Render the multi-genome comparison figure as an SVG file.

    SVG counterpart of the bitmap drawing routine: parses the alternating
    list of annotation / blast files in inputlist, lays out one track per
    genome with blast-hit trapezoids between adjacent tracks, then writes
    the result with scalableVectorGraphics.

    Returns the minimum blast identity drawn (101 if nothing was drawn),
    or None if the run was aborted via the global abortCaptain flag.
    NOTE(review): drawfig3 and compress are unused here — presumably kept
    so the signature matches the bitmap renderer; confirm before removing.
    """
    # global variable for stopping script midway
    global abortCaptain
    secondlist = []
    maxlength = 0
    totalheight = 0
    # returning a minident value of 101 means the script has been aborted
    minident = 101
    # gets feature file and blast information
    for i in range(0, len(inputlist)):
        if i % 2 == 0:
            # even indices are annotation (feature) files
            temp = getArrows(inputlist[i], legname)
            thirdlist = []
            if minmaxlist[int(i / 2)][1] == "Max":
                if temp[0] == None:
                    maxcut = featlengths[int(i / 2)]
                else:
                    maxcut = temp[0]
                if minmaxlist[int(i / 2)][0] == 1:
                    minmaxopt = 0
                else:
                    minmaxopt = 1
                mincut = minmaxlist[int(i / 2)][0]
            else:
                mincut = minmaxlist[int(i / 2)][0]
                maxcut = minmaxlist[int(i / 2)][1]
                if minmaxlist[int(i / 2)][0] < minmaxlist[int(i / 2)][1]:
                    minmaxopt = 1
                else:
                    # min > max means the subregion wraps the origin
                    minmaxopt = 2
            for j in temp[1]:
                if j.type in featDict:
                    if j.colour == None:
                        j.colour = featDict[j.type][1]
                    if minmaxopt == 0:
                        thirdlist.append(j)
                    elif minmaxopt == 1:
                        if type(j.start) == int:
                            if j.start >= mincut and j.stop <= maxcut:
                                aninstance = feature(
                                    j.start - mincut + 1,
                                    j.stop - mincut + 1,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                        else:
                            if j.start[0] >= mincut and j.stop[-1] <= maxcut:
                                tempstart = []
                                for k in j.start:
                                    tempstart.append(k - mincut + 1)
                                tempstop = []
                                for k in j.stop:
                                    tempstop.append(k - mincut + 1)
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                    elif minmaxopt == 2:
                        if temp[0] == None:
                            # BUGFIX: was "featlength" (NameError) — the
                            # length list is called featlengths elsewhere.
                            templength = featlengths[int(i / 2)]
                        else:
                            templength = temp[0]
                        if type(j.start) == int:
                            if j.stop <= maxcut:
                                tempstart = j.start + templength - mincut + 1
                                tempstop = j.stop + templength - mincut + 1
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                            elif j.start >= mincut:
                                tempstart = j.start - mincut + 1
                                tempstop = j.stop - mincut + 1
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                        else:
                            if j.stop[-1] <= maxcut:
                                tempstart = []
                                for k in j.start:
                                    tempstart.append(k + templength - mincut + 1)
                                tempstop = []
                                for k in j.stop:
                                    tempstop.append(k + templength - mincut + 1)
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
                            elif j.start[0] >= mincut:
                                tempstart = []
                                for k in j.start:
                                    tempstart.append(k - mincut + 1)
                                tempstop = []
                                for k in j.stop:
                                    tempstop.append(k - mincut + 1)
                                aninstance = feature(
                                    tempstart,
                                    tempstop,
                                    j.type,
                                    j.strand,
                                    j.colour,
                                    j.name,
                                )
                                thirdlist.append(aninstance)
            # largest features first so small ones are drawn on top
            thirdlist.sort(key=lambda i: i.length(), reverse=True)
            if minmaxopt == 0:
                if temp[0] == None:
                    secondlist.append((featlengths[int(i / 2)], thirdlist))
                    if featlengths[int(i / 2)] > maxlength:
                        maxlength = featlengths[int(i / 2)]
                else:
                    secondlist.append((temp[0], thirdlist))
                    if temp[0] >= maxlength:
                        maxlength = temp[0]
            elif minmaxopt == 1:
                if maxcut == "Max":
                    maxcut = temp[0]
                secondlist.append((maxcut - mincut + 1, thirdlist))
                if maxcut - mincut + 1 > maxlength:
                    maxlength = maxcut - mincut + 1
            elif minmaxopt == 2:
                if temp[0] == None:
                    templength = featlengths[int(i / 2)]
                else:
                    templength = temp[0]
                secondlist.append((templength - mincut + maxcut + 1, thirdlist))
                if templength - mincut + maxcut + 1 > maxlength:
                    # BUGFIX: was "templength - mincut + maxlength + 1",
                    # which fed maxlength back into itself; the appended
                    # track length above uses maxcut.
                    maxlength = templength - mincut + maxcut + 1
            totalheight += height1
        else:
            # odd indices are blast comparison files
            totalheight += height2
            temp = getBlast(inputlist[i], minlength, mineval, minIdent)
            for j in temp:
                if j[4] < minident:
                    minident = j[4]
            secondlist.append(temp)
    # drop features/hits too small to see on very large figures
    if autodetect and maxlength > 100000:
        tempsecond = []
        minident = 101
        for i in range(len(secondlist)):
            temp = []
            if i % 2 == 0:
                for j in secondlist[i][1]:
                    if type(j.start) == int:
                        if (j.stop - j.start) * 1.0 / maxlength * width > 4:
                            temp.append(j)
                    else:
                        if (j.stop[0] - j.start[0]) * 1.0 / maxlength * width > 4:
                            temp.append(j)
                tempsecond.append((secondlist[i][0], temp))
            else:
                for j in secondlist[i]:
                    if (j[1] - j[0]) * 1.0 / maxlength * width > 3:
                        temp.append(j)
                        if j[4] < minident:
                            minident = j[4]
                tempsecond.append(temp)
        secondlist = tempsecond
        if minIdent != 0:
            minident = minIdent
    # calculates offsets for genomes if best blast alignment is selected
    if aln == "best blast":
        blastmatch = [0]
        for i in range(1, len(secondlist), 2):
            maxbitscore = 0
            for j in secondlist[i]:
                if j[1] - j[0] > maxbitscore:
                    qstart, qend, rstart, rend = j[0], j[1], j[2], j[3]
                    maxbitscore = j[1] - j[0]
            if len(secondlist[i]) == 0:
                theQstart = 0
            elif reverseList[int(i / 2)]:
                theQstart = secondlist[i - 1][0] - qend
            else:
                theQstart = qstart
            if reverseList[int((i + 1) / 2)]:
                if len(secondlist[i]) == 0:
                    theRstart = 0
                elif rstart < rend:
                    theRstart = secondlist[i + 1][0] - rend
                else:
                    theRstart = secondlist[i + 1][0] - rstart
            else:
                if len(secondlist[i]) == 0:
                    theRstart = 0
                elif rstart < rend:
                    theRstart = rstart
                else:
                    theRstart = rend
            blastmatch.append(blastmatch[-1] + theQstart - theRstart)
        # normalise offsets so the leftmost genome starts at zero
        theminblast = min(blastmatch)
        templist = []
        for i in blastmatch:
            templist.append(i - theminblast)
        blastmatch = templist
        for i in range(0, len(secondlist) + 1, 2):
            if secondlist[i][0] + blastmatch[int(i / 2)] > maxlength:
                maxlength = secondlist[i][0] + blastmatch[int(i / 2)]
    fighei = 0
    if legend == "Single column" or legend == "Two columns":
        legendArrows = set()
        legendList = []
        for i in range(len(secondlist)):
            if i % 2 == 0:
                legendList.append([])
                for j in secondlist[i][1]:
                    if (
                        j.name != None
                        and (j.name, j.colour, featDict[j.type][0]) not in legendArrows
                    ):
                        legendArrows.add((j.name, j.colour, featDict[j.type][0]))
                        if type(j.start) == int:
                            tempjstart = j.start
                        else:
                            tempjstart = j.start[0]
                        legendList[int(i / 2)].append(
                            (j.name, j.colour, featDict[j.type][0], tempjstart)
                        )
        if legend == "Single column":
            fighei = min([5000, len(legendArrows) * 90])
        elif legend == "Two columns":
            fighei = min([5000, (len(legendArrows) + 1) / 2 * 90])
    global shifter
    if legend == "Top" or legend == "Top & Bottom":
        # toplegpos: [rung1 width, rung2 width, rung3 width,
        #             occupied-x sets for each rung]
        toplegpos = [0, 0, 0, set(), set(), set()]
        legendTop = []
        testbmp = BitMap(10, 10)
        if aln == "best blast":
            shifter = blastmatch[0]
        genrev1 = reverseList[0]
        for j in secondlist[0][1]:
            if j.name != None:
                if type(j.start) == int:
                    firstleg = True
                    secondleg = True
                    thirdleg = True
                    if genrev1:
                        legpos = convertPosR(
                            secondlist[0][0],
                            maxlength,
                            width,
                            (j.start + j.stop) / 2,
                            aln,
                        )
                    else:
                        legpos = convertPos(
                            secondlist[0][0],
                            maxlength,
                            width,
                            (j.start + j.stop) / 2,
                            aln,
                        )
                    for q in range(legpos - 40, legpos + 50):
                        if q in toplegpos[3]:
                            firstleg = False
                        if q in toplegpos[4]:
                            secondleg = False
                        if q in toplegpos[5]:
                            thirdleg = False
                    if firstleg:
                        therung = 1
                        if testbmp.lengthString(j.name[:10], 64) > toplegpos[0]:
                            toplegpos[0] = testbmp.lengthString(j.name[:10], 64)
                        for q in range(legpos - 40, legpos + 50):
                            toplegpos[3].add(q)
                    # (rungs 2 and 3 are disabled in the original code)
                    else:
                        therung = None
                    legendTop.append((j.name[:10], legpos, therung))
                else:
                    firstleg = True
                    secondleg = True
                    thirdleg = True
                    if genrev1:
                        legpos = convertPosR(
                            secondlist[0][0],
                            maxlength,
                            width,
                            (j.start[0] + j.stop[0]) / 2,
                            aln,
                        )
                    else:
                        legpos = convertPos(
                            secondlist[0][0],
                            maxlength,
                            width,
                            (j.start[0] + j.stop[0]) / 2,
                            aln,
                        )
                    for q in range(legpos - 40, legpos + 50):
                        if q in toplegpos[3]:
                            firstleg = False
                        if q in toplegpos[4]:
                            secondleg = False
                        if q in toplegpos[5]:
                            thirdleg = False
                    if firstleg:
                        therung = 1
                        if testbmp.lengthString(j.name[:10], 64) > toplegpos[0]:
                            toplegpos[0] = testbmp.lengthString(j.name[:10], 64)
                        for q in range(legpos - 40, legpos + 50):
                            toplegpos[3].add(q)
                    # (rungs 2 and 3 are disabled in the original code)
                    else:
                        therung = None
                    legendTop.append((j.name[:10], legpos, therung))
        totalheight += sum(toplegpos[:3]) + 40
    if legend == "Bottom" or legend == "Top & Bottom":
        botlegpos = [0, 0, 0, set(), set(), set()]
        legendBot = []
        testbmp = BitMap(10, 10)
        # BUGFIX: this block was duplicated verbatim in the original.
        if aln == "best blast":
            shifter = blastmatch[-1]
        genrev1 = reverseList[-1]
        for j in secondlist[-1][1]:
            if j.name != None:
                if type(j.start) == int:
                    firstleg = True
                    secondleg = True
                    thirdleg = True
                    if genrev1:
                        legpos = convertPosR(
                            secondlist[-1][0],
                            maxlength,
                            width,
                            (j.start + j.stop) / 2,
                            aln,
                        )
                    else:
                        legpos = convertPos(
                            secondlist[-1][0],
                            maxlength,
                            width,
                            (j.start + j.stop) / 2,
                            aln,
                        )
                    for q in range(legpos - 40, legpos + 50):
                        if q in botlegpos[3]:
                            firstleg = False
                        if q in botlegpos[4]:
                            secondleg = False
                        if q in botlegpos[5]:
                            thirdleg = False
                    if firstleg:
                        therung = 1
                        if testbmp.lengthString(j.name[:10], 64) > botlegpos[0]:
                            botlegpos[0] = testbmp.lengthString(j.name[:10], 64)
                        for q in range(legpos - 40, legpos + 50):
                            botlegpos[3].add(q)
                    # (rungs 2 and 3 are disabled in the original code)
                    else:
                        therung = None
                    legendBot.append((j.name[:10], legpos, therung))
                else:
                    firstleg = True
                    secondleg = True
                    thirdleg = True
                    if genrev1:
                        legpos = convertPosR(
                            secondlist[-1][0],
                            maxlength,
                            width,
                            (j.start[0] + j.stop[0]) / 2,
                            aln,
                        )
                    else:
                        legpos = convertPos(
                            secondlist[-1][0],
                            maxlength,
                            width,
                            (j.start[0] + j.stop[0]) / 2,
                            aln,
                        )
                    # BUGFIX: collision tests and the stored x position used
                    # raw sequence coordinates here; every other branch uses
                    # the converted pixel position (legpos).
                    for q in range(legpos - 40, legpos + 50):
                        if q in botlegpos[3]:
                            firstleg = False
                        if q in botlegpos[4]:
                            secondleg = False
                        if q in botlegpos[5]:
                            thirdleg = False
                    if firstleg:
                        therung = 1
                        if testbmp.lengthString(j.name[:10], 64) > botlegpos[0]:
                            botlegpos[0] = testbmp.lengthString(j.name[:10], 64)
                        for q in range(legpos - 40, legpos + 50):
                            botlegpos[3].add(q)
                    # (rungs 2 and 3 are disabled in the original code)
                    else:
                        therung = None
                    legendBot.append((j.name[:10], legpos, therung))
        totalheight += sum(botlegpos[:3]) + 40
    # creates extra width for blast identity legend
    drawfig1hei = 0
    if drawfig1 and minident != 101:
        drawfig1hei = 500
    extraheight = 0
    # creates extra height for scale legend
    drawfig2hei = 0
    if drawfig2:
        drawfig2hei = height1
    columnhei = max([fighei, drawfig1hei, drawfig2hei])
    totalheight += columnhei
    hei = totalheight
    # creates extra height for graph
    if graphit != None:
        hei += graphit[3] * len(graphit[0]) + 2 * graphit[7] * len(graphit[0])
        extraheight = (graphit[3] + 20) * len(graphit[0])
    svg = scalableVectorGraphics(hei + 1, width)
    if legend == "Single column":
        index = 0
        legendArrows = []
        for i in range(len(legendList)):
            x = legendList[i]
            x.sort(key=operator.itemgetter(3))
            if reverseList[i]:
                x.reverse()
            legendArrows += x
        for i in range(hei - columnhei + 74, hei, 90):
            if index < len(legendArrows) and legendArrows[index][2] == "rect":
                svg.drawOutRect(5, i - 64, 96, 64, legendArrows[index][1], genet)
                svg.writeString(legendArrows[index][0], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "arrow":
                svg.drawRightArrow(
                    5, i - 64, 96, 64, legendArrows[index][1], (0, 0, 0), genet
                )
                svg.writeString(legendArrows[index][0], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "frame":
                svg.drawRightFrame(5, i - 16, 96, 128, genet, 1, legendArrows[index][1])
                svg.writeString(legendArrows[index][0], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "pointer":
                svg.drawPointer(34, i - 64, 64, genet, legendArrows[index][1])
                svg.writeString(legendArrows[index][0], 106, i, 64)
            index += 1
    elif legend == "Two columns":
        index = 0
        legendArrows = []
        for i in range(len(legendList)):
            x = legendList[i]
            x.sort(key=operator.itemgetter(3))
            if reverseList[i]:
                x.reverse()
            legendArrows += x
        for i in range(hei - columnhei + 74, hei, 90):
            if index < len(legendArrows) and legendArrows[index][2] == "rect":
                svg.drawOutRect(5, i - 64, 96, 64, legendArrows[index][1], genet)
                svg.writeString(legendArrows[index][0][:45], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "arrow":
                svg.writeString(legendArrows[index][0][:45], 106, i, 64)
                svg.drawRightArrow(
                    5, i - 64, 96, 64, legendArrows[index][1], (0, 0, 0), genet
                )
            elif index < len(legendArrows) and legendArrows[index][2] == "frame":
                svg.drawRightFrame(5, i - 16, 96, 128, genet, 1, legendArrows[index][1])
                svg.writeString(legendArrows[index][0][:45], 106, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "pointer":
                svg.drawPointer(34, i - 64, 64, genet, legendArrows[index][1])
                svg.writeString(legendArrows[index][0][:45], 76, i, 64)
            index += 1
        for i in range(hei - columnhei + 74, hei, 90):
            if index < len(legendArrows) and legendArrows[index][2] == "rect":
                svg.drawOutRect(
                    5 + width / 3, i - 64, 96, 64, legendArrows[index][1], genet
                )
                svg.writeString(legendArrows[index][0][:45], 106 + width / 3, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "arrow":
                svg.drawRightArrow(
                    5 + width / 3,
                    i - 64,
                    96,
                    64,
                    legendArrows[index][1],
                    (0, 0, 0),
                    genet,
                )
                svg.writeString(legendArrows[index][0][:45], 106 + width / 3, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "frame":
                svg.drawRightFrame(
                    5 + width / 3, i - 16, 96, 128, genet, 1, legendArrows[index][1]
                )
                svg.writeString(legendArrows[index][0][:45], 106 + width / 3, i, 64)
            elif index < len(legendArrows) and legendArrows[index][2] == "pointer":
                svg.drawPointer(
                    34 + width / 3, i - 64, 64, genet, legendArrows[index][1]
                )
                svg.writeString(legendArrows[index][0][:45], 76 + width / 3, i, 64)
            index += 1
    if legend == "Top" or legend == "Top & Bottom":
        rung1 = sum(toplegpos[:3]) + 30
        rung2 = rung1 - toplegpos[0] - 10
        rung3 = rung2 - toplegpos[1] - 10
        for i in legendTop:
            if i[0][0].lower() == i[0][0]:
                xpos = i[1] + 16
            else:
                xpos = i[1] + 32
            if i[2] == 1:
                svg.writeString(i[0], xpos, rung1, 64, False, False, 1)
            elif i[2] == 2:
                svg.writeString(i[0], xpos, rung2, 64, False, False, 1)
            elif i[2] == 3:
                svg.writeString(i[0], xpos, rung3, 64, False, False, 1)
    if legend == "Bottom" or legend == "Top & Bottom":
        rung1 = hei - sum(botlegpos[:3]) - 30 - columnhei
        rung2 = rung1 + botlegpos[0] + 10
        rung3 = rung2 + botlegpos[1] + 10
        for i in legendBot:
            if i[0][-1].lower() == i[0][-1]:
                xpos = i[1] + 16
            else:
                xpos = i[1] + 32
            if i[2] == 1:
                svg.writeString(i[0], xpos, rung1, 64, False, False, 1, "right")
            elif i[2] == 2:
                svg.writeString(i[0], xpos, rung2, 64, False, False, 1, "right")
            elif i[2] == 3:
                svg.writeString(i[0], xpos, rung3, 64, False, False, 1, "right")
    # draws the scale figure
    if drawfig2 != False:
        testbmp = BitMap(5, 5)
        x1 = width - 600 - drawfig2 * 1.0 / maxlength * width
        x2 = width - 600
        svg.drawLine(
            x1,
            hei - columnhei + height1 / 2 + 70,
            x2,
            hei - columnhei + height1 / 2 + 70,
            3,
        )
        svg.drawLine(
            x1,
            hei - columnhei + height1 / 4 + 70,
            x1,
            hei - columnhei + height1 / 4 * 3 + 70,
            3,
        )
        svg.drawLine(
            x2,
            hei - columnhei + height1 / 4 + 70,
            x2,
            hei - columnhei + height1 / 4 * 3 + 70,
            3,
        )
        strfig2 = str(drawfig2)
        if strfig2[-6:] == "000000":
            strfig2 = strfig2[:-6] + " Mbp"
        elif strfig2[-3:] == "000":
            strfig2 = strfig2[:-3] + " Kbp"
        svg.writeString(
            strfig2,
            (x1 + x2) / 2 - testbmp.lengthString(strfig2, 64) / 2,
            hei - columnhei + height1 / 4 + 65,
            64,
        )
    # draws the graph
    if graphit != None:
        thearray, maxgc, mingc, gheight, glinet, gtype, gmaxy, ggap = graphit
        widthpixellist = []
        leftpixellist = []
        rightpixellist = []
        for i in range(len(thearray)):
            if aln == "best blast":
                shifter = blastmatch[i]
            if reverseList[i]:
                rightpixel = convertPosR(secondlist[i * 2][0], maxlength, width, 0, aln)
                leftpixel = convertPosR(
                    secondlist[i * 2][0], maxlength, width, secondlist[i * 2][0], aln
                )
                thearray[i].reverse()
            else:
                leftpixel = convertPos(secondlist[i * 2][0], maxlength, width, 0, aln)
                rightpixel = convertPos(
                    secondlist[i * 2][0], maxlength, width, secondlist[i * 2][0], aln
                )
            widthpixel = rightpixel - leftpixel + 1
            widthpixellist.append(widthpixel)
            leftpixellist.append(leftpixel)
            rightpixellist.append(rightpixel)
        neg = False
        if gmaxy == "Auto":
            # find the largest absolute per-pixel mean to scale the y axis
            gmaxy = 0
            for i in range(0, len(thearray)):
                if min(thearray[i]) < 0:
                    neg = True
                for j in range(0, widthpixellist[i]):
                    aa = int(j * (len(thearray[i]) * 1.0 / widthpixellist[i]))
                    bb = int((j + 1) * (len(thearray[i]) * 1.0 / widthpixellist[i]))
                    if aa == bb:
                        bb += 1
                    temparr = thearray[i][aa:bb]
                    gyval = abs(sum(temparr) * 1.0 / len(temparr))
                    if gyval > gmaxy:
                        gmaxy = gyval
        else:
            gmaxy = float(gmaxy)
            for i in range(0, len(thearray)):
                if min(thearray[i]) < 0:
                    neg = True
        if neg:
            axispos = ggap + gheight / 2 + glinet / 2
        else:
            axispos = ggap + gheight
        for qq in range(len(thearray)):
            lastgypos = None
            svg.drawLine(
                leftpixellist[qq],
                axispos + glinet / 2,
                rightpixellist[qq],
                axispos + glinet / 2,
                glinet,
            )
            for i in range(0, widthpixellist[qq]):
                aa = int(i * (len(thearray[qq]) * 1.0 / widthpixellist[qq]))
                bb = int((i + 1) * (len(thearray[qq]) * 1.0 / widthpixellist[qq]))
                if aa == bb:
                    bb += 1
                temparr = thearray[qq][aa:bb]
                gyval = sum(temparr) * 1.0 / len(temparr)
                yvalpixratio = gyval / gmaxy
                if yvalpixratio > 1:
                    yvalpixratio = 1
                if yvalpixratio < -1:
                    yvalpixratio = -1
                if neg:
                    if yvalpixratio < 0:
                        gc1, gc2, gc3 = mingc
                        yvalpix = round(yvalpixratio * (gheight / 2 - glinet / 2))
                        if gtype == "Line":
                            if lastgypos != None:
                                svg.drawLine(
                                    leftpixellist[qq] + i - 1,
                                    lastgypos,
                                    leftpixellist[qq] + i,
                                    axispos - yvalpix,
                                    1,
                                    mingc,
                                )
                            lastgypos = axispos - yvalpix
                        elif gtype == "Histogram":
                            svg.drawLine(
                                leftpixellist[qq] + i,
                                axispos + glinet / 2,
                                leftpixellist[qq] + i,
                                axispos - yvalpix,
                                1,
                                mingc,
                            )
                    else:
                        gc1, gc2, gc3 = maxgc
                        yvalpix = round(
                            yvalpixratio * (gheight / 2 - (glinet - glinet / 2))
                        )
                        if gtype == "Line":
                            if lastgypos != None:
                                svg.drawLine(
                                    leftpixellist[qq] + i - 1,
                                    lastgypos,
                                    leftpixellist[qq] + i,
                                    axispos - glinet - yvalpix,
                                    1,
                                    maxgc,
                                )
                            lastgypos = axispos - glinet - yvalpix
                        elif gtype == "Histogram" and round(yvalpix) != 0.0:
                            svg.drawLine(
                                leftpixellist[qq] + i,
                                axispos - glinet / 2,
                                leftpixellist[qq] + i,
                                axispos - yvalpix,
                                1,
                                maxgc,
                            )
                else:
                    yvalpix = round(yvalpixratio * (gheight - glinet))
                    if gtype == "Line":
                        if lastgypos != None:
                            # BUGFIX: x/y arguments were scrambled relative to
                            # the matching negative-axis branch above.
                            svg.drawLine(
                                leftpixellist[qq] + i - 1,
                                lastgypos,
                                leftpixellist[qq] + i,
                                axispos - glinet - yvalpix,
                                1,
                                maxgc,
                            )
                        lastgypos = axispos - glinet - yvalpix
                    elif gtype == "Histogram" and round(yvalpix) != 0.0:
                        svg.drawLine(
                            leftpixellist[qq] + i,
                            axispos,
                            leftpixellist[qq] + i,
                            axispos - yvalpix,
                            1,
                            maxgc,
                        )
            axispos += gheight + 2 * ggap + height1 + height2
        modfig1 = (graphit[3] + 2 * ggap) * len(graphit[0])
    else:
        modfig1 = 0
    # draws the blast gradient legend
    if drawfig1 and minident != 101:
        svg.writeString(
            str(int(round(minident))) + "%", width - 300, hei - columnhei + 480, 64
        )
        svg.writeString("100%", width - 300, hei - columnhei + 84, 64)
        svg.drawGradient(
            width - 400, hei - columnhei + 20, 40, 460, minblastc, maxblastc
        )
        svg.drawGradient2(
            width - 360, hei - columnhei + 20, 40, 460, minblastci, maxblastci
        )
    # draws feature and blast figures
    for i in range(0, len(secondlist)):
        # draws the annotated genome track
        if i % 2 == 0:
            if aln == "best blast":
                shifter = blastmatch[int(i / 2)]
            genrev1 = reverseList[int(i / 2)]
            ymod = height1 * i / 2 + height2 * i / 2
            if graphit != None:
                ymod += (gheight + 2 * ggap) * (min([len(thearray), i / 2 + 1]))
            if legend == "Top" or legend == "Top & Bottom":
                ymod += sum(toplegpos[:3]) + 40
            length = secondlist[i][0]
            svg.drawLine(
                convertPos(length, maxlength, width, 0, aln),
                ymod + height1 / 2,
                convertPos(length, maxlength, width, length, aln),
                ymod + height1 / 2,
                glt,
            )
            for j in secondlist[i][1]:
                if abortCaptain:
                    return None
                if (j.strand == "+" and not genrev1) or (j.strand == "-" and genrev1):
                    if type(j.start) == int:
                        if genrev1:
                            x2 = convertPosR(length, maxlength, width, j.start, aln)
                            x1 = convertPosR(length, maxlength, width, j.stop, aln)
                        else:
                            x1 = convertPos(length, maxlength, width, j.start, aln)
                            x2 = convertPos(length, maxlength, width, j.stop, aln)
                        if featDict[j.type][0] == "rect":
                            svg.drawOutRect(
                                x1, ymod, max([x2 - x1, 1]), height1, j.colour, genet
                            )
                        elif featDict[j.type][0] == "arrow":
                            svg.drawRightArrow(
                                x1,
                                ymod,
                                max([x2 - x1, 1]),
                                height1,
                                j.colour,
                                (0, 0, 0),
                                genet,
                            )
                        elif featDict[j.type][0] == "frame":
                            svg.drawRightFrame(
                                x1,
                                ymod,
                                max([x2 - x1, 1]),
                                height1,
                                genet,
                                j.start % 3,
                                j.colour,
                            )
                        elif featDict[j.type][0] == "pointer":
                            svg.drawPointer(x1, ymod, height1, genet, j.colour)
                    else:
                        if genrev1:
                            x2 = convertPosR(length, maxlength, width, j.start[-1], aln)
                            x1 = convertPosR(length, maxlength, width, j.stop[-1], aln)
                        else:
                            x1 = convertPos(length, maxlength, width, j.start[-1], aln)
                            x2 = convertPos(length, maxlength, width, j.stop[-1], aln)
                        if featDict[j.type][0] == "rect":
                            svg.drawOutRect(
                                x1, ymod, max([x2 - x1, 1]), height1, j.colour, genet
                            )
                        elif featDict[j.type][0] == "arrow":
                            svg.drawRightArrow(
                                x1,
                                ymod,
                                max([x2 - x1, 1]),
                                height1,
                                j.colour,
                                (0, 0, 0),
                                genet,
                            )
                        elif featDict[j.type][0] == "frame":
                            svg.drawRightFrame(
                                x1,
                                ymod,
                                max([x2 - x1, 1]),
                                height1,
                                genet,
                                j.start[-1] % 3,
                                j.colour,
                            )
                        elif featDict[j.type][0] == "pointer":
                            svg.drawPointer(x1, ymod, height1, genet, j.colour)
                        # draw remaining exons right-to-left, joined by dashes
                        for k in range(2, len(j.start) + 1):
                            if genrev1:
                                x4 = convertPosR(
                                    length, maxlength, width, j.start[-k], aln
                                )
                                x3 = convertPosR(
                                    length, maxlength, width, j.stop[-k], aln
                                )
                            else:
                                x3 = convertPos(
                                    length, maxlength, width, j.start[-k], aln
                                )
                                x4 = convertPos(
                                    length, maxlength, width, j.stop[-k], aln
                                )
                            if (
                                featDict[j.type][0] == "arrow"
                                or featDict[j.type][0] == "rect"
                            ):
                                if x1 - x4 > 2:
                                    svg.drawDash(
                                        x4, ymod + height1 / 4, x4, ymod, exont
                                    )
                                    svg.drawDash(x4, ymod, x1, ymod, exont)
                                    svg.drawDash(
                                        x1, ymod, x1, ymod + height1 / 4, exont
                                    )
                                svg.drawOutRect(
                                    x3,
                                    ymod + height1 / 4,
                                    x4 - x3,
                                    height1 / 2,
                                    j.colour,
                                    genet,
                                )
                            elif featDict[j.type][0] == "frame":
                                if x1 - x4 > 2:
                                    svg.drawDash(
                                        x4, ymod + height1 / 4, x4, ymod, exont
                                    )
                                    svg.drawDash(x4, ymod, x1, ymod, exont)
                                    svg.drawDash(
                                        x1, ymod, x1, ymod + height1 / 4, exont
                                    )
                                svg.drawRightFrameRect(
                                    x3,
                                    ymod,
                                    x4 - x3,
                                    height1,
                                    genet,
                                    j.start[-k] % 3,
                                    j.colour,
                                )
                            # need to get exons working for other types
                            x1, x2 = x3, x4
                else:
                    if type(j.start) == int:
                        if genrev1:
                            x2 = convertPosR(length, maxlength, width, j.start, aln)
                            x1 = convertPosR(length, maxlength, width, j.stop, aln)
                        else:
                            x1 = convertPos(length, maxlength, width, j.start, aln)
                            x2 = convertPos(length, maxlength, width, j.stop, aln)
                        if featDict[j.type][0] == "rect":
                            svg.drawOutRect(x1, ymod, x2 - x1, height1, j.colour, genet)
                        elif featDict[j.type][0] == "arrow":
                            svg.drawLeftArrow(
                                x1, ymod, x2 - x1, height1, j.colour, (0, 0, 0), genet
                            )
                        elif featDict[j.type][0] == "frame":
                            svg.drawLeftFrame(
                                x1, ymod, x2 - x1, height1, genet, j.stop % 3, j.colour
                            )
                        elif featDict[j.type][0] == "pointer":
                            svg.drawPointer(x1, ymod, height1, genet, j.colour)
                    else:
                        if genrev1:
                            x2 = convertPosR(length, maxlength, width, j.start[0], aln)
                            x1 = convertPosR(length, maxlength, width, j.stop[0], aln)
                        else:
                            x1 = convertPos(length, maxlength, width, j.start[0], aln)
                            x2 = convertPos(length, maxlength, width, j.stop[0], aln)
                        if featDict[j.type][0] == "rect":
                            svg.drawOutRect(x1, ymod, x2 - x1, height1, j.colour, genet)
                        elif featDict[j.type][0] == "arrow":
                            svg.drawLeftArrow(
                                x1, ymod, x2 - x1, height1, j.colour, (0, 0, 0), genet
                            )
                        elif featDict[j.type][0] == "frame":
                            svg.drawLeftFrame(
                                x1,
                                ymod,
                                x2 - x1,
                                height1,
                                genet,
                                j.stop[0] % 3,
                                j.colour,
                            )
                        elif featDict[j.type][0] == "pointer":
                            svg.drawPointer(x1, ymod, height1, genet, j.colour)
                        for k in range(1, len(j.start)):
                            if genrev1:
                                x4 = convertPosR(
                                    length, maxlength, width, j.start[k], aln
                                )
                                x3 = convertPosR(
                                    length, maxlength, width, j.stop[k], aln
                                )
                            else:
                                x3 = convertPos(
                                    length, maxlength, width, j.start[k], aln
                                )
                                x4 = convertPos(
                                    length, maxlength, width, j.stop[k], aln
                                )
                            if (
                                featDict[j.type][0] == "rect"
                                or featDict[j.type][0] == "arrow"
                            ):
                                if x3 - x2 > 2:
                                    svg.drawDash(
                                        x2,
                                        ymod + 3 * height1 / 4,
                                        x2,
                                        ymod + height1,
                                        exont,
                                    )
                                    svg.drawDash(
                                        x2, ymod + height1, x3, ymod + height1, exont
                                    )
                                    svg.drawDash(
                                        x3,
                                        ymod + height1,
                                        x3,
                                        ymod + 3 * height1 / 4,
                                        exont,
                                    )
                            elif featDict[j.type][0] == "frame":
                                if x3 - x2 > 2:
                                    svg.drawDash(
                                        x2,
                                        ymod + 3 * height1 / 4,
                                        x2,
                                        ymod + height1,
                                        exont,
                                    )
                                    svg.drawDash(
                                        x2, ymod + height1, x3, ymod + height1, exont
                                    )
                                    svg.drawDash(
                                        x3,
                                        ymod + height1,
                                        x3,
                                        ymod + 3 * height1 / 4,
                                        exont,
                                    )
                                svg.drawLeftFrameRect(
                                    x3,
                                    ymod,
                                    x4 - x3,
                                    height1,
                                    genet,
                                    j.stop[k] % 3,
                                    j.colour,
                                )
                            x1, x2 = x3, x4
        else:
            # draws the blast hits
            genrev2 = reverseList[int((i + 1) / 2)]
            length1 = secondlist[i - 1][0]
            length2 = secondlist[i + 1][0]
            ymod = (height1 * (i - 1) / 2 + height2 * (i - 1) / 2) - 1 + height1
            if graphit != None:
                ymod += (gheight + 2 * ggap) * (min([len(thearray), i / 2 + 1]))
            if legend == "Top" or legend == "Top & Bottom":
                ymod += sum(toplegpos[:3]) + 40
            y1 = ymod
            y2 = y1 + height2 + 1
            for j in secondlist[i]:
                if abortCaptain:
                    return None
                qStart, qEnd, rStart, rEnd, ident = j
                # is the blast hit inverted
                if (
                    (rStart < rEnd and not genrev1 and not genrev2)
                    or (rStart > rEnd and not genrev1 and genrev2)
                    or (rStart < rEnd and genrev1 and genrev2)
                    or (rStart > rEnd and genrev1 and not genrev2)
                ):
                    crisscross = False
                else:
                    crisscross = True
                try:
                    ratio = round((ident - minident) / (100 - minident), 2)
                except:
                    # minident == 100 -> zero division; treat as full identity
                    ratio = 1
                if crisscross:
                    r1 = int(minblastci[0] * (1 - ratio) + maxblastci[0] * ratio)
                    r2 = int(minblastci[1] * (1 - ratio) + maxblastci[1] * ratio)
                    r3 = int(minblastci[2] * (1 - ratio) + maxblastci[2] * ratio)
                else:
                    r1 = int(minblastc[0] * (1 - ratio) + maxblastc[0] * ratio)
                    r2 = int(minblastc[1] * (1 - ratio) + maxblastc[1] * ratio)
                    r3 = int(minblastc[2] * (1 - ratio) + maxblastc[2] * ratio)
                if aln == "best blast":
                    shifter = blastmatch[int(i / 2)]
                if genrev1:
                    x1e = convertPosR(length1, maxlength, width, qStart, aln)
                    x1s = convertPosR(length1, maxlength, width, qEnd, aln)
                else:
                    x1s = convertPos(length1, maxlength, width, qStart, aln)
                    x1e = convertPos(length1, maxlength, width, qEnd, aln)
                if aln == "best blast":
                    shifter = blastmatch[int((i + 1) / 2)]
                if genrev2 and rStart < rEnd:
                    x2e = convertPosR(length2, maxlength, width, rStart, aln)
                    x2s = convertPosR(length2, maxlength, width, rEnd, aln)
                elif genrev2 and rStart >= rEnd:
                    x2s = convertPosR(length2, maxlength, width, rStart, aln)
                    x2e = convertPosR(length2, maxlength, width, rEnd, aln)
                elif not genrev2 and rStart < rEnd:
                    x2s = convertPos(length2, maxlength, width, rStart, aln)
                    x2e = convertPos(length2, maxlength, width, rEnd, aln)
                else:
                    x2e = convertPos(length2, maxlength, width, rStart, aln)
                    x2s = convertPos(length2, maxlength, width, rEnd, aln)
                if crisscross:
                    svg.drawBlastHit(x1s, y1, x1e, y1, x2s, y2, x2e, y2, (r1, r2, r3))
                    if blastoutline:
                        svg.drawLine(x1s, y1, x2e, y2)
                        svg.drawLine(x1e, y1, x2s, y2)
                else:
                    svg.drawBlastHit(x1s, y1, x1e, y1, x2e, y2, x2s, y2, (r1, r2, r3))
                    if blastoutline:
                        svg.drawLine(x1s, y1, x2s, y2)
                        svg.drawLine(x1e, y1, x2e, y2)
    svg.writesvg(filename)
    return minident
# The GUI
class App:
    def __init__(self, master):
        """Build the main Easyfig window: menu bar, file lists and controls.

        master: the Tk root window.  Also loads saved preferences from
        .easyfig.pref in the working directory when that file exists.
        """
        self.pwd = os.getcwd()
        # --- menu bar -------------------------------------------------
        self.menubar = Menu(master)
        self.filemenu = Menu(self.menubar, tearoff=0)
        self.filemenu.add_command(label="New Figure", command=self.defaultoptions)
        self.filemenu.add_command(label="Save Settings", command=self.saveOptions)
        self.filemenu.add_command(label="Load Settings", command=self.openOptions)
        self.filemenu.add_separator()
        self.filemenu.add_command(label="Preferences", command=self.preferencewindow)
        self.filemenu.add_separator()
        # NOTE(review): relies on the module-level "root" rather than master
        self.filemenu.add_command(label="Exit", command=root.quit)
        self.menubar.add_cascade(label="File", menu=self.filemenu)
        # create more pulldown menus
        self.confmenu = Menu(self.menubar, tearoff=0)
        self.confmenu.add_command(label="Figure", command=self.figureoptions)
        self.confmenu.add_command(label="Annotation", command=self.annotateoptions)
        self.confmenu.add_command(label="Blast", command=self.blastoptions)
        self.confmenu.add_command(label="Graph", command=self.graphoptions)
        self.confmenu.add_command(label="Subregions", command=self.annmod)
        self.menubar.add_cascade(label="Image", menu=self.confmenu)
        self.blastmenu = Menu(self.menubar, tearoff=0)
        self.blastmenu.add_command(
            label="Download Blast Automatically", command=self.downloadBlastAuto
        )
        self.blastmenu.add_command(
            label="Download Blast Manually", command=self.downloadBlastMan
        )
        self.blastmenu.add_command(
            label="Choose Blast Location", command=self.chooseBlastDir
        )
        self.menubar.add_cascade(label="Blast", menu=self.blastmenu)
        self.helpmenu = Menu(self.menubar, tearoff=0)
        self.helpmenu.add_command(label="Help", command=self.openhelpsite)
        self.helpmenu.add_command(label="Support", command=self.supportwin)
        self.helpmenu.add_separator()
        self.helpmenu.add_command(label="About", command=self.openabout)
        self.helpmenu.add_command(label="Reference", command=self.openref)
        self.menubar.add_cascade(label="Help", menu=self.helpmenu)
        master.config(menu=self.menubar)
        # --- main frame and state flags -------------------------------
        frame1 = Frame(master)
        frame1.grid(row=0, column=0, padx=40, pady=10)
        master.geometry("+10+20")
        self.showit = False
        self.running = False
        self.graphshow = False
        self.cutstate = None
        self.orderstate = None
        # blast executable / database locations (None until configured)
        self.blastlDir = None
        self.blastnDir = None
        self.dblDir = None
        self.dbnDir = None
        self.workingDir = "test"
        # per-entry subregion bounds and reverse flags, keyed by the
        # two-digit entry number prefix (e.g. "01")
        self.mincutlist = {}
        self.maxcutlist = {}
        self.revlist = {}
        self.entrynum = 0
        # --- widgets --------------------------------------------------
        self.theTitle = Label(
            frame1, text="Easyfig 2.2.3", font="TkDefaultFont 24 bold"
        )
        self.theTitle.grid(row=0, column=1, columnspan=3, padx=10, sticky="W")
        # NOTE(review): self.annLab is reassigned below for the blast-files
        # heading, so this first Label is only reachable via the grid
        self.annLab = Label(
            frame1, text="Annotation Files", font="TkDefaultFont 13 bold underline"
        )
        self.annLab.grid(row=1, column=2, pady=10)
        # one scrollbar drives both list boxes via self.yview
        self.scrollbar = Scrollbar(frame1, orient=VERTICAL)
        self.genlist = DDlistbox(frame1, yscrollcommand=self.scrollbar.set)
        self.genlist.bind("<Double-Button-1>", self.annmod)
        self.genlist.config(height=10)
        self.blastlist = DDlistbox(frame1, yscrollcommand=self.scrollbar.set)
        self.blastlist.config(height=9)
        self.scrollbar.config(command=self.yview)
        self.scrollbar.grid(row=2, column=1, rowspan=9, sticky=NS)
        self.genlist.grid(row=2, column=2, rowspan=9)
        self.annLab = Label(
            frame1, text="Blast Files", font="TkDefaultFont 13 bold underline"
        )
        self.annLab.grid(row=1, column=3)
        self.blastlist.grid(row=2, column=3, rowspan=9)
        self.addgenbutton = Button(
            frame1, text="Add feature file", command=self.addfeat
        )
        self.addgenbutton.grid(row=11, column=2, sticky=EW)
        # NOTE(review): self.addgenbutton is reassigned here (two buttons,
        # one attribute); harmless but the first button reference is lost
        self.addgenbutton = Button(frame1, text="Add folder", command=self.addfolder)
        self.addgenbutton.grid(row=12, column=2, sticky=EW)
        self.removegenbutton = Button(
            frame1, text="Remove feature file", command=self.removefeat
        )
        self.removegenbutton.grid(row=13, column=2, sticky=EW)
        self.addblastbutton = Button(
            frame1, text="Add blast file", command=self.addblast
        )
        self.addblastbutton.grid(row=11, column=3, sticky=EW)
        self.removeblastbutton = Button(
            frame1, text="Remove blast file", command=self.removeblast
        )
        self.removeblastbutton.grid(row=12, column=3, sticky=EW)
        self.spacefiller = Label(frame1, text=" ")
        self.spacefiller.grid(row=12, column=0)
        self.blastit = Button(
            frame1, text="Generate blastn Files", command=self.genBlast
        )
        self.blastit.grid(row=14, column=3, sticky="EW")
        self.blastx = Button(
            frame1, text="Generate tblastx Files", command=self.genBlastX
        )
        self.blastx.grid(row=15, column=3, sticky="EW")
        # --- output file selection ------------------------------------
        self.outfile = StringVar(value="")
        self.outopen = Button(frame1, text="Save As", command=self.getoutfile)
        self.outopen.grid(row=17, column=2, columnspan=2, sticky=W)
        self.outfilename = Entry(frame1, textvariable=self.outfile)
        self.outfilename.grid(row=17, column=2, columnspan=2)
        self.filetype = StringVar(value="Bitmap (bmp)")
        self.filetypelabel = Label(frame1, text="File type:")
        self.filetypelabel.grid(row=18, column=2, columnspan=2, pady=5, sticky=W)
        self.filetypeentry = OptionMenu(
            frame1,
            self.filetype,
            "Bitmap (bmp)",
            "Vector file (svg)",
            "Preview (shrink)",
            "Preview (1:1)",
        )
        self.filetypeentry.grid(row=18, column=2, columnspan=2, pady=5)
        self.createFigure = Button(
            frame1,
            text="Create Figure",
            font="TkDefaultFont 12 bold",
            width=20,
            command=self.makeFigure,
        )
        self.createFigure.grid(row=19, column=2, columnspan=2, rowspan=3, sticky="NS")
        # status strip shown while figures are generated
        self.processLab = Label(frame1, bg="#FFFF99", relief=SUNKEN)
        self.processLab.grid(
            row=14, column=1, rowspan=3, columnspan=2, sticky="NSEW", padx=5, pady=5
        )
        # spacer labels to pad the grid layout
        self.gap = Label(frame1, text=" ")
        self.gap.grid(row=18, column=3)
        self.gap2 = Label(frame1, text=" ")
        self.gap2.grid(row=16, column=3)
        self.gap3 = Label(frame1, text=" ")
        self.gap3.grid(row=19, column=4)
        self.gap4 = Label(frame1, text=" ")
        self.gap4.grid(row=20, column=4)
        # NOTE(review): gap4 is assigned twice; the second Label overwrites
        # the attribute but both remain gridded
        self.gap4 = Label(frame1, text=" ")
        self.gap4.grid(row=21, column=4)
        # apply built-in defaults, then any saved user preferences
        self.defaultpreferences()
        if os.path.exists(".easyfig.pref"):
            self.opendefault()
def yview(self, *args):
apply(self.genlist.yview, args)
apply(self.blastlist.yview, args)
def addfolder(self):
tempfolder = tkFileDialog.askdirectory(
title="Please select a directory with feature files."
)
if tempfolder == () or tempfolder == "":
return
for i in os.listdir(tempfolder):
if self.entrynum == 99:
if self.genlist.size() == 99:
tkMessageBox.showerror(
"Maximum feature files reached.",
"At this time easyfig only supports 99 genomes.\nEasyfig_CL does not have a maximum limit.",
)
return
self.renumbergen()
filename = tempfolder + "/" + i
self.entrynum += 1
entryname = "%02d" % self.entrynum + ". " + filename
self.genlist.insert(END, entryname)
self.mincutlist[entryname[:2]] = "1"
self.maxcutlist[entryname[:2]] = "Max"
self.revlist[entryname[:2]] = False
self.genlist.xview_moveto(1)
def addfeat(self):
if self.entrynum == 99:
if self.genlist.size() == 99:
tkMessageBox.showerror(
"Maximum feature files reached.",
"At this time easyfig only supports 99 genomes.\nEasyfig_CL does not have a maximum limit.",
)
return
self.renumbergen()
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
if filename == "":
return
self.entrynum += 1
entryname = "%02d" % self.entrynum + ". " + filename
self.genlist.insert(END, entryname)
self.genlist.xview_moveto(1)
self.mincutlist[entryname[:2]] = "1"
self.maxcutlist[entryname[:2]] = "Max"
self.revlist[entryname[:2]] = False
    def renumbergen(self):
        """Re-assign sequential two-digit prefixes to every genome entry.

        The two-digit prefix of each listbox entry keys the
        mincut/maxcut/reverse dicts, so those dicts are rebuilt under the
        new numbering at the same time.  Also resets entrynum to the
        current list size.
        """
        # The annotation window (if open) refers to old entry numbers;
        # close it so it cannot show stale state.
        try:
            self.annwindow.destroy()
        except:
            pass
        tempmincutlist = {}
        tempmaxcutlist = {}
        temprevlist = {}
        for i in range(self.genlist.size()):
            tempgen = self.genlist.get(i)
            # Replace the old "NN" prefix with the entry's new position
            # (delete + insert at the same index keeps list order).
            self.genlist.delete(i)
            self.genlist.insert(i, "%02d" % (i + 1) + tempgen[2:])
            # Carry the per-entry settings over to the new key.
            tempmincutlist["%02d" % (i + 1)] = self.mincutlist[tempgen[:2]]
            tempmaxcutlist["%02d" % (i + 1)] = self.maxcutlist[tempgen[:2]]
            temprevlist["%02d" % (i + 1)] = self.revlist[tempgen[:2]]
        self.mincutlist = tempmincutlist
        self.maxcutlist = tempmaxcutlist
        self.revlist = temprevlist
        self.entrynum = self.genlist.size()
        self.genlist.xview_moveto(1)
def removefeat(self):
self.genlist.delete(ANCHOR)
self.renumbergen()
def addblast(self):
filename = tkFileDialog.askopenfilename()
self.blastlist.insert(END, filename)
self.blastlist.xview_moveto(1)
self.orderstate = None
self.cutstate = None
def removeblast(self):
self.blastlist.delete(ANCHOR)
def defaultoptions(self):
try:
self.prefwin.destroy()
except:
pass
try:
self.figureoptionswindow.destroy()
except:
pass
try:
self.blastoptionswindow.destroy()
except:
pass
try:
self.annotateoptionswindow.destroy()
except:
pass
try:
self.graphoptionswindow.destroy()
except:
pass
try:
self.annwindow.destroy()
except:
pass
try:
self.doublecutswin.destroy()
except:
pass
self.outfile.set("")
self.genlist.delete(0, END)
self.blastlist.delete(0, END)
self.defaultpreferences()
self.entrynum = 0
self.cutstate = None
self.orderstate = None
if os.path.exists(".easyfig.pref"):
self.opendefault()
else:
self.defaultpreferences()
    def defaultpreferences(self):
        """Restore every figure/blast/feature/graph option to its built-in default."""
        # Output settings.
        self.filetype.set("Bitmap (bmp)")
        self.figwidthvar = StringVar(value="5000")
        self.height1var = StringVar(value="150")
        self.height2var = StringVar(value="500")
        self.aln = StringVar(value="centre")
        self.autodetect = IntVar()
        self.autodetect.set(1)
        self.drawfig1 = IntVar()
        self.drawfig1.set(1)
        self.drawfig2var = StringVar(value="0")
        # Blast-match filtering thresholds and colours (RGB tuple + hex pairs).
        self.minlengthvar = StringVar(value="0")
        self.minevalvar = StringVar(value="0.001")
        self.minIdentvar = StringVar(value="0")
        self.minblastc = (200, 200, 200)
        self.minblastchex = "#C8C8C8"
        self.minblastci = (200, 200, 200)
        self.minblastcihex = "#C8C8C8"
        self.maxblastc = (100, 100, 100)
        self.maxblastchex = "#646464"
        self.maxblastci = (100, 100, 100)
        self.maxblastcihex = "#646464"
        self.blastoutline = IntVar()
        self.blastoutline.set(1)
        # Legend and line-thickness settings.
        self.leg = StringVar(value="None")
        self.leg2 = StringVar(value="None")
        self.legname = StringVar(value="gene")
        self.gltvar = StringVar(value="20")
        self.exontvar = StringVar(value="2")
        self.genetvar = StringVar(value="1")
        # Per-feature-type draw toggle (IntVar), colour and glyph shape.
        self.genef = IntVar()
        self.genef.set(1)
        self.genefcolour = (64, 224, 208)
        self.genefcolourhex = "#40e0d0"
        self.genefrect = StringVar(value="arrow")
        self.cdsf = IntVar()
        self.cdsfcolour = (255, 140, 0)
        self.cdsfcolourhex = "#ff8c00"
        self.cdsfrect = StringVar(value="arrow")
        self.trnaf = IntVar()
        self.trnafcolour = (165, 42, 42)
        self.trnafcolourhex = "#a52a2a"
        self.trnafrect = StringVar(value="rect")
        self.miscf = IntVar()
        self.miscfcolour = (0, 191, 255)
        self.miscfcolourhex = "#00bfff"
        self.miscfrect = StringVar(value="rect")
        self.randfeat = StringVar()
        self.randf = IntVar()
        self.randfcolour = (72, 61, 139)
        self.randfcolourhex = "#483d8b"
        self.randfrect = StringVar(value="arrow")
        # Graph (GC content / GC skew / coverage) settings.
        self.graphtype = StringVar(value="None")
        self.allorone = IntVar()
        self.graphfile = StringVar()
        self.step = StringVar(value="1000")
        self.windsize = StringVar(value="1000")
        self.graphheight = StringVar(value="200")
        self.maxy = StringVar(value="Auto")
        self.logit = IntVar()
        self.histo = StringVar(value="Histogram")
        self.graphlinet = StringVar(value="1")
        self.poscol = (255, 0, 0)
        self.poscolhex = "#FF0000"
        self.negcol = (0, 0, 255)
        self.negcolhex = "#0000FF"
        self.ggap = StringVar(value="10")
        # Paths related to local blast execution — presumably the blastn
        # and database-build locations; None until configured. TODO confirm.
        self.blastnDir = None
        self.dbnDir = None
def saveOptions(self):
filename = ""
filename = tkFileDialog.asksaveasfilename(
filetypes=[("easycfg", "*.easycfg"), ("All files", "*")]
)
if filename == "" or filename == ():
return
savefile = open(filename, "w")
savefile.write("\t".join(self.genlist.get(0, END)) + "\n")
for i in self.genlist.get(0, END):
savefile.write(
i[:2]
+ "\t"
+ self.mincutlist[i[:2]]
+ "\t"
+ self.maxcutlist[i[:2]]
+ "\t"
+ str(self.revlist[i[:2]])
+ "\n"
)
savefile.write("\t".join(self.blastlist.get(0, END)) + "\n")
savefile.write(self.outfile.get() + "\n")
savefile.write(self.filetype.get() + "\n")
savefile.write(self.figwidthvar.get() + "\n")
savefile.write(self.height1var.get() + "\n")
savefile.write(self.height2var.get() + "\n")
savefile.write(self.aln.get() + "\n")
savefile.write(str(self.autodetect.get()) + "\n")
savefile.write(str(self.drawfig1.get()) + "\n")
savefile.write(self.drawfig2var.get() + "\n")
savefile.write(self.minlengthvar.get() + "\n")
savefile.write(self.minevalvar.get() + "\n")
savefile.write(self.minIdentvar.get() + "\n")
savefile.write(str(self.minblastc[0]) + "\n")
savefile.write(str(self.minblastc[1]) + "\n")
savefile.write(str(self.minblastc[2]) + "\n")
savefile.write(self.minblastchex + "\n")
savefile.write(str(self.minblastci[0]) + "\n")
savefile.write(str(self.minblastci[1]) + "\n")
savefile.write(str(self.minblastci[2]) + "\n")
savefile.write(self.minblastcihex + "\n")
savefile.write(str(self.maxblastc[0]) + "\n")
savefile.write(str(self.maxblastc[1]) + "\n")
savefile.write(str(self.maxblastc[2]) + "\n")
savefile.write(self.maxblastchex + "\n")
savefile.write(str(self.maxblastci[0]) + "\n")
savefile.write(str(self.maxblastci[1]) + "\n")
savefile.write(str(self.maxblastci[2]) + "\n")
savefile.write(self.maxblastcihex + "\n")
savefile.write(str(self.blastoutline.get()) + "\n")
savefile.write(self.leg.get() + "\n")
savefile.write(self.leg2.get() + "\n")
savefile.write(self.legname.get() + "\n")
savefile.write(self.gltvar.get() + "\n")
savefile.write(self.exontvar.get() + "\n")
savefile.write(self.genetvar.get() + "\n")
savefile.write(str(self.genef.get()) + "\n")
savefile.write(str(self.genefcolour[0]) + "\n")
savefile.write(str(self.genefcolour[1]) + "\n")
savefile.write(str(self.genefcolour[2]) + "\n")
savefile.write(self.genefcolourhex + "\n")
savefile.write(self.genefrect.get() + "\n")
savefile.write(str(self.cdsf.get()) + "\n")
savefile.write(str(self.cdsfcolour[0]) + "\n")
savefile.write(str(self.cdsfcolour[1]) + "\n")
savefile.write(str(self.cdsfcolour[2]) + "\n")
savefile.write(self.cdsfcolourhex + "\n")
savefile.write(self.cdsfrect.get() + "\n")
savefile.write(str(self.trnaf.get()) + "\n")
savefile.write(str(self.trnafcolour[0]) + "\n")
savefile.write(str(self.trnafcolour[1]) + "\n")
savefile.write(str(self.trnafcolour[2]) + "\n")
savefile.write(self.trnafcolourhex + "\n")
savefile.write(self.trnafrect.get() + "\n")
savefile.write(str(self.miscf.get()) + "\n")
savefile.write(str(self.miscfcolour[0]) + "\n")
savefile.write(str(self.miscfcolour[1]) + "\n")
savefile.write(str(self.miscfcolour[2]) + "\n")
savefile.write(self.miscfcolourhex + "\n")
savefile.write(self.miscfrect.get() + "\n")
savefile.write(self.randfeat.get() + "\n")
savefile.write(str(self.randf.get()) + "\n")
savefile.write(str(self.randfcolour[0]) + "\n")
savefile.write(str(self.randfcolour[1]) + "\n")
savefile.write(str(self.randfcolour[2]) + "\n")
savefile.write(self.randfcolourhex + "\n")
savefile.write(self.randfrect.get() + "\n")
savefile.write(self.graphtype.get() + "\n")
savefile.write(str(self.allorone.get()) + "\n")
savefile.write(self.graphfile.get() + "\n")
savefile.write(self.step.get() + "\n")
savefile.write(self.windsize.get() + "\n")
savefile.write(self.graphheight.get() + "\n")
savefile.write(self.maxy.get() + "\n")
savefile.write(str(self.logit.get()) + "\n")
savefile.write(self.histo.get() + "\n")
savefile.write(self.graphlinet.get() + "\n")
savefile.write(str(self.poscol[0]) + "\n")
savefile.write(str(self.poscol[1]) + "\n")
savefile.write(str(self.poscol[2]) + "\n")
savefile.write(self.poscolhex + "\n")
savefile.write(str(self.negcol[0]) + "\n")
savefile.write(str(self.negcol[1]) + "\n")
savefile.write(str(self.negcol[2]) + "\n")
savefile.write(self.negcolhex + "\n")
savefile.write(self.ggap.get() + "\n")
savefile.close()
def openOptions(self):
try:
filename = tkFileDialog.askopenfilename(
filetypes=[("easycfg", "*.easycfg"), ("All files", "*")]
)
if filename == "":
return
openfile = open(filename)
templist = openfile.readline().rstrip().split("\t")
self.genlist.delete(0, END)
if templist == [""]:
templist = []
for i in templist:
self.genlist.insert(END, i)
self.genlist.xview_moveto(1)
for i in range(len(templist)):
name, mincut, maxcut, rev = openfile.readline().rstrip().split("\t")
self.mincutlist[name] = mincut
self.maxcutlist[name] = maxcut
if rev == "True":
self.revlist[name] = True
else:
self.revlist[name] = False
templist = openfile.readline().rstrip().split("\t")
if templist == [""]:
templist = []
self.blastlist.delete(0, END)
for i in templist:
self.blastlist.insert(END, i)
self.blastlist.xview_moveto(1)
self.outfile.set(openfile.readline().rstrip())
self.filetype.set(openfile.readline().rstrip())
self.figwidthvar.set(openfile.readline().rstrip())
self.height1var.set(openfile.readline().rstrip())
self.height2var.set(openfile.readline().rstrip())
self.aln.set(openfile.readline().rstrip())
self.autodetect.set(int(openfile.readline().rstrip()))
self.drawfig1.set(int(openfile.readline().rstrip()))
self.drawfig2var.set(openfile.readline().rstrip())
self.minlengthvar.set(openfile.readline().rstrip())
self.minevalvar.set(openfile.readline().rstrip())
self.minIdentvar.set(openfile.readline().rstrip())
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.minblastc = (x, y, z)
self.minblastchex = openfile.readline().rstrip()
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.minblastci = (x, y, z)
self.minblastcihex = openfile.readline().rstrip()
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.maxblastc = (x, y, z)
self.maxblastchex = openfile.readline().rstrip()
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.maxblastci = (x, y, z)
self.maxblastcihex = openfile.readline().rstrip()
self.blastoutline.set(int(openfile.readline().rstrip()))
self.leg.set(openfile.readline().rstrip())
self.leg2.set(openfile.readline().rstrip())
self.legname.set(openfile.readline().rstrip())
self.gltvar.set(openfile.readline().rstrip())
self.exontvar.set(openfile.readline().rstrip())
self.genetvar.set(openfile.readline().rstrip())
self.genef.set(int(openfile.readline().rstrip()))
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.genefcolour = (x, y, z)
self.genefcolourhex = openfile.readline().rstrip()
self.genefrect.set(openfile.readline().rstrip())
self.cdsf.set(int(openfile.readline().rstrip()))
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.cdsfcolour = (x, y, z)
self.cdsfcolourhex = openfile.readline().rstrip()
self.cdsfrect.set(openfile.readline().rstrip())
self.trnaf.set(int(openfile.readline().rstrip()))
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.trnafcolour = (x, y, z)
self.trnafcolourhex = openfile.readline().rstrip()
self.trnafrect.set(openfile.readline().rstrip())
self.miscf.set(int(openfile.readline().rstrip()))
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.miscfcolour = (x, y, z)
self.miscfcolourhex = openfile.readline().rstrip()
self.miscfrect.set(openfile.readline().rstrip())
self.randfeat.set(openfile.readline().rstrip())
self.randf.set(int(openfile.readline().rstrip()))
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.randfcolour = (x, y, z)
self.randfcolourhex = openfile.readline().rstrip()
self.randfrect.set(openfile.readline().rstrip())
self.graphtype.set(openfile.readline().rstrip())
self.allorone.set(int(openfile.readline().rstrip()))
self.graphfile.set(openfile.readline().rstrip())
self.step.set(openfile.readline().rstrip())
self.windsize.set(openfile.readline().rstrip())
self.graphheight.set(openfile.readline().rstrip())
self.maxy.set(openfile.readline().rstrip())
self.logit.set(openfile.readline().rstrip())
self.histo.set(openfile.readline().rstrip())
self.graphlinet.set(openfile.readline().rstrip())
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.poscol = (x, y, z)
self.poscolhex = openfile.readline().rstrip()
x = int(openfile.readline().rstrip())
y = int(openfile.readline().rstrip())
z = int(openfile.readline().rstrip())
self.negcol = (x, y, z)
self.negcolhex = openfile.readline().rstrip()
self.ggap.set(openfile.readline().rstrip())
openfile.close()
except:
tkMessageBox.showerror("Try again.", "Easyfig config file invalid.")
def opendefault(self):
try:
if not os.path.exists(".easyfig.pref"):
self.defaultpreferences()
return
preffile = open(".easyfig.pref")
gotpref = False
for line in preffile:
if line.startswith(">"):
preflist = line.split("\t")[1:]
gotpref = True
preffile.close()
if not gotpref:
self.defaultpreferences()
return
self.filetype.set(preflist.pop(0))
self.figwidthvar.set(preflist.pop(0))
self.height1var.set(preflist.pop(0))
self.height2var.set(preflist.pop(0))
self.aln.set(preflist.pop(0))
self.autodetect.set(int(preflist.pop(0)))
self.drawfig1.set(int(preflist.pop(0)))
self.drawfig2var.set(preflist.pop(0))
self.minlengthvar.set(preflist.pop(0))
self.minevalvar.set(preflist.pop(0))
self.minIdentvar.set(preflist.pop(0))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.minblastc = (x, y, z)
self.minblastchex = preflist.pop(0)
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.minblastci = (x, y, z)
self.minblastcihex = preflist.pop(0)
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.maxblastc = (x, y, z)
self.maxblastchex = preflist.pop(0)
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.maxblastci = (x, y, z)
self.maxblastcihex = preflist.pop(0)
self.blastoutline.set(int(preflist.pop(0)))
self.leg.set(preflist.pop(0))
self.leg2.set(preflist.pop(0))
self.legname.set(preflist.pop(0))
self.gltvar.set(preflist.pop(0))
self.exontvar.set(preflist.pop(0))
self.genetvar.set(preflist.pop(0))
self.genef.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.genefcolour = (x, y, z)
self.genefcolourhex = preflist.pop(0)
self.genefrect.set(preflist.pop(0))
self.cdsf.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.cdsfcolour = (x, y, z)
self.cdsfcolourhex = preflist.pop(0)
self.cdsfrect.set(preflist.pop(0))
self.trnaf.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.trnafcolour = (x, y, z)
self.trnafcolourhex = preflist.pop(0)
self.trnafrect.set(preflist.pop(0))
self.miscf.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.miscfcolour = (x, y, z)
self.miscfcolourhex = preflist.pop(0)
self.miscfrect.set(preflist.pop(0))
self.randfeat.set(preflist.pop(0))
self.randf.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.randfcolour = (x, y, z)
self.randfcolourhex = preflist.pop(0)
self.randfrect.set(preflist.pop(0))
self.graphtype.set(preflist.pop(0))
self.allorone.set(int(preflist.pop(0)))
self.graphfile.set(preflist.pop(0))
self.step.set(preflist.pop(0))
self.windsize.set(preflist.pop(0))
self.graphheight.set(preflist.pop(0))
self.maxy.set(preflist.pop(0))
self.logit.set(preflist.pop(0))
self.histo.set(preflist.pop(0))
self.graphlinet.set(preflist.pop(0))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.poscol = (x, y, z)
self.poscolhex = preflist.pop(0)
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.negcol = (x, y, z)
self.negcolhex = preflist.pop(0)
self.ggap.set(preflist.pop(0))
self.blastnDir = preflist.pop(0)
if self.blastnDir == "None":
self.blastnDir = None
self.dbnDir = preflist.pop(0).rstrip()
if self.dbnDir == "None":
self.dbnDir = None
except:
self.defaultpreferences()
def openhelpsite(self):
webbrowser.open_new("https://github.com/mjsull/Easyfig/wiki")
    def openabout(self):
        """Display the About window with version and citation details."""
        # Close any previous About window before opening a new one.
        try:
            self.aboutpanel.destroy()
        except:
            pass
        self.aboutpanel = Toplevel()
        self.frame7 = Frame(self.aboutpanel)
        self.about1label = Label(
            self.frame7, text="Easyfig", font="TkDefaultFont 13 bold"
        )
        self.about1label.grid(row=0, column=0)
        self.about2label = Label(
            self.frame7,
            text="Easyfig is a Python application for creating linear\n\
comparison figures of multiple genomic loci\n with an easy-to-use graphical user interface (GUI).\n\n\
Version 2.2.3\n\nIf Easyfig is used to generate figures for publication,\n\
please cite our paper:\n\n\
Sullivan MJ, Petty NK, Beatson SA. (2011)\nEasyfig: a genome comparison visualiser.\nBioinformatics; 27 (7): 1009-1010",
        )
        self.about2label.grid(row=1, column=0)
        self.frame7.grid(padx=10, pady=10)
    def supportwin(self):
        """Display the support window with contact and bug-report details."""
        # Close any previous support window before opening a new one.
        try:
            self.supportpanel.destroy()
        except:
            pass
        self.supportpanel = Toplevel()
        self.frame9 = Frame(self.supportpanel)
        self.about1label1 = Label(
            self.frame9, text="Easyfig", font="TkDefaultFont 13 bold"
        )
        self.about1label1.grid(row=0, column=0)
        self.supportlabel2 = Label(
            self.frame9,
            text="written by Mitchell Sullivan - mjsull@gmail.com\n\
To submit a bug please visit https://github.com/mjsull/Easyfig/issues.",
        )
        self.supportlabel2.grid(row=1, column=0)
        self.frame9.grid(padx=10, pady=10)
def preferencewindow(self):
try:
self.prefwin.destroy()
except:
pass
self.prefwin = Toplevel()
self.frame8 = Frame(self.prefwin)
self.prefwin.title("preferences")
self.scrollbar3 = Scrollbar(self.frame8)
self.preflist = Listbox(self.frame8, yscrollcommand=self.scrollbar3.set)
templist = ["easyfig_standard"]
validfile = True
if os.path.exists(".easyfig.pref"):
preffile = open(".easyfig.pref")
for line in preffile:
if len(line.split("\t")) == 87:
templist.append(line.split("\t")[0])
else:
validfile = False
if not validfile:
nocompare = tkMessageBox.askquestion(
"Preference file not valid",
"Do you wish to create a new preference file at "
+ os.getcwd()
+ "/.easyfig.pref?",
parent=self.frame8,
)
if nocompare == "no":
return
else:
preffile = open(".easyfig.pref", "w")
preffile.close()
templist.sort(key=str.lower)
for i in templist:
self.preflist.insert(END, i)
self.preflist.grid(column=1, row=0, rowspan=10)
self.scrollbar3.config(command=self.preflist.yview)
self.scrollbar3.grid(column=0, row=0, rowspan=10, sticky=NS)
self.addprefbut = Button(
self.frame8, text="Save preferences as", command=self.addpref
)
self.addprefbut.grid(column=2, row=0, sticky=EW)
self.loadprefbut = Button(
self.frame8, text="Load preferences", command=self.loadpref
)
self.loadprefbut.grid(column=2, row=1, sticky=EW)
self.removeprefbut = Button(self.frame8, text="Remove", command=self.removepref)
self.removeprefbut.grid(column=2, row=2, sticky=EW)
self.setdefaultbut = Button(
self.frame8, text="Set as default", command=self.setdefault
)
self.setdefaultbut.grid(column=2, row=3, sticky=EW)
self.closeprefwinbut = Button(
self.frame8, text="close", command=self.closeprefwin
)
self.closeprefwinbut.grid(column=2, row=9, sticky=E)
self.frame8.grid(padx=20, pady=20)
def addpref(self):
preffile = open(".easyfig.pref", "a")
savename = tkSimpleDialog.askstring(
"Input name",
"Please choose name to save preferences under.",
parent=self.frame8,
)
if savename == None:
return None
while savename in self.preflist.get(0, END):
savename = tkSimpleDialog.askstring(
"Name taken",
"Please choose name to save preferences under.",
parent=self.frame8,
)
if savename == None:
return None
savestring = savename + "\t"
savestring += self.filetype.get() + "\t"
savestring += self.figwidthvar.get() + "\t"
savestring += self.height1var.get() + "\t"
savestring += self.height2var.get() + "\t"
savestring += self.aln.get() + "\t"
savestring += str(self.autodetect.get()) + "\t"
savestring += str(self.drawfig1.get()) + "\t"
savestring += self.drawfig2var.get() + "\t"
savestring += self.minlengthvar.get() + "\t"
savestring += self.minevalvar.get() + "\t"
savestring += self.minIdentvar.get() + "\t"
savestring += str(self.minblastc[0]) + "\t"
savestring += str(self.minblastc[1]) + "\t"
savestring += str(self.minblastc[2]) + "\t"
savestring += self.minblastchex + "\t"
savestring += str(self.minblastci[0]) + "\t"
savestring += str(self.minblastci[1]) + "\t"
savestring += str(self.minblastci[2]) + "\t"
savestring += self.minblastcihex + "\t"
savestring += str(self.maxblastc[0]) + "\t"
savestring += str(self.maxblastc[1]) + "\t"
savestring += str(self.maxblastc[2]) + "\t"
savestring += self.maxblastchex + "\t"
savestring += str(self.maxblastci[0]) + "\t"
savestring += str(self.maxblastci[1]) + "\t"
savestring += str(self.maxblastci[2]) + "\t"
savestring += self.maxblastcihex + "\t"
savestring += str(self.blastoutline.get()) + "\t"
savestring += self.leg.get() + "\t"
savestring += self.leg2.get() + "\t"
savestring += self.legname.get() + "\t"
savestring += self.gltvar.get() + "\t"
savestring += self.exontvar.get() + "\t"
savestring += self.genetvar.get() + "\t"
savestring += str(self.genef.get()) + "\t"
savestring += str(self.genefcolour[0]) + "\t"
savestring += str(self.genefcolour[1]) + "\t"
savestring += str(self.genefcolour[2]) + "\t"
savestring += self.genefcolourhex + "\t"
savestring += self.genefrect.get() + "\t"
savestring += str(self.cdsf.get()) + "\t"
savestring += str(self.cdsfcolour[0]) + "\t"
savestring += str(self.cdsfcolour[1]) + "\t"
savestring += str(self.cdsfcolour[2]) + "\t"
savestring += self.cdsfcolourhex + "\t"
savestring += self.cdsfrect.get() + "\t"
savestring += str(self.trnaf.get()) + "\t"
savestring += str(self.trnafcolour[0]) + "\t"
savestring += str(self.trnafcolour[1]) + "\t"
savestring += str(self.trnafcolour[2]) + "\t"
savestring += self.trnafcolourhex + "\t"
savestring += self.trnafrect.get() + "\t"
savestring += str(self.miscf.get()) + "\t"
savestring += str(self.miscfcolour[0]) + "\t"
savestring += str(self.miscfcolour[1]) + "\t"
savestring += str(self.miscfcolour[2]) + "\t"
savestring += self.miscfcolourhex + "\t"
savestring += self.miscfrect.get() + "\t"
savestring += self.randfeat.get() + "\t"
savestring += str(self.randf.get()) + "\t"
savestring += str(self.randfcolour[0]) + "\t"
savestring += str(self.randfcolour[1]) + "\t"
savestring += str(self.randfcolour[2]) + "\t"
savestring += self.randfcolourhex + "\t"
savestring += self.randfrect.get() + "\t"
savestring += self.graphtype.get() + "\t"
savestring += str(self.allorone.get()) + "\t"
savestring += self.graphfile.get() + "\t"
savestring += self.step.get() + "\t"
savestring += self.windsize.get() + "\t"
savestring += self.graphheight.get() + "\t"
savestring += self.maxy.get() + "\t"
savestring += str(self.logit.get()) + "\t"
savestring += self.histo.get() + "\t"
savestring += self.graphlinet.get() + "\t"
savestring += str(self.poscol[0]) + "\t"
savestring += str(self.poscol[1]) + "\t"
savestring += str(self.poscol[2]) + "\t"
savestring += self.poscolhex + "\t"
savestring += str(self.negcol[0]) + "\t"
savestring += str(self.negcol[1]) + "\t"
savestring += str(self.negcol[2]) + "\t"
savestring += self.negcolhex + "\t"
savestring += self.ggap.get() + "\t"
if self.blastnDir == None:
savestring += "None\t"
else:
savestring += self.blastnDir + "\t"
if self.dbnDir == None:
savestring += "None\n"
else:
savestring += self.dbnDir + "\n"
if savestring.count("\t") == 86:
if savestring.startswith(">"):
tkMessageBox.showerror(
"Try again.", "Please remove > from start of preference name."
)
else:
preffile.write(savestring)
self.preflist.insert(END, savename)
else:
tkMessageBox.showerror(
"Try again.", "<tab> character in variable, please remove."
)
preffile.close()
def loadpref(self):
try:
prefname = self.preflist.get(ACTIVE)
if prefname == "easyfig_standard":
self.defaultpreferences()
return
if not os.path.exists(".easyfig.pref"):
tkMessageBox.showerror("Try again.", "Where'd the preference file go?")
return
preffile = open(".easyfig.pref")
for line in preffile:
splitline = line.split("\t")
if splitline[0] == prefname:
preflist = splitline[1:]
preffile.close()
self.filetype.set(preflist.pop(0))
self.figwidthvar.set(preflist.pop(0))
self.height1var.set(preflist.pop(0))
self.height2var.set(preflist.pop(0))
self.aln.set(preflist.pop(0))
self.autodetect.set(int(preflist.pop(0)))
self.drawfig1.set(int(preflist.pop(0)))
self.drawfig2var.set(preflist.pop(0))
self.minlengthvar.set(preflist.pop(0))
self.minevalvar.set(preflist.pop(0))
self.minIdentvar.set(preflist.pop(0))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.minblastc = (x, y, z)
self.minblastchex = preflist.pop(0)
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.minblastci = (x, y, z)
self.minblastcihex = preflist.pop(0)
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.maxblastc = (x, y, z)
self.maxblastchex = preflist.pop(0)
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.maxblastci = (x, y, z)
self.maxblastcihex = preflist.pop(0)
self.blastoutline.set(int(preflist.pop(0)))
self.leg.set(preflist.pop(0))
self.leg2.set(preflist.pop(0))
self.legname.set(preflist.pop(0))
self.gltvar.set(preflist.pop(0))
self.exontvar.set(preflist.pop(0))
self.genetvar.set(preflist.pop(0))
self.genef.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.genefcolour = (x, y, z)
self.genefcolourhex = preflist.pop(0)
self.genefrect.set(preflist.pop(0))
self.cdsf.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.cdsfcolour = (x, y, z)
self.cdsfcolourhex = preflist.pop(0)
self.cdsfrect.set(preflist.pop(0))
self.trnaf.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.trnafcolour = (x, y, z)
self.trnafcolourhex = preflist.pop(0)
self.trnafrect.set(preflist.pop(0))
self.miscf.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.miscfcolour = (x, y, z)
self.miscfcolourhex = preflist.pop(0)
self.miscfrect.set(preflist.pop(0))
self.randfeat.set(preflist.pop(0))
self.randf.set(int(preflist.pop(0)))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.randfcolour = (x, y, z)
self.randfcolourhex = preflist.pop(0)
self.randfrect.set(preflist.pop(0))
self.graphtype.set(preflist.pop(0))
self.allorone.set(int(preflist.pop(0)))
self.graphfile.set(preflist.pop(0))
self.step.set(preflist.pop(0))
self.windsize.set(preflist.pop(0))
self.graphheight.set(preflist.pop(0))
self.maxy.set(preflist.pop(0))
self.logit.set(preflist.pop(0))
self.histo.set(preflist.pop(0))
self.graphlinet.set(preflist.pop(0))
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.poscol = (x, y, z)
self.poscolhex = preflist.pop(0)
x = int(preflist.pop(0))
y = int(preflist.pop(0))
z = int(preflist.pop(0))
self.negcol = (x, y, z)
self.negcolhex = preflist.pop(0)
self.ggap.set(preflist.pop(0))
self.blastnDir = preflist.pop(0)
if self.blastnDir == "None":
self.blastnDir = None
self.dbnDir = preflist.pop(0).rstrip()
if self.dbnDir == "None":
self.dbnDir = None
except:
self.defaultpreferences()
tkMessageBox.showerror(
"Preference File Invalid", "Loaded default preferences."
)
nocompare = tkMessageBox.askquestion(
"Preference file not valid",
"Do you wish to create a new preference file at "
+ os.getcwd()
+ "/.easyfig.pref?",
parent=self.frame8,
)
if nocompare == "no":
return
else:
preffile = open(".easyfig.pref", "w")
preffile.close()
self.preflist.delete(0, END)
self.preflist.insert("easyfig_standard")
def removepref(self):
nocompare = tkMessageBox.askquestion(
"Delete?",
"Are you sure you wish to delete this preference?",
parent=self.frame8,
)
if nocompare == "no":
return
preffile = open(".easyfig.pref")
preflist = []
prefname = self.preflist.get(ACTIVE)
self.preflist.delete(ACTIVE)
for line in preffile:
if not line.split("\t")[0] == prefname:
preflist.append(line)
preffile.close()
preffile = open(".easyfig.pref", "w")
for i in preflist:
preffile.write(i)
preffile.close()
    def setdefault(self):
        """Mark the selected preference set as the default.

        The default set is stored by prefixing its line in .easyfig.pref
        with ">"; any previously-defaulted line has its ">" stripped.
        The listbox is then rebuilt from the updated names.
        """
        # NOTE(review): the handle opened here is never closed on the
        # early return below.
        preffile = open(".easyfig.pref")
        preflist = []
        prefname = self.preflist.get(ACTIVE)
        # The selected entry already carries the default marker; nothing to do.
        if prefname.startswith(">"):
            return
        templist = []
        for line in preffile:
            if line.startswith(">"):
                # Strip the marker from the old default.
                line = line[1:]
            elif line.split("\t")[0] == prefname:
                # Mark the chosen set as the new default.
                line = ">" + line
            # The new default keeps its ">" here, so it is listed with the
            # marker in the rebuilt listbox.
            templist.append(line.split("\t")[0])
            preflist.append(line)
        preffile.close()
        preffile = open(".easyfig.pref", "w")
        for i in preflist:
            preffile.write(i)
        self.preflist.delete(0, END)
        templist.append("easyfig_standard")
        templist.sort(key=str.lower)
        for i in templist:
            self.preflist.insert(END, i)
        preffile.close()
def closeprefwin(self):
self.prefwin.destroy()
def openref(self):
webbrowser.open_new("http://www.ncbi.nlm.nih.gov/pubmed/21278367")
def pickposcol(self):
colour = tkColorChooser.askcolor(self.poscol, parent=self.graphoptionswindow)
if colour != None:
self.poscol = colour[0]
self.poscolhex = colour[1]
self.poscollabel.configure(bg=colour[1])
def picknegcol(self):
colour = tkColorChooser.askcolor(self.negcol, parent=self.graphoptionswindow)
if colour != None:
self.negcol = colour[0]
self.negcolhex = colour[1]
self.negcollabel.configure(bg=colour[1])
def getGCcontent(self, filename, mincut, maxcut):
try:
gen = open(filename)
getseq = False
getembl = False
seq = ""
for line in gen:
if line.startswith("ORIGIN"):
getseq = True
elif line.startswith("SQ Sequence"):
getembl = True
elif line.startswith("//"):
getseq = False
getembl = False
elif getseq:
seq += "".join(line.split()[1:])
elif getembl:
seq += "".join(line.split()[:-1])
gen.close()
seq = seq.upper()
except:
tkMessageBox.showerror(
"Try again.",
"feature file " + filename + " not valid, or does not exist.",
)
return None
if len(seq) == 0:
tkMessageBox.showerror(
"Try again.", "Sequence not found in feature file " + filename + "."
)
return None
if maxcut == "Max":
seq = seq[int(mincut) - 1 :]
elif int(maxcut) <= int(mincut):
seq = seq[int(mincut) - 1 :] + seq[: int(maxcut) + 1]
else:
seq = seq[int(mincut) - 1 : int(maxcut) + 1]
window1 = int(self.windsize.get()) / 2
window2 = int(self.windsize.get()) - window1
thearray = []
for i in range(0, len(seq), int(self.step.get())):
seqstring = seq[max([0, i - window1]) : i + window2]
thearray.append(
(seqstring.count("G") + seqstring.count("C")) * 1.0 / len(seqstring)
- 0.5
)
return thearray
def getGCskew(self, filename, mincut, maxcut):
try:
getseq = False
getembl = False
seq = ""
gen = open(filename)
for line in gen:
if line.startswith("ORIGIN"):
getseq = True
elif line.startswith("SQ Sequence"):
getembl = True
elif line.startswith("//"):
getseq = False
getembl = False
elif getseq:
seq += "".join(line.split()[1:])
elif getembl:
seq += "".join(line.split()[:-1])
gen.close()
seq = seq.upper()
except:
tkMessageBox.showerror(
"Try again.",
"feature file " + filename + " not valid, or does not exist.",
)
return None
if len(seq) == 0:
tkMessageBox.showerror(
"Try again.", "Sequence not found in feature file " + filename + "."
)
return None
window1 = int(self.windsize.get()) / 2
window2 = int(self.windsize.get()) - window1
if maxcut == "Max":
seq = seq[int(mincut) - 1 :]
elif int(maxcut) <= int(mincut):
seq = seq[int(mincut) - 1 :] + seq[: int(maxcut) + 1]
else:
seq = seq[int(mincut) - 1 : int(maxcut) + 1]
thearray = []
for i in range(0, len(seq), int(self.step.get())):
seqstring = seq[max([0, i - window1]) : i + window2]
gcount = seqstring.count("G")
ccount = seqstring.count("C")
try:
thearray.append((gcount - ccount) * 1.0 / (gcount + ccount))
except:
thearray.append(0)
return thearray
def getCoverage(self):
# DEFNIITION: takes a file and reads in all contigs, their start positions and the reads located within the contig
# REQUIRES: a valid ace file
# RETURNS: A list of objects of class contig
seq = ""
getseq = False
getembl = False
try:
gen = open(self.gen1.get())
for line in gen:
if line.startswith("ORIGIN"):
getseq = True
elif line.startswith("SQ Sequence"):
getembl = True
elif line.startswith("//"):
getseq = False
getembl = False
elif getseq:
seq += "".join(line.split()[1:])
elif getembl:
seq += "".join(line.split()[:-1])
gen.close()
except:
tkMessageBox.showerror(
"Try again.", "feature file not valid, or does not exist."
)
return None
if len(seq) == 0:
tkMessageBox.showerror("Try again.", "Sequence not found in feature file.")
return None
seq = seq.lower()
if self.gen1maxcut.get() == "Max":
seq = seq[int(self.gen1mincut.get()) - 1 :]
elif int(self.gen1maxcut) <= int(self.gen1mincut):
seq = seq[int(self.gen1mincut) - 1 :] + seq[: int(self.gen1maxcut) + 1]
else:
seq = seq[int(self.gen1mincut.get()) - 1 : (self.gen1maxcut.get()) + 1]
outlist = [0 for i in range(len(seq))]
readlist = [] # list of reads to be added to the contig class
index = 0 # switches to 1 once program has dealt with the initial contig
# iterates through the file determines what information is contained in each line then reads it to the
# right locationregular expressions python
transtab = string.maketrans("atgc", "tacg")
for line in file:
# puts name in file and starts reading sequence below
if line.startswith("CO "):
if index != 0:
freqDict = {}
for j in readlist:
for k in range(j.startpos, (j.startpos + j.readLength)):
if k in freqDict:
freqDict[k] += 1
else:
freqDict[k] = 1
coverageList = []
for j in range(1, len(contigSeq) + 1):
if contigSeq[j - 1] != "*":
coverageList.append(freqDict[j])
contigSeq = contigSeq.lower()
thepos = seq.find(contigSeq)
if thepos != -1:
outlist = (
outlist[:thepos]
+ coverageList
+ outlist[thepos + len(coverageList) :]
)
else:
contigSeq = contigSeq[::-1]
contigSeq = contigSeq.translate(transtab)
thepos = seq.find(contigSeq)
if thepos != -1:
coverageList.reverse()
outlist = (
outlist[:thepos]
+ coverageList
+ outlist[thepos + len(coverageList) :]
)
readlist = []
index = 1
contigSeq = ""
contigName = line.split()[
1
] # splits the line into a list with elements seperated by whitespace characters
# then returns the second element of that list (the name)
readnumber = 0 # initiates the read number used to determine where the readsequence will be added
# creates a object of class read with the name and location within the contig, leaves sequence as the
# empty string to be read in later
elif line.startswith("BQ"):
index = 2
elif line.startswith("AF "):
readIt = (
line.split()
) # splits the line into a list of strings seperated by whitespace characters
readName = readIt[1] # the name of the read
readPos = int(readIt[3]) # the position of the read within the contig
readInstance = read(
readName, readPos, None
) # creates an instance of class read
readlist.append(readInstance) # appends to list
elif index == 1:
contigSeq += line[:-1]
elif line.startswith("QA "):
readlist[readnumber].startpos = (
readlist[readnumber].startpos + int(line.split()[1]) - 1
)
readlist[readnumber].readLength = (
int(line.split()[2]) - int(line.split()[1]) + 1
)
readnumber += 1
freqDict = {}
for j in readlist:
for k in range(j.startpos, (j.startpos + j.readLength)):
if k in freqDict:
freqDict[k] += 1
else:
freqDict[k] = 1
coverageList = []
for j in range(1, len(contigSeq) + 1):
if contigSeq[j - 1] != "*":
coverageList.append(freqDict[j])
contigSeq = contigSeq.lower()
thepos = seq.find(contigSeq)
if thepos != -1:
outlist = (
outlist[:thepos] + coverageList + outlist[thepos + len(coverageList) :]
)
else:
contigSeq = contigSeq[::-1]
contigSeq = contigSeq.translate(transtab)
thepos = seq.find(contigSeq)
if thepos != -1:
coverageList.reverse()
outlist = (
outlist[:thepos]
+ coverageList
+ outlist[thepos + len(coverageList) :]
)
return outlist
def getCustom(self):
try:
thearray = []
gen = open(self.graphfile.get())
templine = gen.readline().rstrip().split("\t")
linelen = len(templine)
for i in templine:
thearray.append([float(i)])
for line in gen:
templine = line.rstrip().split("\t")
for i in range(len(templine)):
if templine[i] != "":
thearray[i].append(float(templine[i]))
return thearray
except:
tkMessageBox.showerror(
"Try again.", "graph file not valid, or does not exist."
)
return None
def graphtypechanges(self, something):
if something == "None":
self.alloroneentry.config(state=DISABLED)
self.graphfileentry.config(state=DISABLED)
self.graphfilebut.config(state=DISABLED)
self.stepentry.config(state=DISABLED)
self.windsizeentry.config(state=DISABLED)
self.graphheightentry.config(state=DISABLED)
self.maxyentry.config(state=DISABLED)
self.histoentry.config(state=DISABLED)
self.graphlinetentry.config(state=DISABLED)
self.poscolbutton.config(state=DISABLED)
self.negcolbutton.config(state=DISABLED)
self.logitbut.config(state=DISABLED)
self.ggapentry.config(state=DISABLED)
elif something == "GC Content":
self.alloroneentry.config(state=NORMAL)
self.graphfileentry.config(state=DISABLED)
self.graphfilebut.config(state=DISABLED)
self.stepentry.config(state=NORMAL)
self.windsizeentry.config(state=NORMAL)
self.graphheightentry.config(state=NORMAL)
self.maxyentry.config(state=NORMAL)
self.histoentry.config(state=NORMAL)
self.graphlinetentry.config(state=NORMAL)
self.poscolbutton.config(state=NORMAL)
self.negcolbutton.config(state=NORMAL)
self.logitbut.config(state=DISABLED)
self.ggapentry.config(state=NORMAL)
elif something == "GC Skew":
self.alloroneentry.config(state=NORMAL)
self.graphfileentry.config(state=DISABLED)
self.graphfilebut.config(state=DISABLED)
self.stepentry.config(state=NORMAL)
self.windsizeentry.config(state=NORMAL)
self.graphheightentry.config(state=NORMAL)
self.maxyentry.config(state=NORMAL)
self.histoentry.config(state=NORMAL)
self.graphlinetentry.config(state=NORMAL)
self.poscolbutton.config(state=NORMAL)
self.negcolbutton.config(state=NORMAL)
self.logitbut.config(state=DISABLED)
self.ggapentry.config(state=NORMAL)
elif something == "Coverage":
self.alloroneentry.config(state=DISABLED)
self.graphfileentry.config(state=NORMAL)
self.graphfilebut.config(state=NORMAL)
self.stepentry.config(state=DISABLED)
self.windsizeentry.config(state=NORMAL)
self.graphheightentry.config(state=NORMAL)
self.maxyentry.config(state=NORMAL)
self.histoentry.config(state=NORMAL)
self.graphlinetentry.config(state=NORMAL)
self.poscolbutton.config(state=NORMAL)
self.negcolbutton.config(state=DISABLED)
self.logitbut.config(state=NORMAL)
self.ggapentry.config(state=NORMAL)
elif something == "Custom":
self.alloroneentry.config(state=NORMAL)
self.graphfileentry.config(state=NORMAL)
self.graphfilebut.config(state=NORMAL)
self.stepentry.config(state=DISABLED)
self.windsizeentry.config(state=DISABLED)
self.graphheightentry.config(state=NORMAL)
self.maxyentry.config(state=NORMAL)
self.histoentry.config(state=NORMAL)
self.graphlinetentry.config(state=NORMAL)
self.poscolbutton.config(state=NORMAL)
self.negcolbutton.config(state=NORMAL)
self.logitbut.config(state=NORMAL)
self.ggapentry.config(state=NORMAL)
    def figureoptions(self):
        """Open (or re-open) the "Figure" options window.

        Builds entry widgets bound to the figure-level Tkinter variables
        (figure width, genome line thickness, gene and blast-hit heights,
        genome alignment) plus the legend settings.  Any previously open
        instance is destroyed first so only one window exists at a time.
        """
        # Destroy a still-open copy of this window, if any.
        try:
            self.figureoptionswindow.destroy()
        except:
            pass
        self.figureoptionswindow = Toplevel()
        self.figureoptionswindow.title("Figure")
        self.frame2 = Frame(self.figureoptionswindow)
        # --- main figure options ---
        self.mainoptionslab = Label(
            self.frame2, text="Figure Options", font="TkDefaultFont 13 bold underline"
        )
        self.mainoptionslab.grid(row=0, column=0)
        self.figwidthlabel = Label(self.frame2, text="Width of Figure (pixels):")
        self.figwidthlabel.grid(row=1, column=0)
        self.figwidthentry = Entry(self.frame2, textvariable=self.figwidthvar)
        self.figwidthentry.grid(row=1, column=1)
        self.gltlabel = Label(self.frame2, text="Thickness of genome line:")
        self.gltlabel.grid(row=2, column=0)
        self.gltentry = Entry(self.frame2, textvariable=self.gltvar)
        self.gltentry.grid(row=2, column=1)
        self.height1label = Label(self.frame2, text="Height of genes in figure:")
        self.height1label.grid(row=3, column=0)
        self.height1entry = Entry(self.frame2, textvariable=self.height1var)
        self.height1entry.grid(row=3, column=1)
        self.height2label = Label(self.frame2, text="Height of Blast hits in figure:")
        self.height2label.grid(row=4, column=0)
        self.height2entry = Entry(self.frame2, textvariable=self.height2var)
        self.height2entry.grid(row=4, column=1)
        self.alnlabel = Label(self.frame2, text="Alignment of genomes:")
        self.alnlabel.grid(row=5, column=0)
        self.alnentry = OptionMenu(
            self.frame2, self.aln, "left", "centre", "right", "best blast"
        )
        self.alnentry.config(width=5)
        self.alnentry.grid(row=5, column=1, sticky=EW)
        # --- legend options ---
        self.legendoptionslab = Label(
            self.frame2, text="Legend Options", font="TkDefaultFont 13 bold"
        )
        self.legendoptionslab.grid(row=6, column=0)
        self.drawfig1label = Label(self.frame2, text="Draw Blast identity legend?")
        self.drawfig1label.grid(row=7, column=0)
        self.drawfig1entry = Checkbutton(self.frame2, variable=self.drawfig1)
        self.drawfig1entry.grid(row=7, column=1)
        self.drawfig2label = Label(
            self.frame2, text="Length of scale legend (in base pairs):"
        )
        self.drawfig2label.grid(row=8, column=0)
        self.drawfig2entry = Entry(self.frame2, textvariable=self.drawfig2var)
        self.drawfig2entry.grid(row=8, column=1)
        self.leg2label = Label(self.frame2, text="Feature Legend:")
        self.leg2label.grid(row=9, column=0)
        self.leg2entry = OptionMenu(
            self.frame2, self.leg2, "None", "Single column", "Two columns"
        )
        self.leg2entry.grid(row=9, column=1, sticky=EW)
        self.legnamelabel = Label(self.frame2, text="Get feature name from")
        self.legnamelabel.grid(row=10, column=0)
        self.legnameentry = OptionMenu(
            self.frame2, self.legname, "gene", "product", "locus_tag", "note"
        )
        self.legnameentry.grid(row=10, column=1, sticky=EW)
        self.figureoptionsclosebutton = Button(
            self.frame2, text="close", command=self.figureoptionsclose
        )
        self.figureoptionsclosebutton.grid(row=11, column=1, sticky=E, pady=5)
        self.figureoptionswindow.geometry("+30+40")
        self.frame2.grid(padx=30, pady=10)
    def figureoptionsclose(self):
        """Close the "Figure" options window."""
        self.figureoptionswindow.destroy()
    def blastoptions(self):
        """Open (or re-open) the "Blast" options window.

        Builds widgets bound to the blast-filter variables (minimum hit
        length, maximum e-value, minimum identity), the colour swatches for
        normal and inverted hits, and the outline/filter checkbuttons.  Any
        previously open instance is destroyed first.
        """
        # Destroy a still-open copy of this window, if any.
        try:
            self.blastoptionswindow.destroy()
        except:
            pass
        self.blastoptionswindow = Toplevel()
        self.blastoptionswindow.title("Blast")
        self.frame3 = Frame(self.blastoptionswindow)
        self.blastoptionslab = Label(
            self.frame3, text="Blast Options", font="TkDefaultFont 13 bold underline"
        )
        self.blastoptionslab.grid(row=0, column=0)
        # --- hit filtering thresholds ---
        self.minlengthlabel = Label(self.frame3, text="Min. length:")
        self.minlengthlabel.grid(row=1, column=0)
        self.minlengthentry = Entry(self.frame3, textvariable=self.minlengthvar)
        self.minlengthentry.grid(row=1, column=1, columnspan=4)
        self.minevallabel = Label(self.frame3, text="Max. e Value:")
        self.minevallabel.grid(row=2, column=0)
        self.minevalentry = Entry(self.frame3, textvariable=self.minevalvar)
        self.minevalentry.grid(row=2, column=1, columnspan=4)
        self.minIdentlabel = Label(self.frame3, text="Min. Identity value:")
        self.minIdentlabel.grid(row=3, column=0)
        self.minIdententry = Entry(self.frame3, textvariable=self.minIdentvar)
        self.minIdententry.grid(row=3, column=1, columnspan=4)
        # --- colour pickers: one column for normal hits, one for inverted ---
        self.blastlabel = Label(self.frame3, text="normal")
        self.blastlabel.grid(row=4, column=1, columnspan=2)
        self.blastilabel = Label(self.frame3, text="inverted")
        self.blastilabel.grid(row=4, column=3, columnspan=2)
        self.minblastctag = Label(self.frame3, text="Choose minimum blast colour:")
        self.minblastctag.grid(row=5, column=0)
        self.minblastcentry = Button(self.frame3, text="...", command=self.getminblastc)
        self.minblastcentry.grid(row=5, column=2)
        self.minblastclabel = Label(
            self.frame3, width=3, bg=self.minblastchex, relief=RIDGE
        )
        self.minblastclabel.grid(row=5, column=1)
        self.minblastcentryi = Button(
            self.frame3, text="...", command=self.getminblastci
        )
        self.minblastcentryi.grid(row=5, column=4)
        self.minblastclabeli = Label(
            self.frame3, width=3, bg=self.minblastcihex, relief=RIDGE
        )
        self.minblastclabeli.grid(row=5, column=3)
        self.maxblastctag = Label(self.frame3, text="Choose maximum blast colour:")
        self.maxblastctag.grid(row=6, column=0)
        self.maxblastcentry = Button(self.frame3, text="...", command=self.getmaxblastc)
        self.maxblastcentry.grid(row=6, column=2)
        self.maxblastclabel = Label(
            self.frame3, width=3, bg=self.maxblastchex, relief=RIDGE
        )
        self.maxblastclabel.grid(row=6, column=1)
        self.maxblastcentryi = Button(
            self.frame3, text="...", command=self.getmaxblastci
        )
        self.maxblastcentryi.grid(row=6, column=4)
        self.maxblastclabeli = Label(
            self.frame3, width=3, bg=self.maxblastcihex, relief=RIDGE
        )
        self.maxblastclabeli.grid(row=6, column=3)
        # --- display toggles ---
        self.blastoutlinetag = Label(self.frame3, text="Outline blast hits in black:")
        self.blastoutlinetag.grid(row=7, column=0)
        self.blastoutlineentry = Checkbutton(self.frame3, variable=self.blastoutline)
        self.blastoutlineentry.grid(row=7, column=1, columnspan=4)
        self.autodetectlab = Label(
            self.frame3, text="Filter small blast hits/annotations:"
        )
        self.autodetectlab.grid(row=8, column=0)
        self.autodetectentry = Checkbutton(self.frame3, variable=self.autodetect)
        self.autodetectentry.grid(row=8, column=1, columnspan=4)
        self.blastoptionsclosebutton = Button(
            self.frame3, text="close", command=self.blastoptionsclose
        )
        self.blastoptionsclosebutton.grid(
            row=9, column=1, columnspan=4, sticky=E, pady=5
        )
        self.blastoptionswindow.geometry("+30+40")
        self.frame3.grid(padx=30, pady=10)
    def blastoptionsclose(self):
        """Close the "Blast" options window."""
        self.blastoptionswindow.destroy()
def annotateoptions(self):
try:
self.annotateoptionswindow.destroy()
except:
pass
self.annotateoptionswindow = Toplevel()
self.annotateoptionswindow.title("Annotation")
self.frame4 = Frame(self.annotateoptionswindow)
self.annotLab = Label(
self.frame4,
text="Annotation Options",
font="TkDefaultFont 13 bold underline",
)
self.annotLab.grid(row=0, column=0)
self.leglabel = Label(self.frame4, text="Feature Labels:")
self.leglabel.grid(row=1, column=0)
self.leg = StringVar(value="None")
self.legentry = OptionMenu(
self.frame4, self.leg, "None", "Top", "Bottom", "Top & Bottom"
)
self.legentry.config(width=5)
self.legentry.grid(row=1, column=1, columnspan=4, sticky=EW)
self.legnamelabel = Label(self.frame4, text="Get feature name from:")
self.legnamelabel.grid(row=2, column=0)
self.legnameentry = OptionMenu(
self.frame4, self.legname, "gene", "product", "locus_tag", "note"
)
self.legnameentry.grid(row=2, column=1, columnspan=4, sticky=EW)
self.exontlabel = Label(self.frame4, text="Thickness of exon lines:")
self.exontlabel.grid(row=3, column=0)
self.exontentry = Entry(self.frame4, textvariable=self.exontvar)
self.exontentry.grid(row=3, column=1, columnspan=4)
self.genetlabel = Label(self.frame4, text="Thickness of gene outlines:")
self.genetlabel.grid(row=4, column=0)
self.genetentry = Entry(self.frame4, textvariable=self.genetvar)
self.genetentry.grid(row=4, column=1, columnspan=4)
self.featlabel = Label(
self.frame4, text="Include following features", font="TkDefaultFont 13 bold"
)
self.featlabel.grid(row=5, column=0)
self.featcolour = Label(
self.frame4, text="Colour", font="TkDefaultFont 13 bold"
)
self.featcolour.grid(row=5, column=1, columnspan=2)
self.featshape = Label(self.frame4, text="type", font="TkDefaultFont 13 bold")
self.featshape.grid(row=5, column=3, columnspan=2)
self.geneflabel = Label(self.frame4, text="gene")
self.geneflabel.grid(row=6, column=0)
self.genefentry = Checkbutton(self.frame4, variable=self.genef)
self.genefentry.grid(row=6, column=0, sticky=E)
self.genefcolourBut = Button(
self.frame4, width=1, height=1, text="...", command=self.pickcolourgene
)
self.genefcolourBut.grid(row=6, column=2)
self.genefcolourlabel = Label(
self.frame4, width=3, bg=self.genefcolourhex, relief=RIDGE
)
self.genefcolourlabel.grid(row=6, column=1)
self.genefrectentry = OptionMenu(
self.frame4, self.genefrect, "arrow", "rect", "frame", "pointer"
)
self.genefrectentry.config(width=10)
self.genefrectentry.grid(row=6, column=3, columnspan=2, sticky=EW)
self.cdsflabel = Label(self.frame4, text="CDS")
self.cdsflabel.grid(row=7, column=0)
self.cdsfentry = Checkbutton(self.frame4, variable=self.cdsf)
self.cdsfentry.grid(row=7, column=0, sticky=E)
self.cdsfcolourBut = Button(
self.frame4, width=1, height=1, text="...", command=self.pickcolourcds
)
self.cdsfcolourBut.grid(row=7, column=2)
self.cdsfcolourlabel = Label(
self.frame4, width=3, bg=self.cdsfcolourhex, relief=RIDGE
)
self.cdsfcolourlabel.grid(row=7, column=1)
self.cdsfrectentry = OptionMenu(
self.frame4, self.cdsfrect, "arrow", "rect", "frame", "pointer"
)
self.cdsfrectentry.config(width=6)
self.cdsfrectentry.grid(row=7, column=3, columnspan=2, sticky=EW)
self.trnaflabel = Label(self.frame4, text="tRNA")
self.trnaflabel.grid(row=8, column=0)
self.trnafentry = Checkbutton(self.frame4, variable=self.trnaf)
self.trnafentry.grid(row=8, column=0, sticky=E)
self.trnafcolourBut = Button(
self.frame4, width=1, height=1, text="...", command=self.pickcolourtrna
)
self.trnafcolourBut.grid(row=8, column=2)
self.trnafcolourlabel = Label(
self.frame4, width=3, bg=self.trnafcolourhex, relief=RIDGE
)
self.trnafcolourlabel.grid(row=8, column=1)
self.trnafrectentry = OptionMenu(
self.frame4, self.trnafrect, "arrow", "rect", "frame", "pointer"
)
self.trnafrectentry.config(width=6)
self.trnafrectentry.grid(row=8, column=3, columnspan=2, sticky=EW)
self.miscflabel = Label(self.frame4, text="misc_feature")
self.miscflabel.grid(row=9, column=0)
self.miscfentry = Checkbutton(self.frame4, variable=self.miscf)
self.miscfentry.grid(row=9, column=0, sticky=E)
self.miscfcolourBut = Button(
self.frame4, width=1, height=1, text="...", command=self.pickcolourmisc
)
self.miscfcolourBut.grid(row=9, column=2)
self.miscfcolourlabel = Label(
self.frame4, width=3, bg=self.miscfcolourhex, relief=RIDGE
)
self.miscfcolourlabel.grid(row=9, column=1)
self.miscfrectentry = OptionMenu(
self.frame4, self.miscfrect, "arrow", "rect", "frame", "pointer"
)
self.miscfrectentry.config(width=6)
self.miscfrectentry.grid(row=9, column=3, columnspan=2, sticky=EW)
self.randflabel = Entry(self.frame4, textvariable=self.randfeat)
self.randflabel.grid(row=10, column=0)
self.randfentry = Checkbutton(self.frame4, variable=self.randf)
self.randfentry.grid(row=10, column=0, sticky=E)
self.randfcolourBut = Button(
self.frame4, width=1, height=1, text="...", command=self.pickcolourrand
)
self.randfcolourBut.grid(row=10, column=2)
self.randfcolourlabel = Label(
self.frame4, width=3, bg=self.randfcolourhex, relief=RIDGE
)
self.randfcolourlabel.grid(row=10, column=1)
self.randfrectentry = OptionMenu(
self.frame4, self.randfrect, "arrow", "rect", "frame", "pointer"
)
self.randfrectentry.config(width=6)
self.randfrectentry.grid(row=10, column=3, columnspan=2, sticky=EW)
self.annotateoptionsclosebutton = Button(
self.frame4, text="close", command=self.annotateoptionsclose
)
self.annotateoptionsclosebutton.grid(
row=11, column=3, columnspan=2, sticky=E, pady=5
)
self.annotateoptionswindow.geometry("+30+40")
self.frame4.grid(padx=30, pady=10)
    def annotateoptionsclose(self):
        """Close the "Annotation" options window."""
        self.annotateoptionswindow.destroy()
    def graphoptions(self):
        """Open (or re-open) the "Graph" options window.

        Builds the graph-type menu and all widgets bound to the graph
        variables (input file, step/window size, height, Y maximum, log
        scale, histogram/line type, axis thickness, positive/negative
        colours and the graph-to-figure gap).  Most widgets start DISABLED;
        graphtypechanges() is called at the end to enable the set that
        matches the currently selected graph type.  Any previously open
        instance is destroyed first.
        """
        # Destroy a still-open copy of this window, if any.
        try:
            self.graphoptionswindow.destroy()
        except:
            pass
        self.graphoptionswindow = Toplevel()
        self.graphoptionswindow.title("Graph")
        self.frame5 = Frame(self.graphoptionswindow)
        self.graphlabel = Label(
            self.frame5, text="Graph options", font="TkDefaultFont 13 bold"
        )
        self.graphlabel.grid(row=0, column=0)
        self.graphtypelabel = Label(self.frame5, text="Graph:")
        self.graphtypelabel.grid(row=1, column=0)
        # Changing the type re-enables/disables the widgets below.
        self.graphtypeentry = OptionMenu(
            self.frame5,
            self.graphtype,
            "None",
            "GC Content",
            "GC Skew",
            "Coverage",
            "Custom",
            command=self.graphtypechanges,
        )
        self.graphtypeentry.grid(row=1, column=1)
        self.alloronelabel = Label(self.frame5, text="Multiple graphs:")
        self.alloronelabel.grid(row=2, column=0)
        self.alloroneentry = Checkbutton(
            self.frame5, variable=self.allorone, state=DISABLED
        )
        self.alloroneentry.grid(row=2, column=1)
        self.graphfilelabel = Label(self.frame5, text="Input file:")
        self.graphfilelabel.grid(row=3, column=0)
        self.graphfileentry = Entry(
            self.frame5, textvariable=self.graphfile, state=DISABLED
        )
        self.graphfileentry.grid(row=3, column=1)
        self.graphfilebut = Button(
            self.frame5,
            width=1,
            height=1,
            text="...",
            command=self.opengraphfile,
            state=DISABLED,
        )
        self.graphfilebut.grid(row=3, column=2)
        self.steplabel = Label(self.frame5, text="Step size:")
        self.steplabel.grid(row=4, column=0)
        self.stepentry = Entry(self.frame5, textvariable=self.step, state=DISABLED)
        self.stepentry.grid(row=4, column=1)
        self.windsizelabel = Label(self.frame5, text="Window size:")
        self.windsizelabel.grid(row=5, column=0)
        self.windsizeentry = Entry(
            self.frame5, textvariable=self.windsize, state=DISABLED
        )
        self.windsizeentry.grid(row=5, column=1)
        self.graphheightlabel = Label(self.frame5, text="Graph Height:")
        self.graphheightlabel.grid(row=6, column=0)
        self.graphheightentry = Entry(
            self.frame5, textvariable=self.graphheight, state=DISABLED
        )
        self.graphheightentry.grid(row=6, column=1)
        self.maxylabel = Label(self.frame5, text="Maximum Y:")
        self.maxylabel.grid(row=7, column=0)
        self.maxyentry = Entry(self.frame5, textvariable=self.maxy, state=DISABLED)
        self.maxyentry.grid(row=7, column=1)
        self.logitlabel = Label(self.frame5, text="Log scale (log10):")
        self.logitlabel.grid(row=8, column=0)
        self.logitbut = Checkbutton(self.frame5, variable=self.logit, state=DISABLED)
        self.logitbut.grid(row=8, column=1)
        self.histolabel = Label(self.frame5, text="Graph Type:")
        self.histolabel.grid(row=9, column=0)
        self.histoentry = OptionMenu(self.frame5, self.histo, "Histogram", "Line")
        self.histoentry.config(state=DISABLED)
        self.histoentry.grid(row=9, column=1)
        self.graphlinetlabel = Label(self.frame5, text="Axis line thickness:")
        self.graphlinetlabel.grid(row=10, column=0)
        self.graphlinetentry = Entry(
            self.frame5, textvariable=self.graphlinet, state=DISABLED
        )
        self.graphlinetentry.grid(row=10, column=1)
        self.poslabel = Label(self.frame5, text="Positive value colour:")
        self.poslabel.grid(row=11, column=0)
        self.poscollabel = Label(self.frame5, width=3, bg=self.poscolhex, relief=RIDGE)
        self.poscollabel.grid(row=11, column=1, sticky=EW)
        self.poscolbutton = Button(
            self.frame5,
            width=1,
            height=1,
            text="...",
            command=self.pickposcol,
            state=DISABLED,
        )
        self.poscolbutton.grid(row=11, column=2)
        self.neglabel = Label(self.frame5, text="Negative value colour:")
        self.neglabel.grid(row=12, column=0)
        self.negcollabel = Label(self.frame5, width=3, bg=self.negcolhex, relief=RIDGE)
        self.negcollabel.grid(row=12, column=1, sticky=EW)
        self.negcolbutton = Button(
            self.frame5,
            width=1,
            height=1,
            text="...",
            command=self.picknegcol,
            state=DISABLED,
        )
        self.negcolbutton.grid(row=12, column=2)
        self.ggaplabel = Label(self.frame5, text="Gap between graph and figure:")
        self.ggaplabel.grid(row=13, column=0)
        self.ggapentry = Entry(self.frame5, textvariable=self.ggap, state=DISABLED)
        self.ggapentry.grid(row=13, column=1)
        self.graphoptionsclosebutton = Button(
            self.frame5, text="close", command=self.graphoptionsclose
        )
        self.graphoptionsclosebutton.grid(
            row=14, column=1, columnspan=2, sticky=E, pady=5
        )
        self.graphoptionswindow.geometry("+30+40")
        # Sync widget enable-states with the currently selected graph type.
        self.graphtypechanges(self.graphtype.get())
        self.frame5.grid(padx=30, pady=10)
    def graphoptionsclose(self):
        """Close the "Graph" options window."""
        self.graphoptionswindow.destroy()
    def opengraphfile(self):
        """Ask for a graph input file and store the chosen path in self.graphfile."""
        # NOTE(review): if the dialog is cancelled this sets the path to "" —
        # confirm that clearing the previous choice is intended.
        filename = tkFileDialog.askopenfilename(parent=self.graphoptionswindow)
        self.graphfile.set(filename)
def pickcolourgene(self):
colour = tkColorChooser.askcolor(
self.genefcolour, parent=self.annotateoptionswindow
)
if colour != None:
self.genefcolour = colour[0]
self.genefcolourhex = colour[1]
self.genefcolourlabel.configure(bg=colour[1])
def pickcolourcds(self):
colour = tkColorChooser.askcolor(
self.cdsfcolour, parent=self.annotateoptionswindow
)
if colour != None:
self.cdsfcolour = colour[0]
self.cdsfcolourhex = colour[1]
self.cdsfcolourlabel.configure(bg=colour[1])
def pickcolourtrna(self):
colour = tkColorChooser.askcolor(
self.trnafcolour, parent=self.annotateoptionswindow
)
if colour != None:
self.trnafcolour = colour[0]
self.trnafcolourhex = colour[1]
self.trnafcolourlabel.configure(bg=colour[1])
def pickcolourrand(self):
colour = tkColorChooser.askcolor(
self.randfcolour, parent=self.annotateoptionswindow
)
if colour != None:
self.randfcolour = colour[0]
self.randfcolourhex = colour[1]
self.randfcolourlabel.configure(bg=colour[1])
def pickcolourmisc(self):
colour = tkColorChooser.askcolor(
self.miscfcolour, parent=self.annotateoptionswindow
)
if colour != None:
self.miscfcolour = colour[0]
self.miscfcolourhex = colour[1]
self.miscfcolourlabel.configure(bg=colour[1])
def getminblastc(self):
colour = tkColorChooser.askcolor(self.minblastc, parent=self.blastoptionswindow)
if colour != None:
self.minblastc = colour[0]
self.minblastchex = colour[1]
self.minblastclabel.configure(bg=colour[1])
def getmaxblastc(self):
colour = tkColorChooser.askcolor(self.maxblastc, parent=self.blastoptionswindow)
if colour != None:
self.maxblastc = colour[0]
self.maxblastchex = colour[1]
self.maxblastclabel.configure(bg=colour[1])
def getminblastci(self):
colour = tkColorChooser.askcolor(
self.minblastci, parent=self.blastoptionswindow
)
if colour != None:
self.minblastci = colour[0]
self.minblastcihex = colour[1]
self.minblastclabeli.configure(bg=colour[1])
def getmaxblastci(self):
colour = tkColorChooser.askcolor(
self.maxblastci, parent=self.blastoptionswindow
)
if colour != None:
self.maxblastci = colour[0]
self.maxblastcihex = colour[1]
self.maxblastclabeli.configure(bg=colour[1])
def makeFigure(self):
global abortCaptain
if self.outfile.get() == "" and not self.filetype.get().startswith("Preview"):
self.getoutfile()
if self.outfile.get() == "" and not self.filetype.get().startswith("Preview"):
return None
try:
if self.thegenblast.isAlive():
tkMessageBox.showerror("Please wait", "BLAST already running.")
return None
except:
pass
if self.running:
abortCaptain = True
else:
abortCaptain = False
self.running = True
self.createFigure.config(text="Cancel Figure")
try:
self.minlength = int(self.minlengthvar.get())
except:
tkMessageBox.showerror(
"Try again.", "Please enter a valid integer for minimum length."
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.mineval = float(self.minevalvar.get())
except:
tkMessageBox.showerror(
"Try again.",
"Please enter a valid floating point number for minimum e value.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.minIdent = float(self.minIdentvar.get())
except:
tkMessageBox.showerror(
"Try again.",
"Please enter a valid floating point number for minimum identity.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.figwidth = int(self.figwidthvar.get())
except:
tkMessageBox.showerror(
"Try again.", "Please enter a valid integer for figure width."
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.height1 = int(self.height1var.get())
except:
tkMessageBox.showerror(
"Try again.", "Please enter a valid integer for height of genes."
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.height2 = int(self.height2var.get())
except:
tkMessageBox.showerror(
"Try again.",
"Please enter a valid integer for height of blast matches.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.glt = int(self.gltvar.get())
except:
tkMessageBox.showerror(
"Try again.",
"Please enter a valid integer for genome line thickness.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.exont = int(self.exontvar.get())
except:
tkMessageBox.showerror(
"Try again.",
"Please enter a valid integer for exon line thickeness.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.genet = int(self.genetvar.get())
except:
tkMessageBox.showerror(
"Try again.",
"Please enter a valid integer for exon line thickeness.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
self.featDict = {}
if self.genef.get() == 1:
self.featDict["gene"] = (self.genefrect.get(), self.genefcolour)
if self.cdsf.get() == 1:
self.featDict["CDS"] = (self.cdsfrect.get(), self.cdsfcolour)
if self.trnaf.get() == 1:
self.featDict["tRNA"] = (self.trnafrect.get(), self.trnafcolour)
if self.miscf.get() == 1:
self.featDict["misc_feature"] = (self.miscfrect.get(), self.miscfcolour)
if self.randf.get() == 1:
self.featDict[self.randfeat.get()] = (
self.randfrect.get(),
self.randfcolour,
)
self.reverseList = []
self.minmaxlist = []
for i in self.genlist.get(0, END):
self.reverseList.append(self.revlist[i[:2]])
try:
if self.maxcutlist[i[:2]] == "Max":
self.minmaxlist.append((int(self.mincutlist[i[:2]]), "Max"))
else:
self.minmaxlist.append(
(int(self.mincutlist[i[:2]]), int(self.maxcutlist[i[:2]]))
)
except:
tkMessageBox.showerror(
"Try again.",
"Please enter a valid integer cut off points for annotation file "
+ i[:2]
+ ".",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
if self.graphtype.get() != "None" and (
self.leg.get() == "Top" or self.leg.get() == "Top & Bottom"
):
tkMessageBox.showerror(
"Try again.",
"Please Choose either the graph or the legend to be displayed above the figure.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
if self.leg.get() != "None" and self.leg2.get() != "None":
tkMessageBox.showerror(
"Try again.",
"Please Choose either feature labels or a feature legend.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
else:
if self.leg.get() == "None":
self.theleg = self.leg2.get()
else:
self.theleg = self.leg.get()
self.inputlist = []
self.graphlist = []
getit = True
nocompare = False
if self.genlist.size() > 1 and self.blastlist.size() == 0:
nocompare = tkMessageBox.askquestion(
"No blast files.",
"Only gene figures will be drawn, continue?\n(To create a comparison figure please generate or manually input a blast file)",
)
if nocompare == "no":
self.running = False
self.createFigure.config(text="Create Figure")
return None
else:
nocompare = True
if self.genlist.size() > 0:
if self.graphtype.get() == "GC Content":
self.graphlist.append(
self.getGCcontent(
self.genlist.get(0)[4:],
self.mincutlist[self.genlist.get(0)[:2]],
self.maxcutlist[self.genlist.get(0)[:2]],
)
)
elif self.graphtype.get() == "GC Skew":
self.graphlist.append(
self.getGCskew(
self.genlist.get(0)[4:],
self.mincutlist[self.genlist.get(0)[:2]],
self.maxcutlist[self.genlist.get(0)[:2]],
)
)
self.inputlist.append(self.genlist.get(0)[4:])
tempfile = open(self.genlist.get(0)[4:])
getline = True
line = tempfile.readline()
while getline and line != "":
if (
line.startswith("FT source")
or line.startswith(" source")
or line.startswith("ORIGIN")
or line.startswith("SQ Sequence")
or line.startswith(">")
):
getline = False
line = tempfile.readline()
if getline:
self.genlengths = [
tkSimpleDialog.askinteger(
"Length not in file",
"Please enter the length of genome in file 1",
)
]
else:
self.genlengths = [None]
tempfile.close()
else:
tkMessageBox.showerror(
"Try again.", "Please enter at least one feature file."
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
if self.genlist.size() - 1 == self.blastlist.size() or nocompare:
for i in range(self.genlist.size() - 1):
self.inputlist.append(self.blastlist.get(i))
if (
self.graphtype.get() == "GC Content"
and self.allorone.get() == 1
):
self.graphlist.append(
self.getGCcontent(
self.genlist.get(i + 1)[4:],
self.mincutlist[self.genlist.get(i + 1)[:2]],
self.maxcutlist[self.genlist.get(i + 1)[:2]],
)
)
elif self.graphtype.get() == "GC Skew" and self.allorone.get() == 1:
self.graphlist.append(
self.getGCcontent(
self.genlist.get(i + 1)[4:],
self.mincutlist[self.genlist.get(i + 1)[:2]],
self.maxcutlist[self.genlist.get(i + 1)[:2]],
)
)
self.inputlist.append(self.genlist.get(i + 1)[4:])
tempfile = open(self.genlist.get(i + 1)[4:])
getline = True
line = tempfile.readline()
while getline and line != "":
if (
line.startswith("FT source")
or line.startswith(" source")
or line.startswith("ORIGIN")
or line.startswith("SQ Sequence")
or line.startswith(">")
):
getline = False
line = tempfile.readline()
if getline:
self.genlengths.append(
tkSimpleDialog.askinteger(
"Length not in file",
"Please enter the length of genome in file "
+ str(i + 1)
+ ".",
)
)
else:
self.genlengths.append(None)
tempfile.close()
else:
if self.blastlist.size() >= self.genlist.size():
tkMessageBox.showerror("Try again.", "Too many blast files.")
else:
tkMessageBox.showerror("Try again.", "Too few blast files.")
self.running = False
self.createFigure.config(text="Create Figure")
return None
try:
self.drawfig2 = int(self.drawfig2var.get())
if self.drawfig2 == 0:
self.drawfig2 = False
except:
tkMessageBox.showerror(
"Try again.", "Please enter a valid integer for length of figure 2."
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
self.compress = False
if self.drawfig1.get() == 1:
self.vardrawfig1 = True
else:
self.vardrawfig1 = False
if self.blastoutline.get() == 1:
self.varblastoutline = True
else:
self.varblastoutline = False
if self.graphtype.get() == "None":
self.vargraphit = None
elif self.graphtype.get() == "GC Content":
self.vargraphit = [
self.graphlist,
self.poscol,
self.negcol,
int(self.graphheight.get()),
int(self.graphlinet.get()),
self.histo.get(),
self.maxy.get(),
int(self.ggap.get()),
]
if None in self.vargraphit[0]:
self.running = False
self.createFigure.config(text="Create Figure")
return None
elif self.graphtype.get() == "GC Skew":
self.vargraphit = [
self.graphlist,
self.poscol,
self.negcol,
int(self.graphheight.get()),
int(self.graphlinet.get()),
self.histo.get(),
self.maxy.get(),
int(self.ggap.get()),
]
if None in self.vargraphit[0] == None:
self.running = False
self.createFigure.config(text="Create Figure")
return None
elif self.graphtype.get() == "Coverage":
self.vargraphit = [
[self.getCoverage()],
self.poscol,
self.negcol,
int(self.graphheight.get()),
int(self.graphlinet.get()),
self.histo.get(),
self.maxy.get(),
int(self.ggap.get()),
]
if None in self.vargraphit[0]:
self.running = False
self.createFigure.config(text="Create Figure")
return None
elif self.graphtype.get() == "Custom":
tempcustom = self.getCustom()
if self.allorone.get() != 1:
tempcustom = [tempcustom[0]]
self.vargraphit = [
tempcustom,
self.poscol,
self.negcol,
int(self.graphheight.get()),
int(self.graphlinet.get()),
self.histo.get(),
self.maxy.get(),
int(self.ggap.get()),
]
if None in self.vargraphit[0]:
self.running = False
self.createFigure.config(text="Create Figure")
return None
if self.cutstate != None and self.cutstate != (
str(self.mincutlist),
str(self.maxcutlist),
):
tkMessageBox.showerror(
"Try again.",
"Please generate blast files again, blast files do not match modified annotation files.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
if self.orderstate != None and self.orderstate != self.genlist.get(0, END):
tkMessageBox.showerror(
"Try again.",
"Please generate blast files again, order of annotation files has been changed.",
)
self.running = False
self.createFigure.config(text="Create Figure")
return None
getit = True
for i in self.inputlist:
if not os.path.exists(i):
if not i == "" and not nocompare:
getit = False
if not getit:
tkMessageBox.showerror("Try again.", "A selected file does not exist.")
self.running = False
self.createFigure.config(text="Create Figure")
return None
else:
self.thethread = threading.Thread(target=self.makeFigure2)
self.thethread.start()
self.thethread2 = threading.Thread(target=self.dotdotdot)
self.thethread2.start()
def dotdotdot(self):
while self.thethread.isAlive():
time.sleep(0.5)
self.processLab.config(text="Drawing figure..")
time.sleep(0.5)
self.processLab.config(text="Drawing figure...")
time.sleep(0.5)
self.processLab.config(text="Drawing figure.")
if self.theminblast == 101:
self.processLab.config(text="Drawing figure...\ncomplete.")
elif self.theminblast == None:
self.processLab.config(text="Drawing figure...\nfailed.")
else:
self.processLab.config(text="Drawing figure...\ncomplete.")
self.running = False
self.createFigure.config(text="Create Figure")
def makeFigure2(self):
if self.filetype.get() == "Bitmap (bmp)":
if self.outfile.get()[-4:].lower() != ".bmp":
theoutfile = self.outfile.get() + ".bmp"
else:
theoutfile = self.outfile.get()
self.theminblast = draw(
theoutfile,
self.minlength,
self.mineval,
self.minIdent,
self.inputlist,
self.figwidth,
self.height1,
self.height2,
self.minblastc,
self.maxblastc,
self.minblastci,
self.maxblastci,
self.vardrawfig1,
self.drawfig2,
False,
self.compress,
self.reverseList,
self.featDict,
self.glt,
self.exont,
self.genet,
self.genlengths,
self.aln.get(),
self.vargraphit,
self.varblastoutline,
self.minmaxlist,
self.autodetect.get() == 1,
self.theleg,
self.legname.get(),
)
elif self.filetype.get() == "Vector file (svg)":
if self.outfile.get()[-4:].lower() != ".svg":
theoutfile = self.outfile.get() + ".svg"
else:
theoutfile = self.outfile.get()
self.theminblast = drawsvg(
theoutfile,
self.minlength,
self.mineval,
self.minIdent,
self.inputlist,
self.figwidth,
self.height1,
self.height2,
self.minblastc,
self.maxblastc,
self.minblastci,
self.maxblastci,
self.vardrawfig1,
self.drawfig2,
False,
self.compress,
self.reverseList,
self.featDict,
self.glt,
self.exont,
self.genet,
self.genlengths,
self.aln.get(),
self.vargraphit,
self.varblastoutline,
self.minmaxlist,
self.autodetect.get() == 1,
self.theleg,
self.legname.get(),
)
else:
self.theminblast = self.getPreview()
def getPreview(self):
try:
self.prevwindow.destroy()
except:
pass
theoutfile = None
if self.filetype.get() == "Preview (1:1)":
testit, self.theminblast, width, height = draw(
theoutfile,
self.minlength,
self.mineval,
self.minIdent,
self.inputlist,
self.figwidth,
self.height1,
self.height2,
self.minblastc,
self.maxblastc,
self.minblastci,
self.maxblastci,
self.vardrawfig1,
self.drawfig2,
False,
self.compress,
self.reverseList,
self.featDict,
self.glt,
self.exont,
self.genet,
self.genlengths,
self.aln.get(),
self.vargraphit,
self.varblastoutline,
self.minmaxlist,
self.autodetect.get() == 1,
self.theleg,
self.legname.get(),
1,
)
else:
testit, self.theminblast, width, height = draw(
theoutfile,
self.minlength,
self.mineval,
self.minIdent,
self.inputlist,
self.figwidth,
self.height1,
self.height2,
self.minblastc,
self.maxblastc,
self.minblastci,
self.maxblastci,
self.vardrawfig1,
self.drawfig2,
False,
self.compress,
self.reverseList,
self.featDict,
self.glt,
self.exont,
self.genet,
self.genlengths,
self.aln.get(),
self.vargraphit,
self.varblastoutline,
self.minmaxlist,
self.autodetect.get() == 1,
self.theleg,
self.legname.get(),
2,
)
self.prevwindow = Toplevel()
self.prevwindow.title("Preview")
self.prevframe = Frame(self.prevwindow)
self.prevwindow.grid_rowconfigure(0, weight=1)
self.prevwindow.grid_columnconfigure(0, weight=1)
self.prevwindow.geometry("+30+40")
self.prevframe.grid(row=0, column=0, sticky=NSEW)
self.prevframe.grid_rowconfigure(0, weight=1)
self.prevframe.grid_columnconfigure(0, weight=1)
xscrollbar = Scrollbar(self.prevframe, orient=HORIZONTAL)
xscrollbar.grid(row=1, column=0, sticky=E + W)
yscrollbar = Scrollbar(self.prevframe)
yscrollbar.grid(row=0, column=1, sticky=N + S)
self.canvas = Canvas(
self.prevframe,
bd=0,
bg="#000000",
scrollregion=(0, 0, width, height),
xscrollcommand=xscrollbar.set,
yscrollcommand=yscrollbar.set,
)
test = PhotoImage(data=testit)
self.canvas.create_image(0, 0, image=test, anchor=NW)
self.canvas.grid(row=0, column=0, sticky=NSEW)
self.canvas.image = test
xscrollbar.config(command=self.canvas.xview)
yscrollbar.config(command=self.canvas.yview)
# label = Label(self.prevframe, image=test)
# label.image = test
# label.grid()
# self.canvas.image = test
return self.theminblast
def gbk2fasta(self, genbank, out, mincut, maxcut):
getseq = False
getembl = False
getmultifa = False
seq = ""
try:
mincut = int(mincut)
if mincut < 1:
mincut = 1
if maxcut != "Max":
maxcut = int(maxcut)
if maxcut < 1:
maxcut = 1
except:
tkMessageBox.showerror("Try again.", "Annotation slice values not valid.")
try:
gen = open(genbank)
outfile = open(out, "w")
for line in gen:
if line.startswith("ORIGIN"):
getseq = True
elif line.startswith("SQ Sequence"):
getembl = True
elif line.startswith(">"):
if getmultifa:
seq += "qqq"
else:
getmultifa = True
elif line.startswith("//"):
getseq = False
getembl = False
elif getseq:
seq += "".join(line.split()[1:])
elif getembl:
seq += "".join(line.split()[:-1])
elif getmultifa:
seq += line.rstrip()
if getmultifa:
getset = set(seq)
rightchars = set("atgcATGCqnNuUyYkKmMsSwWbBdDhHvVxXrR-")
isitgood = True
for i in getset:
if not i in rightchars:
isitgood = False
if not isitgood:
tkMessageBox.showerror(
"Try again.",
"Annotation file contains invalid characters.\
Check genbank/EMBL contains no lines starting with > or that\
fasta file contains only valid nucleotides",
)
return 0
if "/" in out:
outfile.write(">" + out.split("/")[1] + "\n")
else:
outfile.write(">" + out + "\n")
if maxcut == "Max":
maxcut = len(seq)
if mincut == 1 and maxcut == len(seq):
if getmultifa:
seq = seq.replace("qqq", "n" * int(len(seq) / 500))
outfile.write(seq)
elif mincut < maxcut:
seq = seq[mincut - 1 : maxcut]
if getmultifa:
seq = seq.replace("qqq", "n" * int(len(seq) / 500))
outfile.write(seq)
else:
seq = seq[mincut - 1 :] + seq[:maxcut]
if getmultifa:
seq = seq.replace("qqq", "n" * int(len(seq) / 500))
outfile.write(seq)
if len(seq) == 0:
tkMessageBox.showerror(
"Try again.", "There is no sequence in " + genbank + "."
)
return 0
else:
return 1
except:
tkMessageBox.showerror("Try again.", genbank + " does not exist.")
return 0
def genBlast(self):
try:
if self.thegenblast.isAlive():
tkMessageBox.showerror("Please wait", "BLAST already running.")
return None
except:
pass
try:
if self.thethread.isAlive():
tkMessageBox.showerror("Please wait", "easyfig creating figure.")
return None
except:
pass
try:
if self.thedlblast.isAlive():
tkMessageBox.showerror("Please wait", "Blast is downloading.")
return None
except:
pass
self.processLab.config(text="Performing blastn...")
self.workingDir = tkFileDialog.askdirectory(
title="Please select a working directory."
)
if self.workingDir == ():
self.processLab.config(text="Performing blastn...\nCancelled.")
return None
os.chdir(self.workingDir)
index = 0
if self.genlist.size() < 2:
tkMessageBox.showerror(
"Try again.",
"easyfig needs at least 2 genbank files to create blast files.",
)
self.processLab.config(text="Performing blastn...\nCancelled.")
return None
else:
thegenlist = self.genlist.get(0, END)
for i in thegenlist:
index += 1
temp = self.gbk2fasta(
i[4:],
str(index) + ".easyfig.fa",
self.mincutlist[i[:2]],
self.maxcutlist[i[:2]],
)
if temp == 0:
return None
self.cutstate = (str(self.mincutlist), str(self.maxcutlist))
self.orderstate = self.genlist.get(0, END)
if self.blastnDir == None:
pass
elif self.blastnDir[-11:] == "tblastx.exe":
if os.path.exists(self.blastnDir[:-11] + "blastn.exe"):
self.blastnDir = self.blastnDir[:-11] + "blastn.exe"
else:
self.blastnDir = None
elif self.blastnDir[-7:] == "tblastx":
if os.path.exists(self.blastnDir[:-7] + "blastn"):
self.blastnDir = self.blastnDir[:-7] + "blastn"
else:
self.blastnDir = None
if self.dbnDir != None:
if not os.path.exists(self.dbnDir):
self.dbnDir = None
if self.blastnDir != None:
if not os.path.exists(self.blastnDir):
self.blastnDir = None
if self.dbnDir != None:
pass
elif isNewBlastDB():
self.dbnDir = "makeblastdb"
elif isLegBlastDB():
self.dblDir = "formatdb"
elif os.path.exists("~/bin/makeblastdb"):
self.dbnDir = "~/bin/makeblastdb"
elif os.path.exists("/usr/local/ncbi/bin/makeblastdb"):
self.blastnDir = "/usr/local/ncbi/bin/makeblastdb"
elif os.path.exists("/usr/local/bin/makeblastdb"):
self.dbnDir = "/usr/local/bin/makeblastdb"
elif os.path.exists(self.pwd + "/makeblastdb"):
self.dbnDir = self.pwd + "/makeblastdb"
elif os.path.exists("/usr/local/ncbi/blast/bin/makeblastdb"):
self.dbnDir = "/usr/local/ncbi/blast/bin/makeblastdb"
elif os.path.exists("/usr/local/bin/formatdb"):
self.dblDir = "/usr/local/bin/formatdb"
elif os.path.exists("~/bin/formatdb"):
self.dblDir = "~/bin/formatdb"
elif os.path.exists(self.pwd + "/formatdb"):
self.dblDir = self.pwd + "/formatdb"
elif os.path.exists(self.pwd + "/makeblastdb.exe"):
self.dbnDir = self.pwd + "/makeblastdb.exe"
elif os.path.exists(self.pwd + "/formatdb.exe"):
self.dblDir = self.pwd + "/formatdb.exe"
else:
folderlist = []
for letter in string.uppercase:
if os.path.exists(letter + ":/program files/ncbi/"):
folders = os.listdir(letter + ":/program files/ncbi/")
for f in folders:
if f.upper().startswith("BLAST"):
folderlist.append(letter + ":/program files/ncbi/" + f)
folderlist.sort(reverse=True)
blastgot = False
if len(folderlist) > 0:
for f in folderlist:
if not blastgot and os.path.exists(f + "/bin/makeblastdb.exe"):
blastgot = True
self.dblDir = '"' + f + '/bin/makeblastdb"'
if self.blastnDir != None:
pass
elif isNewBlastn():
self.blastnDir = "blastn"
elif isLegBlastall():
self.blastlDir = "blastall"
elif os.path.exists("~/bin/blastn"):
self.blastnDir = "~/bin/blastn"
elif os.path.exists("/usr/local/ncbi/bin/blastn"):
self.blastnDir = "/usr/local/ncbi/bin/blastn"
elif os.path.exists("/usr/local/bin/blastn"):
self.blastnDir = "/usr/local/bin/blastn"
elif os.path.exists(self.pwd + "/blastn"):
self.blastnDir = self.pwd + "/blastn"
elif os.path.exists("/usr/local/ncbi/blast/bin/blastn"):
self.blastnDir = "/usr/local/ncbi/blast/bin/blastn"
elif os.path.exists("/usr/local/bin/blastall"):
self.blastlDir = "/usr/local/bin/blastall"
elif os.path.exists("~/bin/blastall"):
self.blastlDir = "~/bin/blastall"
elif os.path.exists(self.pwd + "/blastall"):
self.blastlDir = self.pwd + "/blastall"
elif os.path.exists(self.pwd + "/blastall.exe"):
self.blastlDir = self.pwd + "/blastall.exe"
elif os.path.exists(self.pwd + "/blastn.exe"):
self.blastnDir = self.pwd + "/blastn.exe"
else:
folderlist = []
for letter in string.uppercase:
if os.path.exists(letter + ":/program files/ncbi/"):
folders = os.listdir(letter + ":/program files/ncbi/")
for f in folders:
if f.upper().startswith("BLAST"):
folderlist.append(letter + ":/program files/ncbi/" + f)
folderlist.sort(reverse=True)
blastgot = False
if len(folderlist) > 0:
for f in folderlist:
if not blastgot and os.path.exists(f + "/bin/blastn.exe"):
blastgot = True
self.blastnDir = '"' + f + '/bin/blastn"'
if (
self.blastnDir == None
and self.blastlDir == None
or self.dbnDir == None
and self.dblDir == None
):
dlblast = tkMessageBox.askquestion(
"Blast not found", "Do you wish to download Blast?"
)
if dlblast != "no":
self.thedlblast = threading.Thread(target=self.downloadBlast)
self.thedlblast.start()
return None
tempdir = tkFileDialog.askdirectory(
title="Please select a directory with blastn and makeblastdb."
)
if tempdir == ():
tempdir = ""
if os.path.exists(tempdir + "/blastn.exe") and os.path.exists(
tempdir + "/makeblastdb.exe"
):
self.blastnDir = tempdir + "/blastn.exe"
self.dbnDir = tempdir + "/makeblastdb.exe"
elif os.path.exists(tempdir + "/blastn") and os.path.exists(
tempdir + "/makeblastdb"
):
self.blastnDir = tempdir + "/blastn"
self.dbnDir = tempdir + "/makeblastdb"
else:
self.processLab.config(
text="Performing blastn...\nInvadild directory.\nBlast not found."
)
self.blastnDir = None
self.dbnDir = None
return None
if self.workingDir == "":
self.processLab.config(text="Performing blastn...\nCancelled.")
return None
self.thegenblast = threading.Thread(target=self.genBlast2)
self.thegenblast.start()
self.thegenblast2 = threading.Thread(target=self.genBlastDot)
self.thegenblast2.start()
def genBlastDot(self):
while self.thegenblast.isAlive():
time.sleep(0.5)
self.processLab.config(text="Performing blastn.")
time.sleep(0.5)
self.processLab.config(text="Performing blastn..")
time.sleep(0.5)
self.processLab.config(text="Performing blastn...")
if self.blastlist.size() == self.genlist.size() - 1:
self.processLab.config(text="Performing blastn...\ncomplete.")
else:
self.processLab.config(
text="Blast has failed, please check genbank files and rerun."
)
def genBlast2(self):
self.blastlist.delete(0, END)
the_tempdb_dir = os.path.abspath(".") + "/tempdb"
for i in range(self.genlist.size() - 1):
if self.dbnDir != None:
subprocess.Popen(
self.dbnDir
+ " -dbtype nucl -out "
+ the_tempdb_dir
+ " -in "
+ str(i + 2)
+ ".easyfig.fa",
shell=True,
).wait()
elif self.dblDir != None:
subprocess.Popen(
self.dblDir
+ " -p F -t tempdb -n tempdb -i "
+ str(i + 2)
+ ".easyfig.fa",
shell=True,
).wait()
if self.blastnDir:
subprocess.Popen(
self.blastnDir
+ " -task blastn -db "
+ the_tempdb_dir
+ " -outfmt 6 -query "
+ str(i + 1)
+ ".easyfig.fa -out "
+ str(i + 1)
+ str(i + 2)
+ ".easyfig.out",
shell=True,
).wait()
elif self.blastlDir:
subprocess.Popen(
self.blastlDir
+ " -p blastn -d "
+ the_tempdb_dir
+ " -F F -m 8 -a 8 -i "
+ str(i + 1)
+ ".easyfig.fa -o "
+ str(i + 1)
+ str(i + 2)
+ ".easyfig.out",
shell=True,
).wait()
self.blastlist.insert(
END, self.workingDir + "/" + str(i + 1) + str(i + 2) + ".easyfig.out"
)
self.blastlist.xview_moveto(1)
if os.path.exists("tempdb.nhr"):
os.remove("tempdb.nhr")
if os.path.exists("tempdb.nin"):
os.remove("tempdb.nin")
if os.path.exists("error.log"):
os.remove("error.log")
if os.path.exists("tempdb.nsq"):
os.remove("tempdb.nsq")
if os.path.exists("formatdb.log"):
os.remove("formatdb.log")
os.chdir(self.pwd)
def genBlastX(self):
try:
if self.thegenblast.isAlive():
tkMessageBox.showerror("Please wait", "BLAST already running.")
return None
except:
pass
try:
if self.thethread.isAlive():
tkMessageBox.showerror("Please wait", "easyfig creating figure.")
return None
except:
pass
try:
if self.thedlblast.isAlive():
tkMessageBox.showerror("Please wait", "Blast is downloading.")
return None
except:
pass
self.workingDir = tkFileDialog.askdirectory(
title="Please select a working directory."
)
if self.workingDir == ():
self.processLab.config(text="Performing tblastx...\nCancelled.")
return
os.chdir(self.workingDir)
index = 0
if self.genlist.size() < 2:
tkMessageBox.showerror(
"Try again.",
"easyfig needs at least 2 genbank files to create blast files.",
)
else:
thegenlist = self.genlist.get(0, END)
for i in thegenlist:
index += 1
temp = self.gbk2fasta(
i[4:],
str(index) + ".easyfig.fa",
self.mincutlist[i[:2]],
self.maxcutlist[i[:2]],
)
if temp == 0:
return None
self.cutstate = (str(self.mincutlist), str(self.maxcutlist))
self.orderstate = self.genlist.get(0, END)
if self.blastnDir == None:
pass
elif self.blastnDir[-10:] == "blastn.exe":
if os.path.exists(self.blastnDir[:-10] + "tblastx.exe"):
self.blastnDir = self.blastnDir[:-10] + "tblastx.exe"
else:
self.blastnDir = None
elif self.blastnDir[-6:] == "blastn":
if os.path.exists(self.blastnDir[:-6] + "tblastx"):
self.blastnDir = self.blastnDir[:-6] + "tblastx"
else:
self.blastnDir = None
if self.dbnDir != None:
if not os.path.exists(self.dbnDir):
self.dbnDir = None
if self.blastnDir != None:
if not os.path.exists(self.blastnDir):
self.blastnDir = None
if self.dbnDir != None:
pass
elif isNewBlastDB():
self.dbnDir = "makeblastdb"
elif isLegBlastDB():
self.dblDir = "formatdb"
elif os.path.exists("~/bin/makeblastdb"):
self.dbnDir = "~/bin/makeblastdb"
elif os.path.exists("/usr/local/bin/makeblastdb"):
self.dbnDir = "/usr/local/bin/makeblastdb"
elif os.path.exists("./makeblastdb"):
self.dbnDir = "./makeblastdb"
elif os.path.exists("/usr/local/ncbi/bin/makeblastdb"):
self.blastnDir = "/usr/local/ncbi/bin/makeblastdb"
elif os.path.exists("/usr/local/ncbi/blast/bin/makeblastdb"):
self.dbnDir = "/usr/local/ncbi/blast/bin/makeblastdb"
elif os.path.exists("/usr/local/bin/formatdb"):
self.dblDir = "/usr/local/bin/formatdb"
elif os.path.exists("~/bin/formatdb"):
self.dblDir = "~/bin/formatdb"
elif os.path.exists("./formatdb"):
self.dblDir = "./formatdb"
elif os.path.exists("./makeblastdb.exe"):
self.dbnDir = "./makeblastdb.exe"
elif os.path.exists("./formatdb.exe"):
self.dblDir = "./formatdb.exe"
else:
folderlist = []
for letter in string.uppercase:
if os.path.exists(letter + ":/program files/ncbi/"):
folders = os.listdir(letter + ":/program files/ncbi/")
for f in folders:
if f.upper().startswith("BLAST"):
folderlist.append(letter + ":/program files/ncbi/" + f)
folderlist.sort(reverse=True)
blastgot = False
if len(folderlist) > 0:
for f in folderlist:
if not blastgot and os.path.exists(f + "/bin/makeblastdb.exe"):
blastgot = True
self.dblDir = '"' + f + '/bin/makeblastdb"'
if self.blastnDir != None:
pass
elif isNewTblastx():
self.blastnDir = "tblastx"
elif isLegBlastall():
self.blastlDir = "blastall"
elif os.path.exists("~/bin/tblastx"):
self.blastnDir = "~/bin/tblastx"
elif os.path.exists("/usr/local/ncbi/bin/tblastx"):
self.blastnDir = "/usr/local/ncbi/bin/tblastx"
elif os.path.exists("/usr/local/bin/tblastx"):
self.blastnDir = "/usr/local/bin/tblastx"
elif os.path.exists("./tblastx"):
self.blastnDir = "./tblastx"
elif os.path.exists("/usr/local/ncbi/blast/bin/tblastx"):
self.blastnDir = "/usr/local/ncbi/blast/bin/tblastx"
elif os.path.exists("/usr/local/bin/blastall"):
self.blastlDir = "/usr/local/bin/blastall"
elif os.path.exists("~/bin/blastall"):
self.blastlDir = "~/bin/blastall"
elif os.path.exists("./blastall"):
self.blastlDir = "./blastall"
elif os.path.exists("./tblastx.exe"):
self.blastnDir = "./tblastx.exe"
else:
folderlist = []
for letter in string.uppercase:
if os.path.exists(letter + ":/program files/ncbi/"):
folders = os.listdir(letter + ":/program files/ncbi/")
for f in folders:
if f.upper().startswith("BLAST"):
folderlist.append(letter + ":/program files/ncbi/" + f)
folderlist.sort(reverse=True)
blastgot = False
if len(folderlist) > 0:
for f in folderlist:
if not blastgot and os.path.exists(f + "/bin/tblastx.exe"):
blastgot = True
self.blastnDir = '"' + f + '/bin/tblastx.exe"'
if (
self.blastnDir == None
and self.blastlDir == None
or self.dbnDir == None
and self.dbl == None
):
dlblast = tkMessageBox.askquestion(
"Blast not found", "Do you wish to download Blast?"
)
if dlblast:
self.thedlblast = threading.Thread(target=self.downloadBlast)
self.thedlblast.start()
return None
tempdir = tkFileDialog.askdirectory(
title="Please select a directory with tblastx and makeblastdb."
)
if os.path.exists(tempdir + "/tblastx.exe") and os.path.exists(
tempdir + "/makeblastdb.exe"
):
self.blastnDir = tempdir + "/tblastx.exe"
self.dbnDir = tempdir + "/makeblastdb.exe"
elif os.path.exists(tempdir + "/tblastx") and os.path.exists(
tempdir + "/makeblastdb"
):
self.blastnDir = tempdir + "/tblastx"
self.dbnDir = tempdir + "/makeblastdb"
else:
self.processLab.config(
text="Performing blastn...\nInvadild directory.\nBlast not found."
)
return None
if self.workingDir == "":
self.processLab.config(text="Performing blastn...\nCancelled.")
return None
self.thegenblast = threading.Thread(target=self.genBlastX2)
self.thegenblast.start()
self.thegenblast2 = threading.Thread(target=self.genBlastXdot)
self.thegenblast2.start()
def genBlastXdot(self):
while self.thegenblast.isAlive():
time.sleep(0.5)
self.processLab.config(text="Performing tblastx.")
time.sleep(0.5)
self.processLab.config(text="Performing tblastx..")
time.sleep(0.5)
self.processLab.config(text="Performing tblastx...")
self.processLab.config(text="Performing tblastx...\ncomplete.")
def genBlastX2(self):
self.blastlist.delete(0, END)
for i in range(self.genlist.size() - 1):
if self.dbnDir != None:
subprocess.Popen(
self.dbnDir
+ " -dbtype nucl -out tempdb -in "
+ str(i + 2)
+ ".easyfig.fa",
shell=True,
).wait()
elif self.dblDir != None:
subprocess.Popen(
self.dblDir
+ " -p F -t tempdb -n tempdb -i "
+ str(i + 2)
+ ".easyfig.fa",
shell=True,
).wait()
if self.blastnDir:
subprocess.Popen(
self.blastnDir
+ " -db tempdb -outfmt 6 -query "
+ str(i + 1)
+ ".easyfig.fa -out "
+ str(i + 1)
+ str(i + 2)
+ ".easyfig.out",
shell=True,
).wait()
elif self.blastlDir:
subprocess.Popen(
self.blastlDir
+ " -p tblastx -d tempdb -F F -m 8 -a 8 -i "
+ str(i + 1)
+ ".easyfig.fa -o "
+ str(i + 1)
+ str(i + 2)
+ ".easyfig.out",
shell=True,
).wait()
self.blastlist.insert(
END, self.workingDir + "/" + str(i + 1) + str(i + 2) + ".easyfig.out"
)
self.blastlist.xview_moveto(1)
if os.path.exists("tempdb.nhr"):
os.remove("tempdb.nhr")
if os.path.exists("tempdb.nin"):
os.remove("tempdb.nin")
if os.path.exists("error.log"):
os.remove("error.log")
if os.path.exists("tempdb.nsq"):
os.remove("tempdb.nsq")
if os.path.exists("formatdb.log"):
os.remove("formatdb.log")
os.chdir(self.pwd)
    def annmod(self, event=None):
        """Open the "Subregions" window: one row per genome with its min/max
        cut coordinates and reverse flag, all editable."""
        # Only one instance of the window at a time.
        try:
            self.annwindow.destroy()
        except:
            pass
        self.annwindow = Toplevel()
        self.frame6 = Frame(self.annwindow)
        self.annwindow.title("Subregions")
        self.frangelab = Label(self.frame6, text="Range", font="TkDefaultFont 13 bold")
        self.frangelab.grid(row=0, column=2, columnspan=3)
        self.ffilelab = Label(
            self.frame6, text="Ann. file", font="TkDefaultFont 13 bold"
        )
        self.ffilelab.grid(row=1, column=1, pady=10)
        self.fminlab = Label(self.frame6, text="Min", font="TkDefaultFont 13 bold")
        self.fminlab.grid(row=1, column=2, pady=10)
        self.fdotdot = Label(self.frame6, text=" .. ")
        self.fdotdot.grid(row=1, column=3)
        self.fmaxlab = Label(self.frame6, text="Max", font="TkDefaultFont 13 bold")
        self.fmaxlab.grid(row=1, column=4, pady=10)
        self.frevlab = Label(self.frame6, text="Reverse", font="TkDefaultFont 13 bold")
        self.frevlab.grid(row=1, column=5, pady=10)
        # Four parallel list boxes (file / min / max / reverse) sharing one
        # scrollbar; the bindings keep their selections and scroll in sync.
        self.scrollbar2 = Scrollbar(self.frame6, orient=VERTICAL)
        self.fgenlist = Listbox(
            self.frame6, yscrollcommand=self.scrollbar2.set, exportselection=0
        )
        self.fgenlist.bind("<Button-1>", self.setselectedcuts)
        self.fgenlist.bind("<Double-Button-1>", self.doublecuts)
        self.fgenlist.bind("<MouseWheel>", self.onmousewheel)
        self.fgenlist.bind("<Button-4>", self.onmousewheel)
        self.fgenlist.bind("<Button-5>", self.onmousewheel)
        self.fgenlist.grid(row=2, column=1)
        self.fgenminlist = Listbox(
            self.frame6, yscrollcommand=self.scrollbar2.set, exportselection=0
        )
        self.fgenminlist.config(width=7)
        self.fgenminlist.bind("<Button-1>", self.setselectedcuts)
        self.fgenminlist.bind("<Double-Button-1>", self.doublecuts)
        self.fgenminlist.bind("<MouseWheel>", self.onmousewheel)
        self.fgenminlist.bind("<Button-4>", self.onmousewheel)
        self.fgenminlist.bind("<Button-5>", self.onmousewheel)
        self.fgenminlist.grid(row=2, column=2)
        self.fgenmaxlist = Listbox(
            self.frame6, yscrollcommand=self.scrollbar2.set, exportselection=0
        )
        self.fgenmaxlist.config(width=7)
        self.fgenmaxlist.bind("<Button-1>", self.setselectedcuts)
        self.fgenmaxlist.bind("<Double-Button-1>", self.doublecuts)
        self.fgenmaxlist.bind("<MouseWheel>", self.onmousewheel)
        self.fgenmaxlist.bind("<Button-4>", self.onmousewheel)
        self.fgenmaxlist.bind("<Button-5>", self.onmousewheel)
        self.fgenmaxlist.grid(row=2, column=4)
        self.fgenrevlist = Listbox(
            self.frame6, yscrollcommand=self.scrollbar2.set, exportselection=0
        )
        self.fgenrevlist.config(width=5)
        self.fgenrevlist.bind("<Button-1>", self.setselectedcuts)
        self.fgenrevlist.bind("<Double-Button-1>", self.doublecuts)
        self.fgenrevlist.bind("<MouseWheel>", self.onmousewheel)
        self.fgenrevlist.bind("<Button-4>", self.onmousewheel)
        self.fgenrevlist.bind("<Button-5>", self.onmousewheel)
        self.fgenrevlist.grid(row=2, column=5)
        self.scrollbar2.config(command=self.yview2)
        self.scrollbar2.grid(row=2, column=0, sticky=NS)
        annlist = self.genlist.get(0, END)
        annlistpostemp = self.genlist.curselection()
        # Populate rows; cut/reverse settings are keyed by the first two
        # characters of each list entry (presumably the numeric prefix —
        # TODO confirm against how genlist entries are built).
        for i in annlist:
            self.fgenlist.insert(END, i)
            self.fgenminlist.insert(END, self.mincutlist[i[:2]])
            self.fgenmaxlist.insert(END, self.maxcutlist[i[:2]])
            if self.revlist[i[:2]]:
                self.fgenrevlist.insert(END, "yes")
            else:
                self.fgenrevlist.insert(END, "no")
        self.fgenlist.xview_moveto(1)
        self.genmincut = StringVar()
        self.genmaxcut = StringVar()
        self.genrev = IntVar()
        self.mincutentry = Entry(self.frame6, textvariable=self.genmincut)
        self.mincutentry.config(width=7)
        self.mincutentry.grid(row=3, column=2)
        self.maxcutentry = Entry(self.frame6, textvariable=self.genmaxcut)
        self.maxcutentry.config(width=7)
        self.maxcutentry.grid(row=3, column=4)
        self.genrentry = Checkbutton(self.frame6, variable=self.genrev)
        self.genrentry.grid(row=3, column=5)
        # If a genome was already selected in the main window, preselect it
        # here and load its current values into the edit widgets.
        if len(annlist) > 0 and annlistpostemp != ():
            self.fgenlist.selection_set(annlistpostemp)
            self.fgenminlist.selection_set(annlistpostemp)
            self.fgenmaxlist.selection_set(annlistpostemp)
            self.fgenrevlist.selection_set(annlistpostemp)
            self.fgenlist.see(annlistpostemp)
            self.fgenminlist.see(annlistpostemp)
            self.fgenmaxlist.see(annlistpostemp)
            self.fgenrevlist.see(annlistpostemp)
            self.genmincut.set(self.fgenminlist.get(annlistpostemp))
            self.genmaxcut.set(self.fgenmaxlist.get(annlistpostemp))
            if self.fgenrevlist.get(annlistpostemp) == "yes":
                self.genrev.set(1)
            else:
                self.genrev.set(0)
        self.changecutsbutton = Button(
            self.frame6, text=" change cutoffs ", command=self.changecuts
        )
        self.changecutsbutton.grid(row=3, column=1, pady=10)
        self.annwindowclosebutton = Button(
            self.frame6, text="close", command=self.annwindowclose
        )
        self.annwindowclosebutton.grid(
            row=12, column=4, columnspan=2, sticky=E, pady=10
        )
        self.annwindow.geometry("+30+40")
        self.frame6.grid(padx=30, pady=10)
    def changecuts(self):
        """Apply the min/max/reverse edit widgets to the selected genome row,
        then advance the selection to the next row for rapid editing."""
        thepost = self.fgenlist.curselection()
        if thepost == ():
            tkMessageBox.showerror("Try again.", "Please select genome to change.")
            return
        else:
            thepost = int(thepost[0])
        # Listbox entries cannot be edited in place: delete + reinsert.
        self.fgenminlist.delete(thepost)
        self.fgenminlist.insert(thepost, self.genmincut.get())
        # Persist keyed by the first two characters of the row label.
        self.mincutlist[self.fgenlist.get(thepost)[:2]] = self.genmincut.get()
        self.fgenmaxlist.delete(thepost)
        self.fgenmaxlist.insert(thepost, self.genmaxcut.get())
        self.maxcutlist[self.fgenlist.get(thepost)[:2]] = self.genmaxcut.get()
        self.fgenrevlist.delete(thepost)
        if self.genrev.get() == 1:
            self.fgenrevlist.insert(thepost, "yes")
            self.revlist[self.fgenlist.get(thepost)[:2]] = True
        else:
            self.fgenrevlist.insert(thepost, "no")
            self.revlist[self.fgenlist.get(thepost)[:2]] = False
        # Move the (synchronised) selection down one row unless at the end.
        if not thepost == self.fgenlist.size() - 1:
            self.fgenlist.selection_clear(0, END)
            self.fgenlist.selection_set(thepost + 1, thepost + 1)
            self.fgenlist.see(thepost + 1)
            self.fgenminlist.selection_clear(0, END)
            self.fgenminlist.selection_set(thepost + 1, thepost + 1)
            self.fgenminlist.see(thepost + 1)
            self.fgenmaxlist.selection_clear(0, END)
            self.fgenmaxlist.selection_set(thepost + 1, thepost + 1)
            self.fgenmaxlist.see(thepost + 1)
            self.fgenrevlist.selection_clear(0, END)
            self.fgenrevlist.selection_set(thepost + 1, thepost + 1)
            self.fgenrevlist.see(thepost + 1)
def yview2(self, *args):
apply(self.fgenlist.yview, args)
apply(self.fgenminlist.yview, args)
apply(self.fgenmaxlist.yview, args)
apply(self.fgenrevlist.yview, args)
def onmousewheel(self, event):
return "break"
    def setselectedcuts(self, event):
        """Click handler: select the clicked row in all four list boxes and
        load that row's values into the min/max/reverse edit widgets."""
        selected = self.fgenlist.nearest(event.y)
        # Align the other three boxes to the main box's scroll position.
        tempypos = self.fgenlist.yview()[0]
        self.fgenminlist.yview_moveto(tempypos)
        self.fgenmaxlist.yview_moveto(tempypos)
        self.fgenrevlist.yview_moveto(tempypos)
        self.fgenlist.selection_clear(0, END)
        self.fgenlist.selection_set(selected, selected)
        self.fgenminlist.selection_clear(0, END)
        self.fgenminlist.selection_set(selected, selected)
        self.fgenmaxlist.selection_clear(0, END)
        self.fgenmaxlist.selection_set(selected, selected)
        self.fgenrevlist.selection_clear(0, END)
        self.fgenrevlist.selection_set(selected, selected)
        self.genmincut.set(self.fgenminlist.get(selected))
        self.genmaxcut.set(self.fgenmaxlist.get(selected))
        if self.fgenrevlist.get(selected) == "yes":
            self.genrev.set(1)
        else:
            self.genrev.set(0)
    def doublecuts(self, event):
        """Double-click handler: open a small editor window for the clicked
        row's min/max/reverse values (saved by doublecutsclose)."""
        # Only one editor window at a time.
        try:
            self.doublecutswin.destroy()
        except:
            pass
        self.doublecutsel = self.fgenlist.nearest(event.y)
        self.doublecutswin = Toplevel(self.frame6)
        self.doublecutswin.title("Change subregion")
        self.frame10 = Frame(self.doublecutswin)
        self.dublabel1 = Label(
            self.frame10,
            text="Modify file " + self.fgenlist.get(self.doublecutsel)[:3],
            font="TkDefaultFont 13 bold",
        )
        self.dublabel1.grid(row=0, column=0, pady=5)
        self.dublabel2 = Label(self.frame10, text="Min Cutoff:")
        self.dublabel2.grid(row=1, column=0)
        self.dublabel3 = Label(self.frame10, text="Max Cutoff:")
        self.dublabel3.grid(row=2, column=0)
        self.dublabel4 = Label(self.frame10, text="Reverse:")
        self.dublabel4.grid(row=3, column=0)
        # Pre-fill the editor with the row's current values.
        self.dublabel2str = StringVar(value=self.fgenminlist.get(self.doublecutsel))
        self.dublabel3str = StringVar(value=self.fgenmaxlist.get(self.doublecutsel))
        if self.fgenrevlist.get(self.doublecutsel) == "yes":
            self.dublabel4int = IntVar(value=1)
        else:
            self.dublabel4int = IntVar(value=0)
        self.dublabel2ent = Entry(self.frame10, textvariable=self.dublabel2str)
        self.dublabel2ent.grid(row=1, column=1)
        self.dublabel3ent = Entry(self.frame10, textvariable=self.dublabel3str)
        self.dublabel3ent.grid(row=2, column=1)
        self.dublabel4ent = Checkbutton(self.frame10, variable=self.dublabel4int)
        self.dublabel4ent.grid(row=3, column=1)
        self.doublecutsclosebut = Button(
            self.frame10, text="Save & Close", command=self.doublecutsclose
        )
        self.doublecutsclosebut.grid(row=4, column=1, sticky=E)
        self.doublecutswin.geometry("+40+50")
        self.frame10.grid(padx=20, pady=20)
    def doublecutsclose(self):
        """Save the subregion editor's values back into the list boxes and the
        persistent cut/reverse dicts, then close the editor window."""
        # Listbox entries cannot be edited in place: delete + reinsert.
        self.fgenminlist.delete(self.doublecutsel)
        self.fgenminlist.insert(self.doublecutsel, self.dublabel2str.get())
        # Persisted settings are keyed by the first two characters of the row.
        self.mincutlist[
            self.fgenlist.get(self.doublecutsel)[:2]
        ] = self.dublabel2str.get()
        self.fgenmaxlist.delete(self.doublecutsel)
        self.fgenmaxlist.insert(self.doublecutsel, self.dublabel3str.get())
        self.maxcutlist[
            self.fgenlist.get(self.doublecutsel)[:2]
        ] = self.dublabel3str.get()
        self.fgenrevlist.delete(self.doublecutsel)
        if self.dublabel4int.get() == 1:
            self.fgenrevlist.insert(self.doublecutsel, "yes")
            self.revlist[self.fgenlist.get(self.doublecutsel)[:2]] = True
        else:
            self.fgenrevlist.insert(self.doublecutsel, "no")
            self.revlist[self.fgenlist.get(self.doublecutsel)[:2]] = False
        self.doublecutswin.destroy()
def annwindowclose(self):
self.annwindow.destroy()
def openFile1(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen1.set(filename)
def openFile2(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen2.set(filename)
def openFile3(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen3.set(filename)
def openFile4(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen4.set(filename)
def openFile5(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen5.set(filename)
def openFile6(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen6.set(filename)
def openFile7(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen7.set(filename)
def openFile8(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen8.set(filename)
def openFile9(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen9.set(filename)
def openFile0(self):
filename = tkFileDialog.askopenfilename(
filetypes=[
(
"genbank/embl/fasta",
(
"*.gbk",
"*.embl",
"*.gb",
"*.fa",
"*.fna",
"*.dna",
"*.fas",
"*.fasta",
),
),
("All files", "*"),
]
)
self.gen0.set(filename)
def openBlast1(self):
filename = tkFileDialog.askopenfilename()
self.blast1.set(filename)
def openBlast2(self):
filename = tkFileDialog.askopenfilename()
self.blast2.set(filename)
def openBlast3(self):
filename = tkFileDialog.askopenfilename()
self.blast3.set(filename)
def openBlast4(self):
filename = tkFileDialog.askopenfilename()
self.blast4.set(filename)
def openBlast5(self):
filename = tkFileDialog.askopenfilename()
self.blast5.set(filename)
def openBlast6(self):
filename = tkFileDialog.askopenfilename()
self.blast6.set(filename)
def openBlast7(self):
filename = tkFileDialog.askopenfilename()
self.blast7.set(filename)
def openBlast8(self):
filename = tkFileDialog.askopenfilename()
self.blast8.set(filename)
def openBlast9(self):
filename = tkFileDialog.askopenfilename()
self.blast9.set(filename)
def getoutfile(self):
if self.filetype.get() == "Bitmap (bmp)":
filename = tkFileDialog.asksaveasfilename(
filetypes=[("bmp", "*.bmp"), ("All files", "*")]
)
else:
filename = tkFileDialog.asksaveasfilename(
filetypes=[("svg", "*.svg"), ("All files", "*")]
)
self.outfile.set(filename)
    def handleDownload(self, block):
        # ftplib retrbinary callback: append each chunk to the open download
        # file and update the percent label only when the displayed percentage
        # actually changes (avoids flooding the Tk event loop).
        self.downloadFile.write(block)
        # NOTE(review): relies on Python 2 integer division for whole-number
        # percentages; under Python 3 these would be floats.
        if (
            self.thecount * 100 / self.totalBytes
            != (self.thecount + len(block)) * 100 / self.totalBytes
        ):
            # The label may have been destroyed mid-download; ignore failures.
            try:
                self.processLab.config(
                    text="Finding Blast... Done\nDownloading... "
                    + str((self.thecount + len(block)) * 100 / self.totalBytes)
                    + "%"
                )
            except:
                pass
        self.thecount += len(block)
def downloadBlastAuto(self):
self.thedlblast = threading.Thread(target=self.downloadBlast)
self.thedlblast.start()
    def downloadBlastMan(self):
        """Point the user at the right NCBI BLAST+ download for their OS and
        architecture, then open the FTP download page in their browser."""
        theplatform = platform.system()
        architecture = platform.architecture()[0]
        # One branch per platform/arch; only the suggested archive name in the
        # message differs, the download URL is identical.
        if theplatform == "Linux" and architecture == "32bit":
            ok = tkMessageBox.askokcancel(
                "Downloading Blast Manually",
                "Easyfig suggests downloading\nand extracting\nncbi-blast-x.x.x+-ia32-linux.tar.gz\n\
clicking ok will bring up\nthe download location\nin your browser.",
            )
            if ok:
                webbrowser.open_new(
                    "ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/"
                )
        elif theplatform == "Linux" and architecture == "64bit":
            ok = tkMessageBox.askokcancel(
                "Downloading Blast Manually",
                "Easyfig suggests downloading\nand extracting\nncbi-blast-x.x.x+-x64-linux.tar.gz\n\
clicking ok will bring up\nthe download location\nin your browser.",
            )
            if ok:
                webbrowser.open_new(
                    "ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/"
                )
        elif theplatform == "Windows" and architecture == "32bit":
            ok = tkMessageBox.askokcancel(
                "Downloading Blast Manually",
                "Easyfig suggests downloading\nand running\nncbi-blast-x.x.x+-win32.exe\n\
clicking ok will bring up\nthe download location\nin your browser.",
            )
            if ok:
                webbrowser.open_new(
                    "ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/"
                )
        elif theplatform == "Windows" and architecture == "64bit":
            ok = tkMessageBox.askokcancel(
                "Downloading Blast Manually",
                "Easyfig suggests downloading\nand running\nncbi-blast-x.x.x+-win64.exe\n\
clicking ok will bring up\nthe download location\nin your browser.",
            )
            if ok:
                webbrowser.open_new(
                    "ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/"
                )
        elif theplatform == "Darwin":
            ok = tkMessageBox.askokcancel(
                "Downloading Blast Manually",
                "Easyfig suggests downloading\nand running\nncbi-blast-x.x.x+.dmg\n\
clicking ok will bring up\nthe download location\nin your browser.",
            )
            if ok:
                webbrowser.open_new(
                    "ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/"
                )
        else:
            # Unknown platform: suggest building from source.
            ok = tkMessageBox.askokcancel(
                "Downloading Blast Manually",
                "Easyfig suggests downloading\nand compiling\nncbi-blast-x.x.x+.src.tar.gz\n\
clicking ok will bring up\nthe download location\nin your browser.",
            )
            if ok:
                webbrowser.open_new(
                    "ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/"
                )
def chooseBlastDir(self):
tempdir = tkFileDialog.askdirectory(
title="Please select a directory with blastn and makeblastdb."
)
if tempdir == () or tempdir == "":
return
if os.path.exists(tempdir + "/blastn.exe") and os.path.exists(
tempdir + "/makeblastdb.exe"
):
self.blastnDir = tempdir + "/blastn.exe"
self.dbnDir = tempdir + "/makeblastdb.exe"
elif os.path.exists(tempdir + "/blastn") and os.path.exists(
tempdir + "/makeblastdb"
):
self.blastnDir = tempdir + "/blastn"
self.dbnDir = tempdir + "/makeblastdb"
else:
tkMessageBox.showerror("Try again.", "Blast not found in Directory.")
self.blastnDir = None
self.dbnDir = None
    def downloadBlast(self):
        """Download the latest NCBI BLAST+ release for this OS/arch over FTP,
        extract it, and move the needed binaries into the working directory.

        Runs in a background thread (see downloadBlastAuto); progress and
        errors are reported via the processLab status label.
        """
        theplatform = platform.system()
        architecture = platform.architecture()[0]
        self.processLab.config(text="Finding Blast...")
        try:
            ftp = FTP("ftp.ncbi.nlm.nih.gov")
            ftp.login()
            ftp.cwd("blast/executables/blast+/LATEST/")
            files = []
            ftp.dir(files.append)
        except:
            self.processLab.config(
                text="Unable to Create FTP \nconnection.\nPlease dowload manually."
            )
            return
        filename = None
        # A 32-bit Python on 64-bit Windows reports "32bit"; this env var
        # reveals the real OS architecture.
        try:
            true_platform = os.environ["PROCESSOR_ARCHITEW6432"]
            if true_platform == "AMD64":
                architecture = "64bit"
        except KeyError:
            pass
        # Pick the archive matching this platform from the FTP listing
        # (field 8 is the file name, field 4 the size in bytes).
        for line in files:
            if (
                theplatform == "Linux"
                and architecture == "32bit"
                and line.split()[8][-17:] == "ia32-linux.tar.gz"
            ):
                filename = line.split()[8]
                self.totalBytes = int(line.split()[4])
            elif (
                theplatform == "Linux"
                and architecture == "64bit"
                and line.split()[8][-16:] == "x64-linux.tar.gz"
            ):
                filename = line.split()[8]
                self.totalBytes = int(line.split()[4])
            elif (
                theplatform == "Windows"
                and architecture == "32bit"
                and line.split()[8][-17:] == "ia32-win32.tar.gz"
            ):
                filename = line.split()[8]
                self.totalBytes = int(line.split()[4])
            elif (
                theplatform == "Windows"
                and architecture == "64bit"
                and line.split()[8][-16:] == "x64-win64.tar.gz"
            ):
                filename = line.split()[8]
                self.totalBytes = int(line.split()[4])
            elif (
                theplatform == "Darwin"
                and line.split()[8][-23:] == "universal-macosx.tar.gz"
            ):
                filename = line.split()[8]
                self.totalBytes = int(line.split()[4])
        if filename == None:
            self.processLab.config(
                text="Unable to download blast.\nPlease dowload manually."
            )
            return
        self.thecount = 0
        try:
            self.downloadFile = open(filename, "wb")
        except:
            self.processLab.config(
                text="Unable to download blast.\nPlease dowload manually."
            )
            return
        # handleDownload writes each chunk and updates the percent label.
        ftp.retrbinary("RETR " + filename, self.handleDownload)
        self.downloadFile.close()
        self.processLab.config(text="Downloading... Complete\nExtracting file...")
        try:
            tfile = tarfile.open(filename, "r:gz")
            tfile.extractall()
        except:
            self.processLab.config(
                text="Unable to download blast.\nPlease dowload manually."
            )
            return
        # Archive root is e.g. "ncbi-blast-2.x.x+"; move just the three
        # binaries we need and discard the rest.
        filenamedir = filename.split("+")[0] + "+"
        if theplatform == "Windows":
            try:
                shutil.move(filenamedir + "/bin/makeblastdb.exe", self.pwd)
            except:
                pass
            try:
                shutil.move(filenamedir + "/bin/blastn.exe", self.pwd)
            except:
                pass
            try:
                shutil.move(filenamedir + "/bin/tblastx.exe", self.pwd)
            except:
                pass
        else:
            try:
                shutil.move(filenamedir + "/bin/makeblastdb", self.pwd)
            except:
                pass
            try:
                shutil.move(filenamedir + "/bin/blastn", self.pwd)
            except:
                pass
            try:
                shutil.move(filenamedir + "/bin/tblastx", self.pwd)
            except:
                pass
        tfile.close()
        shutil.rmtree(filenamedir)
        os.remove(filename)
        if os.path.exists(self.pwd + "/blastn") or os.path.exists(
            self.pwd + "/blastn.exe"
        ):
            self.processLab.config(text="Extracting file... Done\nBLAST+ installed.")
        else:
            self.processLab.config(
                text="Unable to download blast.\nPlease dowload manually."
            )
def gbk2fasta(genbank, out, mincut, maxcut):
    """Convert a genbank/EMBL/(multi)fasta annotation file to plain fasta.

    genbank: path of the input annotation file.
    out:     path of the fasta file to write; its basename becomes the header.
    mincut:  1-based start of the slice to keep (values < 1 clamp to 1).
    maxcut:  1-based end of the slice, or the string "Max" for "to the end".
             If maxcut <= mincut the slice wraps around the origin
             (circular sequence).
    Returns 1 on success, 0 on failure (a message is printed to stdout).
    """
    getseq = False  # inside a genbank ORIGIN block
    getembl = False  # inside an EMBL "SQ Sequence" block
    getmultifa = False  # reading (multi-)fasta records
    seq = ""
    try:
        mincut = int(mincut)
        if mincut < 1:
            mincut = 1
        if maxcut != "Max":
            maxcut = int(maxcut)
            if maxcut < 1:
                maxcut = 1
    except:
        print ("Annotation slice values not valid.")
    try:
        gen = open(genbank)
        outfile = open(out, "w")
        for line in gen:
            if line.startswith("ORIGIN"):
                getseq = True
            elif line.startswith("SQ Sequence"):
                getembl = True
            elif line.startswith(">"):
                if getmultifa:
                    # Contig boundary in a multifasta: marked now, replaced
                    # later with a run of "n"s scaled to sequence length.
                    seq += "qqq"
                else:
                    getmultifa = True
            elif line.startswith("//"):
                getseq = False
                getembl = False
            elif getseq:
                seq += "".join(line.split()[1:])
            elif getembl:
                seq += "".join(line.split()[:-1])
            elif getmultifa:
                seq += line.rstrip()
        gen.close()  # was never closed before
        if getmultifa:
            # Fasta input must contain only nucleotide / ambiguity codes.
            getset = set(seq)
            rightchars = set("atgcATGCqnNuUyYkKmMsSwWbBdDhHvVxXrR-")
            isitgood = True
            for i in getset:
                if not i in rightchars:
                    isitgood = False
            if not isitgood:
                print ("Annotation file contains invalid characters. Check genbank/EMBL contains no lines starting with > or that fasta file contains only valid nucleotides")
                outfile.close()
                return 0
        # Header is the basename of the output path.  Fixed: split("/")[1]
        # returned the first directory component for nested paths.
        if "/" in out:
            outfile.write(">" + out.split("/")[-1] + "\n")
        else:
            outfile.write(">" + out + "\n")
        if maxcut == "Max":
            maxcut = len(seq)
        if mincut == 1 and maxcut == len(seq):
            if getmultifa:
                seq = seq.replace("qqq", "n" * int(len(seq) / 500))
            outfile.write(seq)
        elif mincut < maxcut:
            seq = seq[mincut - 1 : maxcut]
            if getmultifa:
                seq = seq.replace("qqq", "n" * int(len(seq) / 500))
            outfile.write(seq)
        else:
            # maxcut <= mincut: wrap around the origin (circular genome).
            seq = seq[mincut - 1 :] + seq[:maxcut]
            if getmultifa:
                seq = seq.replace("qqq", "n" * int(len(seq) / 500))
            outfile.write(seq)
        outfile.close()  # was never closed before
        if len(seq) == 0:
            print ("There is no sequence in " + genbank + ".")
            return 0
        else:
            return 1
    except:
        # NOTE(review): the bare except reports every failure as a missing
        # file, masking other I/O or type errors the same way.
        print (genbank + " does not exist.")
        return 0
def getGCcontent(filename, windsize, step, mincut, maxcut):
    """Sliding-window GC content for a genbank/EMBL annotation file.

    Returns one value per *step* bases: the window's GC fraction minus 0.5
    (so 0.0 marks 50% GC), or None if the file cannot be read or holds no
    sequence.  maxcut may be "Max"; maxcut <= mincut wraps around the origin.
    """
    try:
        gen = open(filename)
        getseq = False  # inside a genbank ORIGIN block
        getembl = False  # inside an EMBL "SQ Sequence" block
        seq = ""
        for line in gen:
            if line.startswith("ORIGIN"):
                getseq = True
            elif line.startswith("SQ Sequence"):
                getembl = True
            elif line.startswith("//"):
                getseq = False
                getembl = False
            elif getseq:
                seq += "".join(line.split()[1:])
            elif getembl:
                seq += "".join(line.split()[:-1])
        gen.close()
        seq = seq.upper()
    except:
        print ("Annotation file " + filename + " not valid.")
        return None
    if len(seq) == 0:
        print ("Annotation file " + filename + " not valid.")
        return None
    if maxcut == "Max":
        seq = seq[int(mincut) - 1 :]
    elif int(maxcut) <= int(mincut):
        seq = seq[int(mincut) - 1 :] + seq[: int(maxcut) + 1]
    else:
        seq = seq[int(mincut) - 1 : int(maxcut) + 1]
    # Floor division keeps the window bounds integral (plain / yields a float
    # under Python 3 and breaks the slice below; // is identical in Python 2).
    window1 = int(windsize) // 2
    window2 = int(windsize) - window1
    thearray = []
    for i in range(0, len(seq), int(step)):
        # Window centred on i, clipped at the left edge.
        seqstring = seq[max([0, i - window1]) : i + window2]
        thearray.append(
            (seqstring.count("G") + seqstring.count("C")) * 1.0 / len(seqstring) - 0.5
        )
    return thearray
def getGCskew(filename, windsize, step, mincut, maxcut):
    """Sliding-window GC skew ((G - C) / (G + C)) for a genbank/EMBL file.

    Returns one value per *step* bases (0 where a window holds no G or C),
    or None if the file cannot be read or holds no sequence.  maxcut may be
    "Max"; maxcut <= mincut wraps around the origin.
    """
    try:
        getseq = False  # inside a genbank ORIGIN block
        getembl = False  # inside an EMBL "SQ Sequence" block
        seq = ""
        gen = open(filename)
        for line in gen:
            if line.startswith("ORIGIN"):
                getseq = True
            elif line.startswith("SQ Sequence"):
                getembl = True
            elif line.startswith("//"):
                getseq = False
                getembl = False
            elif getseq:
                seq += "".join(line.split()[1:])
            elif getembl:
                seq += "".join(line.split()[:-1])
        gen.close()
        seq = seq.upper()
    except:
        print ("Annotation file " + filename + " not valid.")
        return None
    if len(seq) == 0:
        print ("Annotation file " + filename + " not valid.")
        return None
    if maxcut == "Max":
        seq = seq[int(mincut) - 1 :]
    elif int(maxcut) <= int(mincut):
        seq = seq[int(mincut) - 1 :] + seq[: int(maxcut) + 1]
    else:
        seq = seq[int(mincut) - 1 : int(maxcut) + 1]
    # Floor division keeps the window bounds integral (plain / yields a float
    # under Python 3 and breaks the slice below; // is identical in Python 2).
    window1 = int(windsize) // 2
    window2 = int(windsize) - window1
    thearray = []
    for i in range(0, len(seq), int(step)):
        seqstring = seq[max([0, i - window1]) : i + window2]
        gcount = seqstring.count("G")
        ccount = seqstring.count("C")
        # Windows with no G or C divide by zero; record zero skew instead.
        try:
            thearray.append((gcount - ccount) * 1.0 / (gcount + ccount))
        except:
            thearray.append(0)
    return thearray
def getCoverage(filename, filename2, mincut, maxcut):
    # DEFINITION: reads the reference sequence from an annotation file and the
    # contigs/reads from an ACE assembly file, then maps each contig's read
    # depth onto the reference.
    # REQUIRES: a valid annotation file (genbank/EMBL) and a valid ace file;
    # depends on a `read` class defined elsewhere in this file and on
    # string.maketrans (Python 2 only).
    # RETURNS: a list with one per-base coverage value for the (sliced)
    # reference sequence, or None if the annotation file is not valid.
    seq = ""
    getseq = False  # inside a genbank ORIGIN block
    getembl = False  # inside an EMBL "SQ Sequence" block
    try:
        gen = open(filename)
        for line in gen:
            if line.startswith("ORIGIN"):
                getseq = True
            elif line.startswith("SQ Sequence"):
                getembl = True
            elif line.startswith("//"):
                getseq = False
                getembl = False
            elif getseq:
                seq += "".join(line.split()[1:])
            elif getembl:
                seq += "".join(line.split()[:-1])
        gen.close()
    except:
        print ("Annotation file " + filename + " not valid.")
        return None
    if len(seq) == 0:
        print ("Annotation file " + filename + " not valid.")
        return None
    seq = seq.lower()
    # Apply the same min/max slice semantics as the other graph functions
    # (maxcut == "Max" means to the end; maxcut <= mincut wraps the origin).
    if maxcut == "Max":
        seq = seq[int(mincut) - 1 :]
    elif int(maxcut) <= int(mincut):
        seq = seq[int(mincut) - 1 :] + seq[: int(maxcut) + 1]
    else:
        seq = seq[int(mincut) - 1 : int(maxcut) + 1]
    outlist = [0 for i in range(len(seq))]  # per-base coverage, default 0
    readlist = []  # list of reads to be added to the contig class
    index = 0  # switches to 1 once program has dealt with the initial contig
    # iterates through the file, determines what information is contained in
    # each line, then reads it to the right location
    transtab = string.maketrans("atgc", "tacg")  # Python 2 only
    acefile = open(filename2)
    for line in acefile:
        # "CO " starts a new contig: flush the previous contig's coverage
        # into outlist, then start reading the new contig's name/sequence.
        if line.startswith("CO "):
            if index != 0:
                # Tally how many reads overlap each 1-based contig position.
                freqDict = {}
                for j in readlist:
                    for k in range(j.startpos, (j.startpos + j.readLength)):
                        if k in freqDict:
                            freqDict[k] += 1
                        else:
                            freqDict[k] = 1
                coverageList = []
                # "*" columns are assembly gaps and carry no coverage value.
                for j in range(1, len(contigSeq) + 1):
                    if contigSeq[j - 1] != "*":
                        coverageList.append(freqDict[j])
                contigSeq = contigSeq.lower()
                # Locate the contig in the reference; if not found forwards,
                # try its reverse complement.
                thepos = seq.find(contigSeq)
                if thepos != -1:
                    outlist = (
                        outlist[:thepos]
                        + coverageList
                        + outlist[thepos + len(coverageList) :]
                    )
                else:
                    contigSeq = contigSeq[::-1]
                    contigSeq = contigSeq.translate(transtab)
                    thepos = seq.find(contigSeq)
                    if thepos != -1:
                        coverageList.reverse()
                        outlist = (
                            outlist[:thepos]
                            + coverageList
                            + outlist[thepos + len(coverageList) :]
                        )
                readlist = []
            index = 1
            contigSeq = ""
            contigName = line.split()[
                1
            ]  # splits the line into a list with elements seperated by whitespace characters
            # then returns the second element of that list (the name)
            readnumber = 0  # initiates the read number used to determine where the readsequence will be added
            # creates a object of class read with the name and location within the contig, leaves sequence as the
            # empty string to be read in later
        elif line.startswith("BQ"):
            # Base-quality section: stop accumulating contig sequence.
            index = 2
        elif line.startswith("AF "):
            readIt = (
                line.split()
            )  # splits the line into a list of strings seperated by whitespace characters
            readName = readIt[1]  # the name of the read
            readPos = int(readIt[3])  # the position of the read within the contig
            readInstance = read(
                readName, readPos, None
            )  # creates an instance of class read
            readlist.append(readInstance)  # appends to list
        elif index == 1:
            contigSeq += line[:-1]
        elif line.startswith("QA "):
            # Quality clipping: adjust the read's start and usable length.
            readlist[readnumber].startpos = (
                readlist[readnumber].startpos + int(line.split()[1]) - 1
            )
            readlist[readnumber].readLength = (
                int(line.split()[2]) - int(line.split()[1]) + 1
            )
            readnumber += 1
    # Flush the final contig (same logic as the "CO " branch above).
    freqDict = {}
    for j in readlist:
        for k in range(j.startpos, (j.startpos + j.readLength)):
            if k in freqDict:
                freqDict[k] += 1
            else:
                freqDict[k] = 1
    coverageList = []
    for j in range(1, len(contigSeq) + 1):
        if contigSeq[j - 1] != "*":
            coverageList.append(freqDict[j])
    contigSeq = contigSeq.lower()
    thepos = seq.find(contigSeq)
    if thepos != -1:
        outlist = (
            outlist[:thepos] + coverageList + outlist[thepos + len(coverageList) :]
        )
    else:
        contigSeq = contigSeq[::-1]
        contigSeq = contigSeq.translate(transtab)
        thepos = seq.find(contigSeq)
        if thepos != -1:
            coverageList.reverse()
            outlist = (
                outlist[:thepos] + coverageList + outlist[thepos + len(coverageList) :]
            )
    return outlist
def getCustom(filename):
    """Read a tab-delimited numeric graph file into per-column value lists.

    The first line fixes the number of columns; later lines append to each
    column (empty cells are skipped).  Returns a list of float lists, or
    None (with a message printed) if the file is missing or malformed.
    """
    try:
        thearray = []
        gen = open(filename)
        templine = gen.readline().rstrip().split("\t")
        for i in templine:
            thearray.append([float(i)])
        for line in gen:
            templine = line.rstrip().split("\t")
            for i in range(len(templine)):
                if templine[i] != "":
                    thearray[i].append(float(templine[i]))
        gen.close()  # was never closed before
        return thearray
    except:
        print (filename + " not valid graph file.")
        return None
def genBlast(inlist, cutlist):
    """Command-line blastn pipeline: export each genome in inlist to fasta
    (applying its (min, max) cut from cutlist), blastn each adjacent pair,
    and return [genome1, comparison12, genome2, comparison23, ...].

    Relies on the isNewBlastDB/isLegBlastDB/isNewBlastn/isLegBlastall helpers
    defined elsewhere in this file to detect BLAST+ vs legacy installations;
    exits the program if neither is found.
    """
    try:
        os.mkdir("temp_easyfig")
    except:
        pass
    num = 1
    # Export genome N to temp_easyfig/N.easyfig.fa with its cuts applied.
    for i in inlist:
        gbk2fasta(
            i,
            "temp_easyfig/" + str(num) + ".easyfig.fa",
            cutlist[num - 1][0],
            cutlist[num - 1][1],
        )
        num += 1
    outlist = []
    for i in range(len(inlist) - 1):
        # Build a nucleotide database from genome i+2.
        if isNewBlastDB():
            subprocess.Popen(
                "makeblastdb -dbtype nucl -out temp_easyfig/tempdb -in temp_easyfig/"
                + str(i + 2)
                + ".easyfig.fa",
                shell=True,
            ).wait()
            print ("makeblastdb -dbtype nucl -out temp_easyfig/tempdb -in temp_easyfig/" + str(
                i + 2
            ) + ".easyfig.fa")
        elif isLegBlastDB():
            subprocess.Popen(
                "formatdb -p F -t tempdb -n temp_easyfig/tempdb -i temp_easyfig/"
                + str(i + 2)
                + ".easyfig.fa",
                shell=True,
            ).wait()
        else:
            print ("Could not find BLAST.")
            sys.exit()
        # Query genome i+1 against it, tabular output.
        if isNewBlastn():
            subprocess.Popen(
                "blastn -task blastn -db temp_easyfig/tempdb -outfmt 6 -query temp_easyfig/"
                + str(i + 1)
                + ".easyfig.fa -out temp_easyfig/"
                + str(i + 1)
                + str(i + 2)
                + ".easyfig.out",
                shell=True,
            ).wait()
        elif isLegBlastall():
            subprocess.Popen(
                "blastall -p blastn -d temp_easyfig/tempdb -F F -m 8 -a 8 -i temp_easyfig/"
                + str(i + 1)
                + ".easyfig.fa -o temp_easyfig/"
                + str(i + 1)
                + str(i + 2)
                + ".easyfig.out",
                shell=True,
            ).wait()
        else:
            print ("Could not find BLAST.")
            sys.exit()
        outlist.append(inlist[i])
        outlist.append("temp_easyfig/" + str(i + 1) + str(i + 2) + ".easyfig.out")
    outlist.append(inlist[-1])
    return outlist
def genTBlastX(inlist, cutlist):
    """Command-line tblastx pipeline: like genBlast but runs tblastx from
    inside a fresh temp_easyfig/ directory; returns
    [genome1, comparison12, genome2, comparison23, ...].

    Exits if temp_easyfig already exists or no BLAST installation is found.
    """
    pwd = os.getcwd()
    if os.path.exists("temp_easyfig"):
        print ("please run from a directory without the folder temp_easyfig")
        sys.exit()
    os.mkdir("temp_easyfig")
    os.chdir("temp_easyfig")
    num = 1
    outlist = []
    # Export each genome; absolute/home paths are used as-is, relative paths
    # are resolved against the original working directory.
    for i in inlist:
        if i[0] in ["/", "\\", "~"]:
            thepath = i
        else:
            thepath = "../" + i
        gbk2fasta(
            thepath, str(num) + ".easyfig.fa", cutlist[num - 1][0], cutlist[num - 1][1]
        )
        num += 1
    for i in range(len(inlist) - 1):
        # Build a nucleotide database from genome i+2.
        if isNewBlastDB():
            subprocess.Popen(
                "makeblastdb -dbtype nucl -out tempdb -in "
                + str(i + 2)
                + ".easyfig.fa",
                shell=True,
            ).wait()
        elif isLegBlastDB():
            subprocess.Popen(
                "formatdb -p F -t tempdb -n tempdb -i " + str(i + 2) + ".easyfig.fa",
                shell=True,
            ).wait()
        else:
            print ("Could not find BLAST.")
            sys.exit()
        # Query genome i+1 against it with tblastx, tabular output.
        if isNewTblastx():
            subprocess.Popen(
                "tblastx -db tempdb -outfmt 6 -query "
                + str(i + 1)
                + ".easyfig.fa -out "
                + str(i + 1)
                + str(i + 2)
                + ".easyfig.out",
                shell=True,
            ).wait()
        elif isLegBlastall():
            subprocess.Popen(
                "blastall -p tblastx -d tempdb -F F -m 8 -a 8 -i "
                + str(i + 1)
                + ".easyfig.fa -o "
                + str(i + 1)
                + str(i + 2)
                + ".easyfig.out",
                shell=True,
            ).wait()
        else:
            print ("Could not find BLAST.")
            sys.exit()
        outlist.append(inlist[i])
        outlist.append(os.getcwd() + "/" + str(i + 1) + str(i + 2) + ".easyfig.out")
    os.chdir(pwd)
    outlist.append(inlist[-1])
    return outlist
global abortCaptain
# Command-line mode defaults; each may be overridden by a flag parsed below.
# --- blast hit filtering ---
minlength = 0
mineval = 0.001
minIdent = 0
inputlist = []
# --- figure geometry (pixels) ---
width = 5000
height1 = 50
height2 = 100
# Colour gradients for direct and inverted blast matches (min/max identity).
minblastc = (200, 200, 200)
maxblastc = (100, 100, 100)
minblastci = (200, 200, 200)
maxblastci = (100, 100, 100)
drawfig1 = False
drawfig2 = False
drawfig3 = False
compress = True
reverseList = []
featDict = {}
# Line thicknesses (cf. the -glt / -exont flags handled below).
glt = 5
exont = 2
genet = 1
featlengths = []
aln = "centre"
graphit = None
blastoutline = True
minmaxlist = []
# --- graph track options ---
getgc = False
getgcskew = False
getcoverage = False
getcustom = False
windsize = 1000
step = 1000
# (a duplicate "graphit = None" assignment was removed here)
multigraph = True
loggraph = False
gtype = "Histogram"
axisthick = 1
pvc = (255, 0, 0)
nvc = (0, 0, 255)
ggap = 10
gheight = 50
blastit = True
tblastit = False
blastfiles = None
lastflag = 1
filename = None
svg = False
filter = False  # NOTE(review): shadows the builtin; kept, later code reads it
keep_blast = False
nofeat = False
gmaxy = "Auto"
legend = "None"
legname = "gene"
abortCaptain = False
if (
len(sys.argv) >= 2
and sys.argv[1] != "--help"
and sys.argv[1] != "-h"
and sys.argv[1] != "-help"
):
for i in range(1, len(sys.argv)):
if sys.argv[i][:1] == "-":
lastflag = i + 2
if sys.argv[i] == "-o":
filename = sys.argv[i + 1]
elif sys.argv[i] == "-e":
mineval = float(sys.argv[i + 1])
elif sys.argv[i] == "-min_length":
minlength = int(sys.argv[i + 1])
elif sys.argv[i] == "-i":
minIdent = float(sys.argv[i + 1])
elif sys.argv[i] == "-width":
width = int(sys.argv[i + 1])
elif sys.argv[i] == "-ann_height":
height1 = int((sys.argv[i + 1]))
elif sys.argv[i] == "-blast_height":
height2 = int((sys.argv[i + 1]))
elif sys.argv[i] == "-f1":
if (
sys.argv[i + 1] == "T"
or sys.argv[i + 1] == "t"
or sys.argv[i + 1] == "True"
or sys.argv[i + 1] == "true"
):
drawfig1 = True
elif (
sys.argv[i + 1] == "F"
or sys.argv[i + 1] == "f"
or sys.argv[i + 1] == "False"
or sys.argv[i + 1] == "false"
):
drawfig1 = False
elif sys.argv[i] == "-f2":
drawfig2 = int(sys.argv[i + 1])
elif sys.argv[i] == "-f3":
drawfig3 = sys.argv[i + 1]
elif sys.argv[i] == "-uncomp":
if (
sys.argv[i + 1] == "T"
or sys.argv[i + 1] == "t"
or sys.argv[i + 1] == "True"
or sys.argv[i + 1] == "true"
):
compress = False
elif sys.argv[i] == "-blastn":
blastit = True
lastflag -= 1
elif sys.argv[i] == "-tblastx":
tblastit = True
blastit = False
lastflag -= 1
elif sys.argv[i] == "-blast_files":
blastit = False
blastfiles = i
elif sys.argv[i] == "-blast_col":
if sys.argv[i + 1].isdigit():
lastflag = i + 7
t1 = int(sys.argv[i + 1])
t2 = int(sys.argv[i + 2])
t3 = int(sys.argv[i + 3])
t4 = int(sys.argv[i + 4])
t5 = int(sys.argv[i + 5])
t6 = int(sys.argv[i + 6])
else:
if sys.argv[i + 1] == "blue":
t1, t2, t3, t4, t5, t6 = 30, 144, 255, 25, 25, 112
elif sys.argv[i + 1] == "red":
t1, t2, t3, t4, t5, t6 = 200, 100, 0, 255, 0, 0
elif sys.argv[i + 1] == "gray":
t1, t2, t3, t4, t5, t6 = 20, 20, 20, 175, 175, 175
minblastc = (t1, t2, t3)
maxblastc = (t4, t5, t6)
elif sys.argv[i] == "-blast_col_inv":
if sys.argv[i + 1].isdigit():
lastflag = i + 7
t1 = int(sys.argv[i + 1])
t2 = int(sys.argv[i + 2])
t3 = int(sys.argv[i + 3])
t4 = int(sys.argv[i + 4])
t5 = int(sys.argv[i + 5])
t6 = int(sys.argv[i + 6])
else:
if sys.argv[i + 1] == "blue":
t1, t2, t3, t4, t5, t6 = 30, 144, 255, 25, 25, 112
elif sys.argv[i + 1] == "red":
t1, t2, t3, t4, t5, t6 = 200, 100, 0, 255, 0, 0
minblastci = (t1, t2, t3)
maxblastci = (t4, t5, t6)
elif sys.argv[i] == "-f":
r, g, b = 64, 224, 208
arrow = "arrow"
feat = sys.argv[i + 1]
if feat == "F":
nofeat = True
if len(sys.argv) > i + 2 and sys.argv[i + 2].isdigit():
r = int(sys.argv[i + 2])
g = int(sys.argv[i + 3])
b = int(sys.argv[i + 4])
if len(sys.argv) > i + 5 and (
sys.argv[i + 5] == "arrow"
or sys.argv[i + 5] == "rect"
or sys.argv[i + 5] == "frame"
or sys.argv[i + 5] == "pointer"
):
arrow = sys.argv[i + 5]
lastflag = i + 6
else:
lastflag = i + 5
if len(sys.argv) > i + 2 and (
sys.argv[i + 2] == "arrow"
or sys.argv[i + 2] == "rect"
or sys.argv[i + 2] == "frame"
or sys.argv[i + 2] == "pointer"
):
arrow = sys.argv[i + 2]
lastflag = i + 3
featDict[feat] = (arrow, (r, g, b))
elif sys.argv[i] == "-glt":
glt = int(sys.argv[i + 1])
elif sys.argv[i] == "-exont":
exont = int(sys.argv[i + 1])
elif sys.argv[i] == genet:
genet = int(sys.argv[i + 1])
elif sys.argv[i] == "-aln":
aln = sys.argv[i + 1]
if aln == "best":
aln = "best blast"
elif sys.argv[i] == "-bo":
if (
sys.argv[i + 1] == "T"
or sys.argv[i + 1] == "t"
or sys.argv[i + 1] == "True"
or sys.argv[i + 1] == "true"
):
blastoutline = True
else:
blastoutline = False
elif sys.argv[i] == "-G":
if sys.argv[i + 1] == "GCContent":
getgc = True
elif sys.argv[i + 1] == "GCSkew":
getgcskew = True
elif sys.argv[i + 1] == "Coverage":
getcoverage = True
gfilename = sys.arv[i + 2]
lastflag += 1
elif sys.argv[i + 1] == "Custom":
getcustom = True
gfilename = sys.argv[i + 2]
lastflag += 1
else:
print (sys.argv[i + 1] + " not a valid graph type")
elif sys.argv[i] == "-wind_size":
windsize = int(sys.argv[i + 1])
elif sys.argv[i] == "-step":
step = int(sys.argv[i + 1])
elif sys.argv[i] == "-line":
if (
sys.argv[i + 1] == "T"
or sys.argv[i + 1] == "t"
or sys.argv[i + 1] == "True"
or sys.argv[i + 1] == "true"
):
gtype = "Line"
elif sys.argv[i] == "-axis_t":
axisthick = sys.argv[i + 1]
elif sys.argv[i] == "-pos_col":
lastflag = i + 4
r = int(sys.argv[i + 1])
g = int(sys.argv[i + 2])
b = int(sys.argv[i + 3])
pvc = (r, g, b)
elif sys.argv[i] == "-neg_col":
lastflag = i + 4
r = int(sys.argv[i + 1])
g = int(sys.argv[i + 2])
b = int(sys.argv[i + 3])
nvc = (r, g, b)
elif sys.argv[i] == "-g_height":
gheight = int(sys.argv[i + 1])
elif sys.argv[i] == "-gap":
ggap = int(sys.argv[i + 1])
elif sys.argv[i] == "-y_max":
gmaxy = int(sys.argv[i + 1])
elif sys.argv[i] == "-A":
if (
sys.argv[i + 1] == "T"
or sys.argv[i + 1] == "t"
or sys.argv[i + 1] == "True"
or sys.argv[i + 1] == "true"
):
auto = True
else:
auto = False
elif sys.argv[i] == "-svg":
svg = True
lastflag -= 1
elif sys.argv[i] == "-keep":
keep_blast = True
lastflag -= 1
elif sys.argv[i] == "-filter":
filter = True
lastflag -= 1
elif sys.argv[i] == "-legend":
if sys.argv[i + 1] == "single":
legend = "Single column"
elif sys.argv[i + 1] == "double":
legend = "Two columns"
elif sys.argv[i + 1] == "top":
legend = "Top"
elif sys.argv[i + 1] == "bottom":
legend = "Bottom"
elif sys.argv[i + 1] == "both":
legend = "Top & Bottom"
else:
print ("Legend options are <single/double/top/bottom/both/None> (case sensitive), using None.")
elif sys.argv[i] == "-leg_name":
legname = sys.argv[i + 1]
inlist = sys.argv[lastflag + 1 :]
if blastfiles != None and lastflag == blastfiles + 2:
allthestuff = sys.argv[blastfiles + 1 :]
allthestuff2 = []
for i in allthestuff:
if i != "R" and i != "Max" and not i.isdigit():
allthestuff2.append(i)
inlist = allthestuff[len(allthestuff2) / 2 :]
last = inlist[0]
inlist = inlist[1:]
else:
last = sys.argv[lastflag]
templist = []
revlist = []
cutlist = []
rev = False
cuts = [None, None]
for i in inlist:
if i == "R" or i == "Max" or i.isdigit():
if os.path.exists(i):
sys.stderr.write(
'Cannot tell if "'
+ i
+ '" is an file or argument (the file exists and this is also the argument to trim or reverse genome).\
\nPlease rename file (if file) or remove file from directory (if argument).\n'
)
sys.exit()
if i == "R":
rev = True
getit = True
elif i.isdigit():
if cuts[0] == None:
cuts[0] = int(i)
else:
cuts[1] = int(i)
elif i == "Max":
cuts[1] = i
else:
revlist.append(rev)
if cuts == [None, None]:
cuts = [1, "Max"]
cutlist.append(tuple(cuts))
templist.append(last)
rev = False
cuts = [None, None]
last = i
revlist.append(rev)
if cuts == [None, None]:
cuts = [1, "Max"]
cutlist.append(tuple(cuts))
for i in cutlist:
if None in i:
sys.stderr.write(
"Please provide a start coordinate and end coordinate for genome cuts. (Only a single coordinate was provided)\n"
)
sys.exit()
templist.append(last)
# Build the optional graph data panel for the figure (selected via -G).
# graphit bundles: [per-genome data arrays, positive colour, negative colour,
# graph height, axis thickness, graph type, y maximum, gap to annotations].
if getgc:
    thearray = []
    for j in range(len(templist)):
        # Respect any per-genome crop coordinates when computing GC content.
        mincut, maxcut = cutlist[j]
        thearray.append(getGCcontent(templist[j], windsize, step, mincut, maxcut))
    graphit = [thearray, pvc, nvc, gheight, axisthick, gtype, gmaxy, ggap]
elif getgcskew:
    thearray = []
    for j in range(len(templist)):
        mincut, maxcut = cutlist[j]
        thearray.append(getGCskew(templist[j], windsize, step, mincut, maxcut))
    graphit = [thearray, pvc, nvc, gheight, axisthick, gtype, gmaxy, ggap]
elif getcustom:
    # NOTE(review): `getcustom` is the boolean flag set by "-G Custom" above,
    # so calling it here will raise a TypeError at runtime; presumably a
    # parser helper (cf. getGCcontent/getGCskew/getCoverage) was intended --
    # confirm against the helper definitions earlier in this file.
    thearray = getcustom(gfilename)
    graphit = [thearray, pvc, nvc, gheight, axisthick, gtype, gmaxy, ggap]
elif getcoverage:
    # Coverage graphs are only computed for the first genome in the list.
    thearray = [getCoverage(templist[0], gfilename, cutlist[0][0], cutlist[0][1])]
    graphit = [thearray, pvc, nvc, gheight, axisthick, gtype, gmaxy, ggap]
# Assemble the interleaved genome/comparison input list for drawing.
if blastit:
    # Generate blastn comparison files between consecutive genomes.
    inlist = genBlast(templist, cutlist)
elif tblastit:
    # Generate tblastx comparison files between consecutive genomes.
    inlist = genTBlastX(templist, cutlist)
elif blastfiles != None:
    # Interleave previously generated blast files between the annotation
    # files: genome1, blast1, genome2, blast2, ..., genomeN.
    inlist = []
    tempfiles = sys.argv[blastfiles + 1 :]
    for i in templist[:-1]:
        inlist.append(i)
        inlist.append(tempfiles.pop(0))
    inlist.append(templist[-1])
else:
    # BUG FIX: this message was a bare string expression (a no-op statement),
    # so the user never saw it; it is now printed. Also fixed the "choolse"
    # typo in the message text.
    print ("Please choose -blastn or -tblastx flags to generate blast files, or use -blast_files to use previously generated files.")
if filename == None:
print ("Please choose a file to write to (-o tag) and try agian.")
sys.exit()
if featDict == {} and not nofeat:
featDict = {"CDS": ("arrow", (64, 224, 208))}
if svg:
x = drawsvg(
filename,
minlength,
mineval,
minIdent,
inlist,
width,
height1,
height2,
minblastc,
maxblastc,
minblastci,
maxblastci,
drawfig1,
drawfig2,
drawfig3,
compress,
revlist,
featDict,
glt,
exont,
genet,
featlengths,
aln,
graphit,
blastoutline,
cutlist,
filter,
legend,
legname,
)
else:
x = draw(
filename,
minlength,
mineval,
minIdent,
inlist,
width,
height1,
height2,
minblastc,
maxblastc,
minblastci,
maxblastci,
drawfig1,
drawfig2,
drawfig3,
compress,
revlist,
featDict,
glt,
exont,
genet,
featlengths,
aln,
graphit,
blastoutline,
cutlist,
filter,
legend,
legname,
)
if (blastit or tblastit) and not keep_blast:
shutil.rmtree("temp_easyfig")
print ("Minimum blast hit reported: " + str(x) + "%")
elif len(sys.argv) == 1:
from Tkinter import *
import tkFileDialog
import tkMessageBox
import tkSimpleDialog
import tkColorChooser
class DDlistbox(Listbox):
    """A single-selection Listbox whose rows can be reordered by dragging
    them with the left mouse button."""

    def __init__(self, master, **kw):
        # Drag-and-drop reordering only makes sense with single selection.
        kw["selectmode"] = SINGLE
        Listbox.__init__(self, master, kw)
        self.bind("<Button-1>", self.setCurrent)
        self.bind("<B1-Motion>", self.shiftSelection)
        self.curIndex = None

    def setCurrent(self, event):
        # Remember which row the drag started on.
        self.curIndex = self.nearest(event.y)

    def shiftSelection(self, event):
        # Move the dragged row one step toward the cursor position.
        hover = self.nearest(event.y)
        if hover == self.curIndex:
            return
        item = self.get(hover)
        self.delete(hover)
        if hover < self.curIndex:
            self.insert(hover + 1, item)
        else:
            self.insert(hover - 1, item)
        self.curIndex = hover
abortCaptain = False
root = Tk()
root.title("Easyfig.py")
root.option_add("*Font", "TkDefaultFont 12")
app = App(root)
root.mainloop()
else:
print (
"""
Easyfig.py Written by: Mitchell Sullivan mjsull@gmail.com
Supervisor: Dr. Scott Beatson University of Queensland 03.12.2010
License: GPLv3
Version 2.2.3
Usage: Easyfig.py [options] GenBank/EMBL/fasta GenBank/EMBL/fasta GenBank/EMBL/fasta ...
This script should work on 1 to an infinite amount of GenBank/EMBL files (given enough memory)
Adding 2 integers after the annotation file will crop the annotation file.
Adding a R after the annotation file will reverse compliment it.
WARNING: Will overwrite output file without warning.
WARNING: Will delete temp_easyfig folder if -keep flag not given.
***************************************************************
GenBank or EMBL file must have source line, or Sequence.
' source 1..<sequence length>' or 'FT source 1..<sequence length>'
for GenBank / EMBL
***************************************************************
The GenBank file preceding the blast file should always be the query
the GenBank file after the blast file should always be the reference
In it's present state only 'CDS' features will be recorded
Options:
-o <string> Specify output file. <REQUIRED!>
-blastn Generate blastn files automatically. Requires blastall or blast+
in the path, Annotation file must have nucleotide sequence. [Default]
-tblastx Generate tblastx files automatically. Requires blastall or blast+
in the path, Annotation file must have nucleotide sequence.
-blast_files List of previously generated blast files, ordered. Query must be
annotation file on top, reference annotation file on bottom.
-svg Create Scalable Vector Graphics (svg) file instead of bmp.
-filter Filter small blast hits or annotations (< 4 pixels wide). [F]
GENERAL OPTIONS:
-width <int> width of figure in pixels. [5000]
-ann_height <int> height of annotations in figure (pixels). [50]
-blast_height <int> height of blast hits in figure (pixels). [100]
-f1 <T/F> draw colour gradient figure for blast hits. [F]
-f2 <int> draw scale figure <int> base pairs long. [0]
-uncomp <T/F> Do not compress figure. [F]
-f <string> [r g b] [arrow/rect/pointer/frame]
Draw features of type <string> (case sensitive) in the
color r g b with illustration type arrow, rectangle,
pointer or frame. Default light blue arrows.
EXAMPLE: -f CDS 255 0 0 rect will draw all CDS features as
a red rectangle.
if none specified easyFig automatically draws CDS features.
If you want a figure with no features drawn use -f F
-glt <int> Genome line is <int> pixels thick [5]
-exont <int> exon lines joining introns are <int> pixels thick. [1]
-genet <int> outline of features is <int> pixels thick. [1]
-aln <best/left/right/centre> [centre]
Alignment of genomes
best aligns feature file perpendicular to best blast hit.
-legend <single/double/top/bottom/both/None>
Single: Gene names in single column
Double: Gene names in two columns
Top: Top feature file genes labelled above figure
Bottom: Bottom feature file genes labelled below figure
Both: Top and bottom feature files genes labelled above
and below genome.
None: No legend or gene labelling <default>
-leg_name Where to get feature name from [gene]
BLAST OPTIONS:
-e <float> maxmimum e value of blast hits to be drawn. [0.001]
-i <float> minimum identity value of blast hits to be drawn. [0]
-min_length <int> minimum length of blast hits to be drawn. [0]
-blast_col <red/blue> changes blast hits to gradient of red or blue
alternitively <int1 int2 int3 int4 int5 int6>
defines color gradient for blast hits
worst blast hit reported will be color int1 int2 int3
where int 1 2 3 is the RGB of color range[0-255]
100% identity blast hits will be color int4 int5 int6
[default 20 20 20 175 175 175] <gray>
-blast_col_inv Colour for inverted blast hits.
-bo <T/F> Black outline of blast hits. [T]
-keep Don't delete blast output (temp_easyfig/)
GRAPH OPTIONS:
-G <GCContent/GCSkew/Coverage/Custom [filename]>
Plot GC Content, GC Skew, Coverage or Custom graph.
if Coverage or Custom filename for ace or custom file needs
to be provided. Details on how to make custom graph files
in manual.
-wind_size <int> Window size for calculating GC content/GC skew. [1000]
-step <int> Step size for calculating GC content/GC skew. [1000]
-line <T/F> Draw graph as a line graph. [T]
-axis_t Thickness of X axis. [1]
-pos_col <int int int> RGB colour of positive values in graph. [Red]
-neg_col <int int int> RGB colour of negative values in graph. [Blue]
-g_height <int> height of graph in pixels. [50]
-gap gap between graph and annotations. [10]
-y_max Maximum y value [Default: max Y calculated.]
EXAMPLES:
Easyfig.py -filter -o outfile.bmp genbank1.gbk genbank2.gbk genbank3.gbk
Easiest way to generate a simple comparison file between three (or more) annotation
files. Shows CDS features as red arrows.
Easyfig.py -o outfile.bmp -e 0.00001 -f gene frame 0 0 255 -G GCContent ann1.embl ann2.gbk ann3.gbk ann4.embl
Generate a blastn comparison between 4 annotation files, Display genes as blue
arrows in frame. Only report blast hits under 0.00001 expect value.
Display the GC content of each file as a graph.
Easyfig.py -tblastx -o outfile.svg -svg ann1.embl 1 10000 ann2.embl 1 10000 R
Show a tblastx comparison of the first 10000 base pairs of ann1.embl and ann2.embl
Reverse compliment ann2.embl. Writes as a SVG file.
this script uses a modified version of Paul McGuire's (http://www.geocities.com/ptmcg/ RIP (geocities, not paul))
bmp.py - module for constructing simple BMP graphics files
"""
)
|
TAMU-CPT/galaxy-tools
|
tools/gbk_viz/Easyfig.py
|
Python
|
gpl-3.0
| 914,274
|
[
"BLAST",
"VisIt"
] |
e6d236996cae6c5a9f143d3ff048a2f23550526cae4b87954dddbc0857b76128
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# ChemPy - A chemistry toolkit for Python
#
# Copyright (c) 2010 by Joshua W. Allen (jwallen@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This module contains the thermodynamics models that are available in ChemPy.
All such models derive from the :class:`ThermoModel` base class.
"""
################################################################################
import math
import numpy
import cython
import constants
from exception import InvalidThermoModelError
################################################################################
class ThermoError(Exception):
    """
    An exception class for errors that occur while working with thermodynamics
    models. Pass a string describing the circumstances that caused the
    exceptional behavior.
    """
    # BUG FIX: the class previously did not derive from Exception (old-style
    # class), so it cannot be raised at all under Python 3 and is not caught
    # by `except Exception` handlers. Deriving from Exception is backward
    # compatible: existing `except ThermoError` clauses still match.
    pass
################################################################################
class ThermoModel:
    """
    Abstract base class for thermodynamics models. Subclasses are expected to
    override the scalar evaluation methods (:meth:`getHeatCapacity`,
    :meth:`getEnthalpy`, :meth:`getEntropy`, :meth:`getFreeEnergy`).

    Attributes common to all models:

    =============== =============== ============================================
    Attribute       Type            Description
    =============== =============== ============================================
    `Tmin`          :class:`float`  The minimum temperature in K at which the model is valid
    `Tmax`          :class:`float`  The maximum temperature in K at which the model is valid
    `comment`       :class:`str`    A string containing information about the model (e.g. its source)
    =============== =============== ============================================
    """

    def __init__(self, Tmin=0.0, Tmax=1.0e10, comment=''):
        self.Tmin = Tmin
        self.Tmax = Tmax
        self.comment = comment

    def isTemperatureValid(self, T):
        """
        Return ``True`` if the temperature `T` in K lies inside the model's
        valid range ``[Tmin, Tmax]`` (inclusive at both ends), or ``False``
        otherwise.
        """
        return self.Tmin <= T <= self.Tmax

    def getHeatCapacity(self, T):
        """Abstract; raises unless overridden by a derived class."""
        raise ThermoError('Unexpected call to ThermoModel.getHeatCapacity(); you should be using a class derived from ThermoModel.')

    def getEnthalpy(self, T):
        """Abstract; raises unless overridden by a derived class."""
        raise ThermoError('Unexpected call to ThermoModel.getEnthalpy(); you should be using a class derived from ThermoModel.')

    def getEntropy(self, T):
        """Abstract; raises unless overridden by a derived class."""
        raise ThermoError('Unexpected call to ThermoModel.getEntropy(); you should be using a class derived from ThermoModel.')

    def getFreeEnergy(self, T):
        """Abstract; raises unless overridden by a derived class."""
        raise ThermoError('Unexpected call to ThermoModel.getFreeEnergy(); you should be using a class derived from ThermoModel.')

    def getHeatCapacities(self, Tlist):
        """Evaluate the heat capacity at each temperature in `Tlist`; returns a numpy float64 array."""
        return numpy.array(list(map(self.getHeatCapacity, Tlist)), numpy.float64)

    def getEnthalpies(self, Tlist):
        """Evaluate the enthalpy at each temperature in `Tlist`; returns a numpy float64 array."""
        return numpy.array(list(map(self.getEnthalpy, Tlist)), numpy.float64)

    def getEntropies(self, Tlist):
        """Evaluate the entropy at each temperature in `Tlist`; returns a numpy float64 array."""
        return numpy.array(list(map(self.getEntropy, Tlist)), numpy.float64)

    def getFreeEnergies(self, Tlist):
        """Evaluate the Gibbs free energy at each temperature in `Tlist`; returns a numpy float64 array."""
        return numpy.array(list(map(self.getFreeEnergy, Tlist)), numpy.float64)
################################################################################
class ThermoGAModel(ThermoModel):
    """
    A thermodynamic model defined by a set of heat capacities. The attributes
    are:

    =========== =================== ============================================
    Attribute   Type                Description
    =========== =================== ============================================
    `Tdata`     ``numpy.ndarray``   The temperatures at which the heat capacity data is provided in K
    `Cpdata`    ``numpy.ndarray``   The standard heat capacity in J/mol*K at each temperature in `Tdata`
    `H298`      ``double``          The standard enthalpy of formation at 298 K in J/mol
    `S298`      ``double``          The standard entropy of formation at 298 K in J/mol*K
    =========== =================== ============================================

    Heat capacity is interpolated linearly between the tabulated temperatures
    and clamped to the endpoint values outside the tabulated range.
    """

    def __init__(self, Tdata=None, Cpdata=None, H298=0.0, S298=0.0, Tmin=0.0, Tmax=99999.9, comment=''):
        ThermoModel.__init__(self, Tmin=Tmin, Tmax=Tmax, comment=comment)
        self.Tdata = Tdata
        self.Cpdata = Cpdata
        self.H298 = H298
        self.S298 = S298

    def __repr__(self):
        """Return a string representation of the model's data."""
        string = 'ThermoGAModel(Tdata=%s, Cpdata=%s, H298=%s, S298=%s)' % (self.Tdata, self.Cpdata, self.H298, self.S298)
        return string

    def __str__(self):
        """
        Return a string summarizing the thermodynamic data.
        """
        string = ''
        string += 'Enthalpy of formation: %g kJ/mol\n' % (self.H298 / 1000.0)
        string += 'Entropy of formation: %g J/mol*K\n' % (self.S298)
        string += 'Heat capacity (J/mol*K): '
        for T, Cp in zip(self.Tdata, self.Cpdata):
            string += '%.1f(%g K) ' % (Cp,T)
        string += '\n'
        string += 'Comment: %s' % (self.comment)
        return string

    def __add__(self, other):
        """
        Add two sets of thermodynamic data together. All parameters are
        considered additive. Returns a new :class:`ThermoGAModel` object that is
        the sum of the two sets of thermodynamic data.
        """
        cython.declare(i=int, new=ThermoGAModel)
        # Addition is only defined when both models tabulate Cp at exactly
        # the same set of temperatures.
        if len(self.Tdata) != len(other.Tdata) or any([T1 != T2 for T1, T2 in zip(self.Tdata, other.Tdata)]):
            raise Exception('Cannot add these ThermoGAModel objects due to their having different temperature points.')
        new = ThermoGAModel()
        new.H298 = self.H298 + other.H298
        new.S298 = self.S298 + other.S298
        new.Tdata = self.Tdata
        new.Cpdata = self.Cpdata + other.Cpdata
        # Concatenate the comments, dropping whichever side is empty.
        if self.comment == '': new.comment = other.comment
        elif other.comment == '': new.comment = self.comment
        else: new.comment = self.comment + ' + ' + other.comment
        return new

    def getHeatCapacity(self, T):
        """
        Return the constant-pressure heat capacity (Cp) in J/mol*K at
        temperature `T` in K. Raises :class:`ThermoError` if `T` is outside
        the model's valid temperature range.
        """
        cython.declare(Tmin=cython.double, Tmax=cython.double, Cpmin=cython.double, Cpmax=cython.double)
        cython.declare(Cp=cython.double)
        Cp = 0.0
        if not self.isTemperatureValid(T):
            raise ThermoError('Invalid temperature "%g K" for heat capacity estimation.' % T)
        # Outside the tabulated range, clamp to the endpoint heat capacities...
        if T < numpy.min(self.Tdata):
            Cp = self.Cpdata[0]
        elif T >= numpy.max(self.Tdata):
            Cp = self.Cpdata[-1]
        else:
            # ...otherwise interpolate linearly between the bracketing points.
            for Tmin, Tmax, Cpmin, Cpmax in zip(self.Tdata[:-1], self.Tdata[1:], self.Cpdata[:-1], self.Cpdata[1:]):
                if Tmin <= T and T < Tmax:
                    Cp = (Cpmax - Cpmin) * ((T - Tmin) / (Tmax - Tmin)) + Cpmin
        return Cp

    def getEnthalpy(self, T):
        """
        Return the enthalpy in J/mol at temperature `T` in K, obtained by
        analytically integrating the piecewise-linear Cp(T) starting from
        the `H298` reference value.
        """
        cython.declare(H=cython.double, slope=cython.double, intercept=cython.double,
            Tmin=cython.double, Tmax=cython.double, Cpmin=cython.double, Cpmax=cython.double)
        H = self.H298
        if not self.isTemperatureValid(T):
            raise ThermoError('Invalid temperature "%g K" for enthalpy estimation.' % T)
        # Integrate Cp dT over each linear segment up to T.
        for Tmin, Tmax, Cpmin, Cpmax in zip(self.Tdata[:-1], self.Tdata[1:], self.Cpdata[:-1], self.Cpdata[1:]):
            if T > Tmin:
                slope = (Cpmax - Cpmin) / (Tmax - Tmin)
                intercept = (Cpmin * Tmax - Cpmax * Tmin) / (Tmax - Tmin)
                # Partial segment if T falls inside it; full segment otherwise.
                if T < Tmax: H += 0.5 * slope * (T*T - Tmin*Tmin) + intercept * (T - Tmin)
                else: H += 0.5 * slope * (Tmax*Tmax - Tmin*Tmin) + intercept * (Tmax - Tmin)
        # Above the last data point, extrapolate with a constant Cp.
        if T > self.Tdata[-1]:
            H += self.Cpdata[-1] * (T - self.Tdata[-1])
        return H

    def getEntropy(self, T):
        """
        Return the entropy in J/mol*K at temperature `T` in K, obtained by
        analytically integrating Cp/T over the piecewise-linear Cp(T)
        starting from the `S298` reference value.
        """
        cython.declare(S=cython.double, slope=cython.double, intercept=cython.double,
            Tmin=cython.double, Tmax=cython.double, Cpmin=cython.double, Cpmax=cython.double)
        S = self.S298
        if not self.isTemperatureValid(T):
            raise ThermoError('Invalid temperature "%g K" for entropy estimation.' % T)
        # Integrate Cp/T dT over each linear segment up to T.
        for Tmin, Tmax, Cpmin, Cpmax in zip(self.Tdata[:-1], self.Tdata[1:], self.Cpdata[:-1], self.Cpdata[1:]):
            if T > Tmin:
                slope = (Cpmax - Cpmin) / (Tmax - Tmin)
                intercept = (Cpmin * Tmax - Cpmax * Tmin) / (Tmax - Tmin)
                if T < Tmax: S += slope * (T - Tmin) + intercept * math.log(T/Tmin)
                else: S += slope * (Tmax - Tmin) + intercept * math.log(Tmax/Tmin)
        # Above the last data point, extrapolate with a constant Cp.
        if T > self.Tdata[-1]:
            S += self.Cpdata[-1] * math.log(T / self.Tdata[-1])
        return S

    def getFreeEnergy(self, T):
        """
        Return the Gibbs free energy in J/mol at temperature `T` in K,
        computed as G = H - T*S.
        """
        if not self.isTemperatureValid(T):
            raise ThermoError('Invalid temperature "%g K" for Gibbs free energy estimation.' % T)
        return self.getEnthalpy(T) - T * self.getEntropy(T)
################################################################################
class WilhoitModel(ThermoModel):
    """
    A thermodynamics model based on the Wilhoit equation for heat capacity,

    .. math::
        C_\\mathrm{p}(T) = C_\\mathrm{p}(0) + \\left[ C_\\mathrm{p}(\\infty) -
        C_\\mathrm{p}(0) \\right] y^2 \\left[ 1 + (y - 1) \\sum_{i=0}^3 a_i y^i \\right]

    where :math:`y \\equiv \\frac{T}{T + B}` is a scaled temperature that ranges
    from zero to one. (The characteristic temperature :math:`B` is chosen by
    default to be 500 K.) This formulation has the advantage of correctly
    reproducting the heat capacity behavior as :math:`T \\rightarrow 0` and
    :math:`T \\rightarrow \\infty`. The low-temperature limit
    :math:`C_\\mathrm{p}(0)` is taken to be :math:`3.5R` for linear molecules
    and :math:`4R` for nonlinear molecules. The high-temperature limit
    :math:`C_\\mathrm{p}(\\infty)` is taken to be
    :math:`\\left[ 3 N_\\mathrm{atoms} - 1.5 \\right] R` for linear molecules and
    :math:`\\left[ 3 N_\\mathrm{atoms} - (2 + 0.5 N_\\mathrm{rotors}) \\right] R`
    for nonlinear molecules, for a molecule composed of :math:`N_\\mathrm{atoms}`
    atoms and :math:`N_\\mathrm{rotors}` internal rotors.

    The Wilhoit parameters are stored in the attributes `cp0`, `cpInf`, `a0`,
    `a1`, `a2`, `a3`, and `B`. There are also integration constants `H0` and
    `S0` that are needed to evaluate the enthalpy and entropy, respectively.
    """

    def __init__(self, cp0=0.0, cpInf=0.0, a0=0.0, a1=0.0, a2=0.0, a3=0.0, H0=0.0, S0=0.0, comment='', B=500.0):
        ThermoModel.__init__(self, comment=comment)
        self.cp0 = cp0      # low-temperature Cp limit, J/mol*K
        self.cpInf = cpInf  # high-temperature Cp limit, J/mol*K
        self.B = B          # characteristic temperature, K
        self.a0 = a0
        self.a1 = a1
        self.a2 = a2
        self.a3 = a3
        self.H0 = H0        # enthalpy integration constant, J/mol
        self.S0 = S0        # entropy integration constant, J/mol*K

    def __repr__(self):
        """
        Return a string representation that can be used to reconstruct the
        object.
        """
        return 'WilhoitModel(cp0=%g, cpInf=%g, a0=%g, a1=%g, a2=%g, a3=%g, H0=%g, S0=%g, B=%g)' % (self.cp0, self.cpInf, self.a0, self.a1, self.a2, self.a3, self.H0, self.S0, self.B)

    def getHeatCapacity(self, T):
        """
        Return the constant-pressure heat capacity (Cp) in J/mol*K at the
        specified temperature `T` in K, evaluated directly from the Wilhoit
        expression above.
        """
        cython.declare(y=cython.double)
        y = T/(T+self.B)
        return self.cp0+(self.cpInf-self.cp0)*y*y*( 1 +
            (y-1)*(self.a0 + y*(self.a1 + y*(self.a2 + y*self.a3))) )

    def getEnthalpy(self, T):
        """
        Return the enthalpy in J/mol at the specified temperature `T` in
        K. The formula is

        .. math::
            H(T) & = H_0 +
            C_\\mathrm{p}(0) T + \\left[ C_\\mathrm{p}(\\infty) - C_\\mathrm{p}(0) \\right] T \\\\
            & \\left\\{ \\left[ 2 + \\sum_{i=0}^3 a_i \\right]
            \\left[ \\frac{1}{2}y - 1 + \\left( \\frac{1}{y} - 1 \\right) \\ln \\frac{T}{y} \\right]
            + y^2 \\sum_{i=0}^3 \\frac{y^i}{(i+2)(i+3)} \\sum_{j=0}^3 f_{ij} a_j
            \\right\\}

        where :math:`f_{ij} = 3 + j` if :math:`i = j`, :math:`f_{ij} = 1` if
        :math:`i > j`, and :math:`f_{ij} = 0` if :math:`i < j`.
        """
        cython.declare(cp0=cython.double, cpInf=cython.double, B=cython.double, a0=cython.double, a1=cython.double, a2=cython.double, a3=cython.double)
        cython.declare(y=cython.double, y2=cython.double, logBplust=cython.double)
        # Copy attributes to locals once for the closed-form evaluation.
        cp0, cpInf, B, a0, a1, a2, a3 = self.cp0, self.cpInf, self.B, self.a0, self.a1, self.a2, self.a3
        y = T/(T+B)
        y2 = y*y
        logBplust = math.log(B + T)
        return self.H0 + cp0*T - (cpInf-cp0)*T*(y2*((3*a0 + a1 + a2 + a3)/6. + (4*a1 + a2 + a3)*y/12. + (5*a2 + a3)*y2/20. + a3*y2*y/5.) + (2 + a0 + a1 + a2 + a3)*( y/2. - 1 + (1/y-1)*logBplust))

    def getEntropy(self, T):
        """
        Return the entropy in J/mol*K at the specified temperature `T` in
        K. The formula is

        .. math::
            S(T) = S_0 +
            C_\\mathrm{p}(\\infty) \\ln T - \\left[ C_\\mathrm{p}(\\infty) - C_\\mathrm{p}(0) \\right]
            \\left[ \\ln y + \\left( 1 + y \\sum_{i=0}^3 \\frac{a_i y^i}{2+i} \\right) y
            \\right]
        """
        cython.declare(cp0=cython.double, cpInf=cython.double, B=cython.double, a0=cython.double, a1=cython.double, a2=cython.double, a3=cython.double)
        cython.declare(y=cython.double, logt=cython.double, logy=cython.double)
        cp0, cpInf, B, a0, a1, a2, a3 = self.cp0, self.cpInf, self.B, self.a0, self.a1, self.a2, self.a3
        y = T/(T+B)
        logt = math.log(T)
        logy = math.log(y)
        return self.S0 + cpInf*logt-(cpInf-cp0)*(logy+y*(1+y*(a0/2+y*(a1/3 + y*(a2/4 + y*a3/5)))))

    def getFreeEnergy(self, T):
        """
        Return the Gibbs free energy in J/mol at the specified temperature
        `T` in K, computed as G = H - T*S.
        """
        return self.getEnthalpy(T) - T * self.getEntropy(T)

    def __residual(self, B, Tlist, Cplist, linear, nFreq, nRotors, H298, S298):
        # The residual corresponding to the fitToData() method
        # Parameters are the same as for that method
        cython.declare(Cp_fit=numpy.ndarray)
        # Fit at this candidate B, then score how well the fit reproduces
        # the supplied heat capacity data.
        self.fitToDataForConstantB(Tlist, Cplist, linear, nFreq, nRotors, B, H298, S298)
        Cp_fit = self.getHeatCapacities(Tlist)
        # Objective function is linear least-squares
        return numpy.sum( (Cp_fit - Cplist) * (Cp_fit - Cplist) )

    def fitToData(self, Tlist, Cplist, linear, nFreq, nRotors, H298, S298, B0=500.0):
        """
        Fit a Wilhoit model to the data points provided, allowing the
        characteristic temperature `B` to vary so as to improve the fit. This
        procedure requires an optimization, using the ``fminbound`` function
        in the ``scipy.optimize`` module. The data consists of a set
        of dimensionless heat capacity points `Cplist` at a given set of
        temperatures `Tlist` in K. The linearity of the molecule, number of
        vibrational frequencies, and number of internal rotors (`linear`,
        `nFreq`, and `nRotors`, respectively) is used to set the limits at
        zero and infinite temperature. Returns ``self`` for chaining.
        """
        self.B = B0
        import scipy.optimize
        # Optimize B over [300, 3000] K; each evaluation re-fits the
        # polynomial coefficients via fitToDataForConstantB.
        scipy.optimize.fminbound(self.__residual, 300.0, 3000.0, args=(Tlist, Cplist, linear, nFreq, nRotors, H298, S298))
        return self

    def fitToDataForConstantB(self, Tlist, Cplist, linear, nFreq, nRotors, B, H298, S298):
        """
        Fit a Wilhoit model to the data points provided using a specified value
        of the characteristic temperature `B`. The data consists of a set
        of dimensionless heat capacity points `Cplist` at a given set of
        temperatures `Tlist` in K. The linearity of the molecule, number of
        vibrational frequencies, and number of internal rotors (`linear`,
        `nFreq`, and `nRotors`, respectively) is used to set the limits at
        zero and infinite temperature. Returns ``self`` for chaining.
        """
        cython.declare(y=numpy.ndarray, A=numpy.ndarray, b=numpy.ndarray, x=numpy.ndarray)
        # Set the Cp(T) limits as T -> 0 and T -> infinity
        self.cp0 = 3.5 * constants.R if linear else 4.0 * constants.R
        self.cpInf = self.cp0 + (nFreq + 0.5 * nRotors) * constants.R
        # What remains is to fit the polynomial coefficients (a0, a1, a2, a3)
        # This can be done directly - no iteration required
        y = Tlist / (Tlist + B)
        A = numpy.zeros((len(Cplist),4), numpy.float64)
        for j in range(4):
            A[:,j] = (y*y*y - y*y) * y**j
        b = ((Cplist - self.cp0) / (self.cpInf - self.cp0) - y*y)
        x, residues, rank, s = numpy.linalg.lstsq(A, b)
        self.B = float(B)
        self.a0 = float(x[0])
        self.a1 = float(x[1])
        self.a2 = float(x[2])
        self.a3 = float(x[3])
        # Zero the integration constants first so that getEnthalpy/getEntropy
        # return the pure integrals, then choose H0/S0 to reproduce the
        # reference values at 298.15 K.
        self.H0 = 0.0; self.S0 = 0.0
        self.H0 = H298 - self.getEnthalpy(298.15)
        self.S0 = S298 - self.getEntropy(298.15)
        return self
################################################################################
class NASAPolynomial(ThermoModel):
    """
    A single NASA polynomial for thermodynamic data. The `coeffs` attribute
    stores the seven polynomial coefficients
    :math:`\\mathbf{a} = \\left[a_1\\ a_2\\ a_3\\ a_4\\ a_5\\ a_6\\ a_7 \\right]`
    from which the relevant thermodynamic parameters are evaluated via the
    expressions

    .. math:: \\frac{C_\\mathrm{p}(T)}{R} = a_1 + a_2 T + a_3 T^2 + a_4 T^3 + a_5 T^4

    .. math:: \\frac{H(T)}{RT} = a_1 + \\frac{1}{2} a_2 T + \\frac{1}{3} a_3 T^2 + \\frac{1}{4} a_4 T^3 + \\frac{1}{5} a_5 T^4 + \\frac{a_6}{T}

    .. math:: \\frac{S(T)}{R} = a_1 \\ln T + a_2 T + \\frac{1}{2} a_3 T^2 + \\frac{1}{3} a_4 T^3 + \\frac{1}{4} a_5 T^4 + a_7

    The above was adapted from `this page <http://www.me.berkeley.edu/gri-mech/data/nasa_plnm.html>`_.
    """

    def __init__(self, Tmin=0.0, Tmax=0.0, coeffs=None, comment=''):
        ThermoModel.__init__(self, Tmin=Tmin, Tmax=Tmax, comment=comment)
        # Default to all-zero coefficients; unpack into individual attributes
        # c0..c6 (corresponding to a1..a7 in the formulas above).
        coeffs = coeffs or (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
        self.c0, self.c1, self.c2, self.c3, self.c4, self.c5, self.c6 = coeffs

    def __repr__(self):
        """
        Return a string representation that can be used to reconstruct the
        object.
        """
        return 'NASAPolynomial(Tmin=%g, Tmax=%g, coeffs=[%g, %g, %g, %g, %g, %g, %g])' % (self.Tmin, self.Tmax, self.c0, self.c1, self.c2, self.c3, self.c4, self.c5, self.c6)

    def getHeatCapacity(self, T):
        """
        Return the constant-pressure heat capacity (Cp) in J/mol*K at the
        specified temperature `T` in K.
        """
        # Cp/R = a1 + a2 T + a3 T^2 + a4 T^3 + a5 T^4 (evaluated via Horner's rule)
        return (self.c0 + T*(self.c1 + T*(self.c2 + T*(self.c3 + self.c4*T)))) * constants.R

    def getEnthalpy(self, T):
        """
        Return the enthalpy in J/mol at the specified temperature `T` in
        K.
        """
        cython.declare(T2=cython.double, T4=cython.double)
        T2 = T*T
        T4 = T2*T2
        # H/RT = a1 + a2 T /2 + a3 T^2 /3 + a4 T^3 /4 + a5 T^4 /5 + a6/T
        return (self.c0 + self.c1*T/2 + self.c2*T2/3 + self.c3*T2*T/4 + self.c4*T4/5 + self.c5/T) * constants.R * T

    def getEntropy(self, T):
        """
        Return the entropy in J/mol*K at the specified temperature `T` in
        K.
        """
        cython.declare(T2=cython.double, T4=cython.double)
        T2 = T*T
        T4 = T2*T2
        # S/R = a1 lnT + a2 T + a3 T^2 /2 + a4 T^3 /3 + a5 T^4 /4 + a7
        return ( self.c0*math.log(T) + self.c1*T + self.c2*T2/2 +
            self.c3*T2*T/3 + self.c4*T4/4 + self.c6 ) * constants.R

    def getFreeEnergy(self, T):
        """
        Return the Gibbs free energy in J/mol at the specified temperature
        `T` in K, computed as G = H - T*S.
        """
        return self.getEnthalpy(T) - T * self.getEntropy(T)

    def toCantera(self):
        """
        Return a Cantera ctml_writer instance equivalent to this polynomial.
        """
        import ctml_writer
        return ctml_writer.NASA([self.Tmin,self.Tmax], [self.c0, self.c1, self.c2, self.c3, self.c4, self.c5, self.c6])
################################################################################
class NASAModel(ThermoModel):
    """
    A set of thermodynamic parameters given by NASA polynomials. This class
    stores a list of :class:`NASAPolynomial` objects in the `polynomials`
    attribute. When evaluating a thermodynamic quantity, a polynomial that
    contains the desired temperature within its valid range will be used.
    """

    def __init__(self, polynomials=None, Tmin=0.0, Tmax=0.0, comment=''):
        ThermoModel.__init__(self, Tmin=Tmin, Tmax=Tmax, comment=comment)
        # Default to an empty polynomial list rather than a shared mutable default.
        self.polynomials = polynomials or []

    def __repr__(self):
        """
        Return a string representation that can be used to reconstruct the
        object.
        """
        return 'NASAModel(Tmin=%g, Tmax=%g, polynomials=%s)' % (self.Tmin, self.Tmax, self.polynomials)

    def getHeatCapacity(self, T):
        """
        Return the constant-pressure heat capacity (Cp) in J/mol*K at the
        specified temperature `T` in K, delegating to the polynomial whose
        valid range contains `T`.
        """
        return self.__selectPolynomialForTemperature(T).getHeatCapacity(T)

    def getEnthalpy(self, T):
        """
        Return the enthalpy in J/mol at the specified temperature `T` in K,
        delegating to the polynomial whose valid range contains `T`.
        """
        return self.__selectPolynomialForTemperature(T).getEnthalpy(T)

    def getEntropy(self, T):
        """
        Return the entropy in J/mol*K at the specified temperature `T` in K,
        delegating to the polynomial whose valid range contains `T`.
        """
        return self.__selectPolynomialForTemperature(T).getEntropy(T)

    def getFreeEnergy(self, T):
        """
        Return the Gibbs free energy in J/mol at the specified temperature
        `T` in K, delegating to the polynomial whose valid range contains `T`.
        """
        return self.__selectPolynomialForTemperature(T).getFreeEnergy(T)

    def __selectPolynomialForTemperature(self, T):
        # Return the first polynomial whose [Tmin, Tmax] range contains T.
        poly = cython.declare(NASAPolynomial)
        # for...else: the else clause runs only if the loop finishes without
        # returning, i.e. no polynomial covers T.
        for poly in self.polynomials:
            if poly.isTemperatureValid(T): return poly
        else:
            raise ThermoError("No valid NASA polynomial found for T=%g K" % T)

    def toCantera(self):
        """
        Return a Cantera ctml_writer instance (a tuple of the per-range
        NASA polynomial objects).
        """
        return tuple([poly.toCantera() for poly in self.polynomials])
################################################################################
|
jwallen/ChemPy
|
chempy/thermo.py
|
Python
|
mit
| 24,040
|
[
"ChemPy"
] |
5e7dadf85f333953b6d35d9049bbe9b8b870b5cfa2fbc0536d011ae08183ea69
|
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Execute computations asynchronously using threads or processes."""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
from serge.blocks.concurrent.futures._base import (FIRST_COMPLETED,
FIRST_EXCEPTION,
ALL_COMPLETED,
CancelledError,
TimeoutError,
Future,
Executor,
wait,
as_completed)
from serge.blocks.concurrent.futures.process import ProcessPoolExecutor
from serge.blocks.concurrent.futures.thread import ThreadPoolExecutor
|
smmosquera/serge
|
blocks/concurrent/futures/__init__.py
|
Python
|
lgpl-3.0
| 839
|
[
"Brian"
] |
46d9dcc05721a93d1ce88306046b35bfccddcff45e7aeef9b1fdb59d11b01256
|
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Petra Gospodnetic, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import math
import signal
import sys
import time
import threading
import os
import appleseed as asr
# Base name (without extension) of the .appleseed project file written by main().
output_scene_name = "4-colored-point-lights"
def build_project():
    """
    Build and return an appleseed test project containing a single plane
    lit by four colored omni lights, viewed by an orthographic camera.

    Returns:
        asr.Project: the fully assembled project, ready to be written to disk.
    """
    # Create an empty project.
    project = asr.Project("4-point-lights")
    paths = project.get_search_paths()
    paths.append("data")
    project.set_search_paths(paths)
    # Add default configurations to the project.
    project.add_default_configurations()
    # Set the number of samples. This is basically the quality parameter: the higher the number
    # of samples, the smoother the image but the longer the rendering time.
    # todo: fix.
    conf = project.configurations()["final"]
    conf.insert_path("uniform_pixel_renderer.samples", 1)
    # Create a scene.
    scene = asr.Scene()
    # Create an assembly.
    assembly = asr.Assembly("assembly")
    # Prepare the orientation of all the objects in the scene.
    orientation = asr.Matrix4d.make_rotation(asr.Vector3d(1.0, 0.0, 0.0), math.radians(-90.0))
    #------------------------------------------------------------------------
    # Materials
    #------------------------------------------------------------------------
    # Create a material called "01 - Default_mat" and insert it into the assembly.
    assembly.materials().insert(asr.Material(
        "disney_material",
        "01 - Default_mat",
        {
            "alpha_map": "1",
            "layer1": {
                "anisotropic": "0",
                "base_color": "[1, 1, 1]",
                "clearcoat": "0",
                "clearcoat_gloss": "0",
                "layer_name": "layer1",
                "layer_number": "0",
                "mask": "1.0",
                "metallic": "0",
                "roughness": "1",
                "sheen": "0",
                "sheen_tint": "0",
                "specular": "0",
                "specular_tint": "0",
                "subsurface": "0.0"
            }
        }))
    #------------------------------------------------------------------------
    # Geometry
    #------------------------------------------------------------------------
    # Load the scene geometry from disk.
    objects = asr.MeshObjectReader.read(project.get_search_paths(), "plane", {"filename": "Plane001.binarymesh"})
    # Insert all the objects into the assembly.
    # NOTE: loop variable renamed from ``object`` to avoid shadowing the builtin.
    for obj in objects:
        # Create an instance of this object and insert it into the assembly.
        instance_name = obj.get_name() + "_inst"
        material_name = {"material_slot_0": "01 - Default_mat"}
        mat = orientation * asr.Matrix4d.make_translation(asr.Vector3d(0.0, 0.0, 0.0))
        instance = asr.ObjectInstance(
            instance_name,
            {"visibility":
                {
                    "camera": "true",
                    "diffuse": "true",
                    "glossy": "true",
                    "light": "true",
                    "probe": "true",
                    "shadow": "true",
                    "specular": "true",
                    "subsurface": "true",
                    "transparency": "true"
                }},
            obj.get_name(),
            asr.Transformd(mat),
            material_name,
            material_name)
        assembly.object_instances().insert(instance)
        # Insert this object into the scene.
        assembly.objects().insert(obj)
    #------------------------------------------------------------------------
    # Lights
    #------------------------------------------------------------------------
    # Create a list of colors and for each of them create a light.
    light_colors = {
        "white": [1.0, 1.0, 1.0],
        "red": [1.0, 0.0, 0.0],
        "green": [0.0, 1.0, 0.0],
        "blue": [0.0, 0.0, 1.0]
    }
    light_positions = [
        asr.Vector3d(25.0, -25.0, 5.0),
        asr.Vector3d(-25.0, -25.0, 5.0),
        asr.Vector3d(25.0, 25.0, 5.0),
        asr.Vector3d(-25.0, 25.0, 5.0)
    ]
    # BUG FIX: the original computed ``light_colors.keys().index(key)``, which
    # is O(n) per iteration and fails on Python 3 (dict views have no .index).
    # enumerate() over items() pairs each color with its position identically.
    for idx, (key, rgb) in enumerate(light_colors.items()):
        color_name = "color_" + key
        # Add colors to the project.
        assembly.colors().insert(asr.ColorEntity(color_name, {"color_space": "linear_rgb", "multiplier": 1.0}, rgb))
        light_name = "light_" + key
        # Create the light.
        light = asr.Light("max_omni_light", light_name, {
            "decay_exponent": "0",
            "decay_start": "40",
            "intensity": color_name,
            "intensity_multiplier": "3.14159"
        })
        mat = orientation * asr.Matrix4d.make_translation(light_positions[idx])
        light.set_transform(asr.Transformd(mat))
        assembly.lights().insert(light)
    #------------------------------------------------------------------------
    # Assembly instance
    #------------------------------------------------------------------------
    # Create an instance of the assembly and insert it into the scene.
    assembly_inst = asr.AssemblyInstance("assembly_inst", {}, assembly.get_name())
    assembly_inst.transform_sequence().set_transform(0.0, asr.Transformd(asr.Matrix4d.identity()))
    scene.assembly_instances().insert(assembly_inst)
    # Insert the assembly into the scene.
    scene.assemblies().insert(assembly)
    #------------------------------------------------------------------------
    # Environment
    #------------------------------------------------------------------------
    # Create an environment called "env" and bind it to the scene.
    scene.set_environment(asr.Environment("env", {}))
    #------------------------------------------------------------------------
    # Camera
    #------------------------------------------------------------------------
    # Create an orthographic camera with film dimensions 128 x 128 in.
    params = {
        "controller_target": "0 0 0",
        "film_dimensions": "128 128",
        "near_z": "-0.1",
        "shutter_close_time": "1.0",
        "shutter_open_time": "0.0"
    }
    camera = asr.Camera("orthographic_camera", "camera", params)
    # Place and orient the camera. By default cameras are located in (0.0, 0.0, 0.0)
    # and are looking toward Z- (0.0, 0.0, -1.0).
    mat = orientation * asr.Matrix4d.make_translation(asr.Vector3d(0.0, 0.0, 0.0))
    camera.transform_sequence().set_transform(0.0, asr.Transformd(mat))
    # Bind the camera to the scene.
    scene.cameras().insert(camera)
    #------------------------------------------------------------------------
    # Frame
    #------------------------------------------------------------------------
    # Create a frame and bind it to the project.
    params = {
        "camera": "camera",
        "clamping": "false",
        "color_space": "srgb",
        "filter": "box",
        "filter_size": "0.5",
        "gamma_correction": "1.0",
        "pixel_format": "float",
        "premultiplied_alpha": "true",
        "resolution": "512 512",
        "tile_size": "64 64"}
    project.set_frame(asr.Frame("beauty", params))
    # Bind the scene to the project.
    project.set_scene(scene)
    return project
def main():
    """Build the example project and serialize it to an .appleseed file."""
    scene_project = build_project()
    output_file = output_scene_name + ".appleseed"
    asr.ProjectFileWriter().write(scene_project, output_file)
if __name__ == "__main__":
main()
|
aytekaman/appleseed
|
sandbox/tests/test scenes/many light sampling/generators/4_point_lights_test_scene.py
|
Python
|
mit
| 8,605
|
[
"VisIt"
] |
e5211b66bd4c6f17338a3895c124def743ad93e1e3794e3ba4645e3660b098a7
|
from ase.tasks.main import run
# Compute O2 and O energies with NWChem via the ASE command-line task runner.
atoms, task = run("nwchem molecule O2 O -p task=gradient")
# Re-run in "summarize" mode to load the stored results into task.data.
atoms, task = run('nwchem molecule O2 O -s')
# Atomization energy: 2 * E(O atom) - E(O2 molecule), in eV.
ae = 2 * task.data['O']['energy'] - task.data['O2']['energy']
# Regression check against the known reference value.
assert abs(ae - 6.605) < 1e-3
|
alexei-matveev/ase-local
|
ase/test/nwchem/nwchem_cmdline.py
|
Python
|
gpl-2.0
| 228
|
[
"ASE",
"NWChem"
] |
1c446877f4d080e58180341f6bb720c03e86dfede899dcafd02f3c12650c0339
|
import serial
import inspect
import time
import itertools
from util import two_byte_iter_to_str, to_two_bytes
# Message command bytes - straight from Firmata.h
# (Values below 0xF0 carry the pin/port number in their low nibble.)
DIGITAL_MESSAGE = 0x90      # send data for a digital pin
ANALOG_MESSAGE = 0xE0       # send data for an analog pin (or PWM)
DIGITAL_PULSE = 0x91        # SysEx command to send a digital pulse
# PULSE_MESSAGE = 0xA0      # proposed pulseIn/Out msg (SysEx)
# SHIFTOUT_MESSAGE = 0xB0   # proposed shiftOut msg (SysEx)
REPORT_ANALOG = 0xC0        # enable analog input by pin #
REPORT_DIGITAL = 0xD0       # enable digital input by port pair
START_SYSEX = 0xF0          # start a MIDI SysEx msg
SET_PIN_MODE = 0xF4         # set a pin to INPUT/OUTPUT/PWM/etc
END_SYSEX = 0xF7            # end a MIDI SysEx msg
REPORT_VERSION = 0xF9       # report firmware version
SYSTEM_RESET = 0xFF         # reset from MIDI
QUERY_FIRMWARE = 0x79       # query the firmware name
# extended command set using sysex (0-127/0x00-0x7F)
# 0x00-0x0F reserved for user-defined commands */
SERVO_CONFIG = 0x70         # set max angle, minPulse, maxPulse, freq
STRING_DATA = 0x71          # a string message with 14-bits per char
SHIFT_DATA = 0x75           # a bitstream to/from a shift register
I2C_REQUEST = 0x76          # send an I2C read/write request
I2C_REPLY = 0x77            # a reply to an I2C read request
I2C_CONFIG = 0x78           # config I2C settings such as delay times and power pins
REPORT_FIRMWARE = 0x79      # report name and version of the firmware
SAMPLING_INTERVAL = 0x7A    # set the poll rate of the main loop
SYSEX_NON_REALTIME = 0x7E   # MIDI Reserved for non-realtime messages
SYSEX_REALTIME = 0x7F       # MIDI Reserved for realtime messages
# Pin modes.
# except from UNAVAILABLE taken from Firmata.h
UNAVAILABLE = -1
INPUT = 0          # as defined in wiring.h
OUTPUT = 1         # as defined in wiring.h
ANALOG = 2         # analog pin in analogInput mode
PWM = 3            # digital pin in PWM output mode
SERVO = 4          # digital pin in SERVO mode
# Pin types
DIGITAL = OUTPUT   # same as OUTPUT below
# ANALOG is already defined above
class PinAlreadyTakenError(Exception):
    """Raised by Board.get_pin when the requested pin was already claimed."""
    pass
class InvalidPinDefError(Exception):
    """Raised by Board.get_pin when a pin definition string is malformed,
    out of range, or refers to an UNAVAILABLE pin."""
    pass
class NoInputWarning(RuntimeWarning):
    """Warning category for situations where no input data is received."""
    pass
class Board(object):
    """
    Base class for any board.

    Wraps a serial connection to a Firmata-running microcontroller and
    exposes its pins as :class:`Pin` (grouped in :class:`Port` objects
    where the firmware supports port-based digital I/O).
    """
    firmata_version = None
    firmware = None
    firmware_version = None
    # NOTE(review): these mutable containers are class-level attributes, so
    # they are shared between Board instances until an instance rebinds them;
    # add_cmd_handler mutates the shared dict — confirm this is intended.
    _command_handlers = {}
    _command = None
    _stored_data = []
    _parsing_sysex = False
    def __init__(self, port, layout, baudrate=57600, name=None):
        """
        :arg port: Serial port name/device the board is attached to.
        :arg layout: Board layout dict (analog/digital/pwm/disabled pin lists
            plus a 'use_ports' flag) used by :meth:`setup_layout`.
        :arg baudrate: Serial baudrate (Firmata default is 57600).
        :arg name: Optional human-readable name; defaults to the port string.
        """
        self.sp = serial.Serial(port, baudrate)
        # Allow 5 secs for Arduino's auto-reset to happen
        # Alas, Firmata blinks it's version before printing it to serial
        # For 2.3, even 5 seconds might not be enough.
        # TODO Find a more reliable way to wait until the board is ready
        self.pass_time(5)
        self.name = name
        if not self.name:
            self.name = port
        self.setup_layout(layout)
        # Iterate over the first messages to get firmware data
        while self.bytes_available():
            self.iterate()
        # TODO Test whether we got a firmware name and version, otherwise there
        # probably isn't any Firmata installed
    def __str__(self):
        return "Board %s on %s" % (self.name, self.sp.port)
    def __del__(self):
        '''
        The connection with the a board can get messed up when a script is
        closed without calling board.exit() (which closes the serial
        connection). Therefore also do it here and hope it helps.
        '''
        self.exit()
    def send_as_two_bytes(self, val):
        # Firmata encodes values as two 7-bit bytes: LSB (val % 128) then MSB.
        self.sp.write(chr(val % 128) + chr(val >> 7))
    def setup_layout(self, board_layout):
        """
        Setup the Pin instances based on the given board-layout. Maybe it will
        be possible to do this automatically in the future, by polling the
        board for its type.
        """
        # Create pin instances based on board layout
        self.analog = []
        for i in board_layout['analog']:
            self.analog.append(Pin(self, i))
        # Only create digital ports if the Firmata can use them (ie. not on the Mega...)
        # TODO Why is (TOTAL_FIRMATA_PINS + 7) / 8 used in Firmata?
        if board_layout['use_ports']:
            self.digital = []
            self.digital_ports = []
            # NOTE(review): divisor 7 looks suspicious given the 8-pin ports
            # and the (N + 7) / 8 formula in Firmata — confirm against layouts.
            for i in range(len(board_layout['digital']) / 7):
                self.digital_ports.append(Port(self, i))
            # Allow to access the Pin instances directly
            for port in self.digital_ports:
                self.digital += port.pins
            for i in board_layout['pwm']:
                self.digital[i].PWM_CAPABLE = True
        else:
            self.digital = []
            for i in board_layout['digital']:
                self.digital.append(Pin(self.sp, i, type=DIGITAL))
        # Disable certain ports like Rx/Tx and crystal ports
        for i in board_layout['disabled']:
            self.digital[i].mode = UNAVAILABLE
        # Create a dictionary of 'taken' pins. Used by the get_pin method
        self.taken = { 'analog' : dict(map(lambda p: (p.pin_number, False), self.analog)),
                       'digital' : dict(map(lambda p: (p.pin_number, False), self.digital)) }
        # Setup default handlers for standard incoming commands
        self.add_cmd_handler(ANALOG_MESSAGE, self._handle_analog_message)
        self.add_cmd_handler(DIGITAL_MESSAGE, self._handle_digital_message)
        self.add_cmd_handler(REPORT_VERSION, self._handle_report_version)
        self.add_cmd_handler(REPORT_FIRMWARE, self._handle_report_firmware)
    def add_cmd_handler(self, cmd, func):
        """
        Adds a command handler for a command.

        The handler is wrapped so that :meth:`iterate` can read its
        ``bytes_needed`` attribute (number of data bytes the command expects,
        derived from the handler's argument count minus ``self``).
        """
        len_args = len(inspect.getargspec(func)[0])
        def add_meta(f):
            def decorator(*args, **kwargs):
                f(*args, **kwargs)
            decorator.bytes_needed = len_args - 1 # exclude self
            decorator.__name__ = f.__name__
            return decorator
        func = add_meta(func)
        self._command_handlers[cmd] = func
    def get_pin(self, pin_def):
        """
        Returns the activated pin given by the pin definition.
        May raise an ``InvalidPinDefError`` or a ``PinAlreadyTakenError``.
        :arg pin_def: Pin definition as described in TODO,
            but without the arduino name. So for example ``a:1:i``.
        """
        if type(pin_def) == list:
            bits = pin_def
        else:
            bits = pin_def.split(':')
        # bits = [ 'a'|'d', pin number, mode letter ('i','o','p','s') ]
        a_d = bits[0] == 'a' and 'analog' or 'digital'
        part = getattr(self, a_d)
        pin_nr = int(bits[1])
        if pin_nr >= len(part):
            raise InvalidPinDefError('Invalid pin definition: %s at position 3 on %s' % (pin_def, self.name))
        if getattr(part[pin_nr], 'mode', None)  == UNAVAILABLE:
            raise InvalidPinDefError('Invalid pin definition: UNAVAILABLE pin %s at position on %s' % (pin_def, self.name))
        if self.taken[a_d][pin_nr]:
            raise PinAlreadyTakenError('%s pin %s is already taken on %s' % (a_d, bits[1], self.name))
        # ok, should be available
        pin = part[pin_nr]
        self.taken[a_d][pin_nr] = True
        if pin.type is DIGITAL:
            if bits[2] == 'p':
                pin.mode = PWM
            elif bits[2] == 's':
                pin.mode = SERVO
            # NOTE(review): ``is not`` performs identity comparison on a string
            # literal; works for interned short strings in CPython but ``!=``
            # would be the safe spelling — confirm before relying on it.
            elif bits[2] is not 'o':
                pin.mode = INPUT
        else:
            pin.enable_reporting()
        return pin
    def pass_time(self, t):
        """
        Non-blocking time-out for ``t`` seconds.
        """
        cont = time.time() + t
        while time.time() < cont:
            time.sleep(0)
    def send_sysex(self, sysex_cmd, data=[]):
        """
        Sends a SysEx msg.
        :arg sysex_cmd: A sysex command byte
        :arg data: A list of 7-bit bytes of arbitrary data (bytes may be
            already converted to chr's)
        """
        self.sp.write(chr(START_SYSEX))
        self.sp.write(chr(sysex_cmd))
        for byte in data:
            try:
                byte = chr(byte)
            except TypeError:
                pass # byte is already a chr
            except ValueError:
                raise ValueError('Sysex data can be 7-bit bytes only. '
                    'Consider using utils.to_two_bytes for bigger bytes.')
            self.sp.write(byte)
        self.sp.write(chr(END_SYSEX))
    def bytes_available(self):
        # Number of bytes waiting in the serial input buffer.
        return self.sp.inWaiting()
    def iterate(self):
        """
        Reads and handles data from the microcontroller over the serial port.
        This method should be called in a main loop, or in an
        :class:`Iterator` instance to keep this boards pin values up to date
        """
        byte = self.sp.read()
        if not byte:
            return
        data = ord(byte)
        received_data = []
        handler = None
        if data < START_SYSEX:
            # These commands can have 'channel data' like a pin nummber appended.
            try:
                # High nibble selects the command, low nibble is the channel.
                handler = self._command_handlers[data & 0xF0]
            except KeyError:
                return
            received_data.append(data & 0x0F)
            while len(received_data) < handler.bytes_needed:
                received_data.append(ord(self.sp.read()))
        elif data == START_SYSEX:
            # SysEx messages: command byte, then data bytes until END_SYSEX.
            data = ord(self.sp.read())
            handler = self._command_handlers.get(data)
            if not handler:
                return
            data = ord(self.sp.read())
            while data != END_SYSEX:
                received_data.append(data)
                data = ord(self.sp.read())
        else:
            try:
                handler = self._command_handlers[data]
            except KeyError:
                return
            while len(received_data) < handler.bytes_needed:
                received_data.append(ord(self.sp.read()))
        # Handle the data
        try:
            handler(*received_data)
        except ValueError:
            pass
    def get_firmata_version(self):
        """
        Returns a version tuple (major, minor) for the firmata firmware on the
        board.
        """
        return self.firmata_version
    def servo_config(self, pin, min_pulse=544, max_pulse=2400, angle=0):
        """
        Configure a pin as servo with min_pulse, max_pulse and first angle.
        ``min_pulse`` and ``max_pulse`` default to the arduino defaults.
        """
        if pin > len(self.digital) or self.digital[pin].mode == UNAVAILABLE:
            raise IOError("Pin %s is not a valid servo pin")
        data = itertools.chain([pin], to_two_bytes(min_pulse),
                                to_two_bytes(max_pulse))
        self.send_sysex(SERVO_CONFIG, data)
        # set pin._mode to SERVO so that it sends analog messages
        # don't set pin.mode as that calls this method
        self.digital[pin]._mode = SERVO
        self.digital[pin].write(angle)
    def exit(self):
        """ Call this to exit cleanly. """
        # First detach all servo's, otherwise it somehow doesn't want to close...
        # FIXME
        for pin in self.digital:
            if pin.mode == SERVO:
                pin.mode = OUTPUT
        if hasattr(self, 'sp'):
            self.sp.close()
    # Command handlers
    def _handle_analog_message(self, pin_nr, lsb, msb):
        # Scale the 10-bit ADC reading (0..1023) to 0.0..1.0.
        value = round(float((msb << 7) + lsb) / 1023, 4)
        # Only set the value if we are actually reporting
        try:
            if self.analog[pin_nr].reporting:
                self.analog[pin_nr].value = value
        except IndexError:
            raise ValueError
    def _handle_digital_message(self, port_nr, lsb, msb):
        """
        Digital messages always go by the whole port. This means we have a
        bitmask wich we update the port.
        """
        mask = (msb << 7) + lsb
        try:
            self.digital_ports[port_nr]._update(mask)
        except IndexError:
            raise ValueError
    def _handle_report_version(self, major, minor):
        self.firmata_version = (major, minor)
    def _handle_report_firmware(self, *data):
        # First two data bytes are the version; the rest is the name
        # encoded as 14-bit (two-byte) characters.
        major = data[0]
        minor = data[1]
        self.firmware_version = (major, minor)
        self.firmware = two_byte_iter_to_str(data[2:])
class Port(object):
    """An 8-bit digital port on the board.

    Groups eight consecutive digital :class:`Pin` instances and speaks the
    port-level Firmata messages (REPORT_DIGITAL / DIGITAL_MESSAGE) for them.
    """
    def __init__(self, board, port_number):
        self.board = board
        self.port_number = port_number
        self.reporting = False
        # The port owns pins [port_number*8, port_number*8 + 7].
        base = self.port_number * 8
        self.pins = [Pin(self.board, base + offset, type=DIGITAL, port=self)
                     for offset in range(8)]
    def __str__(self):
        return "Digital Port {0} on {1}".format(self.port_number, self.board)
    def enable_reporting(self):
        """Enable reporting of values for the whole port."""
        self.reporting = True
        # REPORT_DIGITAL + port number, followed by a 1 ("on") byte.
        self.board.sp.write(chr(REPORT_DIGITAL + self.port_number) + chr(1))
        for pin in self.pins:
            if pin.mode == INPUT:
                pin.reporting = True  # TODO Shouldn't this happen at the pin?
    def disable_reporting(self):
        """Disable the reporting of the port."""
        self.reporting = False
        # Same message as enable_reporting but with a 0 ("off") byte.
        self.board.sp.write(chr(REPORT_DIGITAL + self.port_number) + chr(0))
    def write(self):
        """Set the output pins of the port to the correct state."""
        mask = 0
        for pin in self.pins:
            if pin.mode == OUTPUT and pin.value == 1:
                mask |= 1 << (pin.pin_number - self.port_number * 8)
        # DIGITAL_MESSAGE carries the mask split into two 7-bit bytes.
        payload = chr(DIGITAL_MESSAGE + self.port_number)
        payload += chr(mask % 128)
        payload += chr(mask >> 7)
        self.board.sp.write(payload)
    def _update(self, mask):
        """Update the values for the pins marked as input with the mask."""
        if not self.reporting:
            return
        for pin in self.pins:
            if pin.mode is INPUT:
                bit = pin.pin_number - self.port_number * 8
                pin.value = (mask & (1 << bit)) > 0
class Pin(object):
    """
    A Pin representation.

    Holds the pin's mode, last known value, and (for digital pins on
    port-capable boards) a back-reference to its :class:`Port`.
    """
    def __init__(self, board, pin_number, type=ANALOG, port=None):
        # NOTE: parameter name ``type`` shadows the builtin; kept for
        # backward compatibility with existing callers.
        self.board = board
        self.pin_number = pin_number
        self.type = type
        self.port = port
        self.PWM_CAPABLE = False
        # Digital pins default to OUTPUT, analog pins to INPUT.
        self._mode = (type == DIGITAL and OUTPUT or INPUT)
        self.reporting = False
        self.value = None
    def __str__(self):
        type = {ANALOG : 'Analog', DIGITAL : 'Digital'}[self.type]
        return "%s pin %d" % (type, self.pin_number)
    def _set_mode(self, mode):
        """Mode setter backing the ``mode`` property; validates the
        transition and informs the board via a SET_PIN_MODE message."""
        if mode is UNAVAILABLE:
            self._mode = UNAVAILABLE
            return
        if self._mode is UNAVAILABLE:
            raise IOError("%s can not be used through Firmata" % self)
        if mode is PWM and not self.PWM_CAPABLE:
            raise IOError("%s does not have PWM capabilities" % self)
        if mode == SERVO:
            if self.type != DIGITAL:
                raise IOError("Only digital pins can drive servos! %s is not"
                    "digital" % self)
            self._mode = SERVO
            # servo_config sends the SysEx and performs the first write.
            self.board.servo_config(self.pin_number)
            return
        # Set mode with SET_PIN_MODE message
        self._mode = mode
        command = chr(SET_PIN_MODE)
        command += chr(self.pin_number)
        command += chr(mode)
        self.board.sp.write(command)
        if mode == INPUT:
            self.enable_reporting()
    def _get_mode(self):
        return self._mode
    mode = property(_get_mode, _set_mode)
    """
    Mode of operation for the pin. Can be one of the pin modes: INPUT, OUTPUT,
    ANALOG, PWM or SERVO (or UNAVAILABLE)
    """
    def enable_reporting(self):
        """ Set an input pin to report values """
        if self.mode is not INPUT:
            raise IOError, "%s is not an input and can therefore not report" % self
        if self.type == ANALOG:
            self.reporting = True
            msg = chr(REPORT_ANALOG + self.pin_number)
            msg += chr(1)
            self.board.sp.write(msg)
        else:
            # Digital reporting is port-wide, so delegate to the port.
            self.port.enable_reporting() # TODO This is not going to work for non-optimized boards like Mega
    def disable_reporting(self):
        """ Disable the reporting of an input pin """
        if self.type == ANALOG:
            self.reporting = False
            msg = chr(REPORT_ANALOG + self.pin_number)
            msg += chr(0)
            self.board.sp.write(msg)
        else:
            self.port.disable_reporting() # TODO This is not going to work for non-optimized boards like Mega
    def read(self):
        """
        Returns the output value of the pin. This value is updated by the
        boards :meth:`Board.iterate` method. Value is alway in the range 0.0 - 1.0
        """
        if self.mode == UNAVAILABLE:
            raise IOError, "Cannot read pin %s"% self.__str__()
        return self.value
    def write(self, value):
        """
        Output a voltage from the pin
        :arg value: Uses value as a boolean if the pin is in output mode, or
            expects a float from 0 to 1 if the pin is in PWM mode. If the pin
            is in SERVO the value should be in degrees.
        """
        if self.mode is UNAVAILABLE:
            raise IOError, "%s can not be used through Firmata" % self
        if self.mode is INPUT:
            raise IOError, "%s is set up as an INPUT and can therefore not be written to" % self
        if value is not self.value:
            self.value = value
            if self.mode is OUTPUT:
                if self.port:
                    # Digital writes are sent for the whole port at once.
                    self.port.write()
                else:
                    msg = chr(DIGITAL_MESSAGE)
                    msg += chr(self.pin_number)
                    msg += chr(value)
                    self.board.sp.write(msg)
            elif self.mode is PWM:
                # Scale 0.0-1.0 to the 8-bit PWM range and split into 7-bit bytes.
                value = int(round(value * 255))
                msg = chr(ANALOG_MESSAGE + self.pin_number)
                msg += chr(value % 128)
                msg += chr(value >> 7)
                self.board.sp.write(msg)
            elif self.mode is SERVO:
                value = int(value)
                msg = chr(ANALOG_MESSAGE + self.pin_number)
                msg += chr(value % 128)
                msg += chr(value >> 7)
                self.board.sp.write(msg)
|
hatchetation/pyFirmata
|
pyfirmata/pyfirmata.py
|
Python
|
bsd-3-clause
| 18,886
|
[
"CRYSTAL"
] |
f4bec138938b581f06d35160ef1a96b4921fa1756744359471222cb357a903a6
|
#galaxy.py
from __future__ import division
import numpy
import math
import matplotlib
import matplotlib.pyplot
import time
import pickle
import sys
from custom import loopProgress
from custom import rotate
from custom import build_distance_matrix
from custom import partitionData
class galaxy (object):
    """A toy gas-galaxy simulation on a square grid.

    Each grid cell carries a gas mass and a velocity; emitters inject mass
    each time step, after which cells gravitate (partition-approximated)
    and move. Mass that leaves the grid is tallied in ``ejected_mass``.
    """
    def __init__ (self, size, emitterList):
        """
        :arg size: side length of the square simulation grid.
        :arg emitterList: iterable of (mass, x, y) tuples describing emitters.
        """
        MASS = 0; X = 1; Y = 2
        #parameters
        self.galaxyMass = 0
        self.ejected_mass = 0
        self.size = size
        self.center_point = (int(size / 2), int(size / 2))
        #emitters
        self.emitterList = emitterList
        for i, emitter in enumerate(self.emitterList):
            self.emitterList[i] = galaxy.emitter(self, emitter[MASS], emitter[X], emitter[Y])
        #data matrixes
        #self.distance_matrix = pickle.load(open("data/matrix"+str(size)+".p", "rb")) #not tested yet!!!
        self.masses = numpy.zeros((size, size))
        self.x_velocities = numpy.zeros((size, size))
        self.y_velocities = numpy.zeros((size, size))
        self._masses = numpy.empty((size, size))
        self._x_velocities = numpy.empty((size, size))
        self._y_velocities = numpy.empty((size, size))
        self.partitionInstance = partitionData(self.masses)
    def time_step (self):
        """Advance the simulation one step: emit, gravitate, move, swap buffers."""
        self._masses.fill(0)
        self._x_velocities.fill(0)
        self._y_velocities.fill(0)
        for emitter in self.emitterList:
            emitter.emit()
        self._gravitate()
        self._move()
        #self._diffuse()
        self.masses, self._masses = self._masses, self.masses
        self.x_velocities, self._x_velocities = self._x_velocities, self.x_velocities
        self.y_velocities, self._y_velocities = self._y_velocities, self.y_velocities
    def _add_to_location (self, mass, x, y, x_velocity, y_velocity):
        """Deposit ``mass`` with the given velocity into back-buffer cell (x, y).

        Off-grid (or NaN) coordinates count as ejected mass. When the cell
        already holds mass, velocities combine as a mass-weighted average.
        """
        try:
            x = int(round(x)); y = int(round(y))
        except ValueError:
            pass
        #print("adding to point ",x,y)
        if x>=self.size or x<0 or y>=self.size or y<0 or math.isnan(x) or math.isnan(y):
            self.ejected_mass += mass
            self.galaxyMass -= mass
            #print("offmap")
        elif self._masses[x, y] == 0:
            self._masses[x, y] = mass
            self._x_velocities[x, y] = x_velocity
            self._y_velocities[x, y] = y_velocity
        else:
            # BUG FIX: the original did ``self._masses[x, y] += total_mass``
            # (double-counting the resident mass) and then computed the
            # weighted velocity with that corrupted mass. Use the old mass
            # for the weighting and store the true total.
            old_mass = self._masses[x, y]
            total_mass = old_mass + mass
            self._x_velocities[x, y] = (self._x_velocities[x, y] * old_mass + x_velocity * mass) / total_mass
            self._y_velocities[x, y] = (self._y_velocities[x, y] * old_mass + y_velocity * mass) / total_mass
            self._masses[x, y] = total_mass
    def _move (self):
        """Advect every occupied cell by its velocity into the back buffer."""
        for location, mass_here in numpy.ndenumerate(self.masses):
            X = 0; Y = 1
            if not mass_here: continue
            x_velocity = self.x_velocities[location]
            y_velocity = self.y_velocities[location]
            self._add_to_location(mass_here, location[X]+x_velocity, location[Y]+y_velocity, x_velocity, y_velocity)
    def _gravitate (self):
        '''
        [BACKGROUND]
        Under ideal circumstances, you'd gravitate every point in the galaxy towards
        every other point in the galaxy [note: the "galaxy" here is defined as an
        area where each point has a gas density, there are no solids]. That's an
        extrememly expensive computation, on the order of size^4.
        To help with that, what you can instead do is divide the galaxy into a
        series of partitions, and simplify each one into a point mass. This is essentially
        treating each partition as if it were a solid mass. Similarly to gravitating to
        actual solid masses, this simplification doesn't [to my knowledge] produce
        any issues so long as the origin point of the comparison is not contained
        within the solid mass [i.e. the partition]
        [IMPLEMENTATION]
        At the start of the simulation, assign every point to a partition. Then for every
        time step, get the COM [center of mass] and the total mass for for each partition.
        Then for every point, compare it to all the partitions, except the partition that
        it is inside of. For that partition, compare against every point.
        '''
        self.partitionInstance.data = self.masses #set points
        self.partitionInstance.calculateCenterOfMass() #calc COM
        for origin_point, origin_partition in self.partitionInstance.pointsToPartition.items():
            # BUG FIX: the original tested ``self.masses[local_point]`` here,
            # but ``local_point`` is not defined yet (NameError); the empty
            # check must apply to the origin point.
            if not self.masses[origin_point]:
                continue
            compare_against = list()
            # Same-partition points are compared individually...
            for local_point in self.partitionInstance.partitionToPoints[origin_partition]:
                if local_point == origin_point:
                    continue
                compare_against.append((local_point, self.masses[local_point]))
            # ...while every other partition is treated as a point mass at its COM.
            for other_partition, partition_center_of_mass in self.partitionInstance.partitionCenterOfMass.items():
                if origin_partition == other_partition:
                    continue
                compare_against.append((partition_center_of_mass, self.partitionInstance.partitionMass[other_partition]))
            self._apply_force_vector(origin_point, compare_against)
    def _apply_force_vector (self, origin_point, compare_against):
        """Apply gravitational acceleration to ``origin_point`` from the
        (position, mass) pairs in ``compare_against``.

        BUG FIX: the original omitted ``self`` and called
        ``self._add_to_location()`` with no arguments, which raised a
        TypeError whenever _gravitate reached it. Kept as a no-op stub until
        the reference implementation below is adapted.
        """
        # TODO(review): implement using the reference code kept below.
        '''
        for location_here, mass_here in numpy.ndenumerate(self.masses):
            X = 0; Y = 1; G = .1
            if not mass_here: continue
            distance_matrix_here = self.distance_matrix[location_here]
            acceleration_matrix = G * self.masses / (0.01+distance_matrix_here**2)
            x_velocity_change = 0
            y_velocity_change = 0
            for location_there, acceleration in numpy.ndenumerate(acceleration_matrix):
                if acceleration == 0 or (location_here == location_there): continue
                relative_x = location_there[X]-location_here[X]
                relative_y = location_there[Y]-location_here[Y]
                distance_to = distance_matrix_here[location_there[X], location_there[Y]]
                if distance_to == 0: continue
                x_velocity_change += relative_x*acceleration/distance_to
                y_velocity_change += relative_y*acceleration/distance_to
            self.x_velocities[location_here] += x_velocity_change
            self.y_velocities[location_here] += y_velocity_change
        '''
    def _buildForceVector (self, origin, comparisonPoints):
        """Unimplemented helper (BUG FIX: original omitted ``self``)."""
        # TODO(review): intended to accumulate a force vector for ``origin``.
        pass
    #very experimental diffusion code
    '''
    def _diffuse (self):
        MASS = 0; X = 1; Y = 2
        spreads = list()
        for location, mass in numpy.ndenumerate(self.masses):
            if (not mass) or (mass<1): continue
            xVelocity = self.x_velocities[location]
            yVelocity = self.y_velocities[location]
            spreadinst = dict()
            pil = dict()
            sumdist = 0
            for p,d in sphere.items():
                px,py = p[0]+x,p[1]+y
                try:
                    sumdist += d
                    data[px,py]
                    pil[px,py] = d
                except KeyError: pass
            #print("pil ",pil)
            for p,d in pil.items():
                px,py = p[0],p[1]
                div = d/sumdist
                spreadinst[px,py] = dict(vx=vx,vy=vy,m=m*div)
            #print("spread ",spreadinst)
            spreads.append(spreadinst)
        for spreadinst in spreads:
            for point,value in spreadinst.items():
                x,y = point[0],point[1]
                m = value["m"]
                vx = value["vx"]
                vy = value["vy"]
                self._add_to_location(self.mass, self.x, self.y, 0, 0)
    '''
    class emitter (object):
        """A rotating point source that injects mass into the grid each step."""
        def __init__ (self, galaxy, mass, x, y):
            self.galaxy = galaxy
            self.mass = mass
            self.x = x
            self.y = y
        def emit (self):
            # Deposit this emitter's mass at rest, then advance its rotation.
            self.galaxy._add_to_location(self.mass, self.x, self.y, 0, 0)
            self.galaxy.galaxyMass += self.mass
            self._spin()
        def _spin (self):
            # Rotate the emitter by pi/128 radians around the grid center.
            self.x, self.y = rotate(self.x, self.y, math.pi/128, self.galaxy.center_point)
if __name__ == "__main__":
    #argument parsing: sys.argv[1] = grid size, sys.argv[2] = frame count
    try: size = int(sys.argv[1])
    except IndexError:
        print("no size given, defaulting to 10")
        size = 10
    try: maxFrames = int(sys.argv[2])
    except IndexError:
        print("no frames given, defaulting to 20")
        maxFrames = 20
    #galaxy inits (only emitterCenter is used below; the others are presets)
    emitterTop = (10, round(size/10), size/2)
    emitterBottom = (10, round(9*size/10), size/2)
    emitterLeft = (10, size/2, round(size/10))
    emitterRight = (10, size/2, round(9*size/10))
    emitterCenter = (10, size/2, size/2)
    # NOTE(review): this rebinding shadows the ``galaxy`` class with an instance.
    galaxy = galaxy(size, [emitterCenter])
    #loop inits: record a snapshot of the mass grid for every frame
    frames = list()
    frames.append(numpy.copy(galaxy.masses))
    pb = loopProgress(maxFrames)
    for i in range(maxFrames):
        galaxy.time_step()
        frames.append(numpy.copy(galaxy.masses))
        pb.update(i)
    #save all frames as a single .npy file
    print("\nCreating: data/output_s"+str(size)+"_f"+str(maxFrames))
    numpy.save("data/output_s"+str(size)+"_f"+str(maxFrames), frames)
|
LynnCo/galaxySim
|
galaxy.py
|
Python
|
mit
| 9,254
|
[
"Galaxy"
] |
4d868316b657b2a60c7026316cf3fc28c445e5d21d6e20d8c30e48ced98050a7
|
# CREATED:2015-02-17 14:41:28 by Brian McFee <brian.mcfee@nyu.edu>
# this function is lifted wholesale from matploblib v1.4.2,
# and modified so that images are stored explicitly under the tests path
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import functools
import gc
import os
import sys
import shutil
import warnings
import unittest
import nose
import numpy as np
import matplotlib.units
from matplotlib import cbook
from matplotlib import ticker
from matplotlib import pyplot as plt
from matplotlib import ft2font
from matplotlib.testing.noseclasses import KnownFailureTest, \
KnownFailureDidNotFailTest, ImageComparisonFailure
from matplotlib.testing.compare import comparable_formats, compare_images, \
make_test_filename
def knownfailureif(fail_condition, msg=None, known_exception_class=None):
    """
    Assume a test will fail if *fail_condition* is True. *fail_condition*
    may also be False or the string 'indeterminate'.
    *msg* is the error message displayed for the test.
    If *known_exception_class* is not None, the failure is only known
    if the exception is an instance of this class. (Default = None)
    """
    # based on numpy.testing.dec.knownfailureif
    message = 'Test known to fail' if msg is None else msg
    def known_fail_decorator(func):
        # Local import to avoid a hard nose dependency and only incur the
        # import time overhead at actual test-time.
        import nose
        def failer(*args, **kwargs):
            try:
                # Always run the test (to generate images).
                outcome = func(*args, **kwargs)
            except Exception as err:
                if not fail_condition:
                    raise
                if known_exception_class is not None and not isinstance(err, known_exception_class):
                    # This is not the expected exception
                    raise
                # An error here when running nose means that you don't have the
                # matplotlib.testing.noseclasses:KnownFailure plugin in use.
                raise KnownFailureTest(message)
            if fail_condition and fail_condition != 'indeterminate':
                raise KnownFailureDidNotFailTest(message)
            return outcome
        return nose.tools.make_decorator(func)(failer)
    return known_fail_decorator
def _do_cleanup(original_units_registry):
    """Close all figures and restore shared matplotlib state after a test.

    *original_units_registry* is a snapshot of ``matplotlib.units.registry``
    taken before the test ran; it is reinstated here.
    """
    plt.close('all')
    gc.collect()

    # Re-run the test rcParams/backend setup so the next test starts clean.
    import matplotlib.testing
    matplotlib.testing.setup()

    # Restore the units registry to its pre-test contents.
    matplotlib.units.registry.clear()
    matplotlib.units.registry.update(original_units_registry)
    warnings.resetwarnings()  # reset any warning filters set in tests
class CleanupTest(object):
    """Nose test-class mixin that restores matplotlib state around a test.

    Subclasses (built dynamically by ``image_comparison``) provide a
    ``_func`` attribute holding the actual test function.
    """
    @classmethod
    def setup_class(cls):
        # Snapshot the units registry so teardown_class can restore it.
        cls.original_units_registry = matplotlib.units.registry.copy()

    @classmethod
    def teardown_class(cls):
        _do_cleanup(cls.original_units_registry)

    def test(self):
        # Run the wrapped test function (installed as a staticmethod by the
        # image_comparison decorator).
        self._func()
class CleanupTestCase(unittest.TestCase):
    '''A wrapper for unittest.TestCase that includes cleanup operations'''
    @classmethod
    def setUpClass(cls):
        # Snapshot the units registry so tearDownClass can restore it.
        import matplotlib.units
        cls.original_units_registry = matplotlib.units.registry.copy()

    @classmethod
    def tearDownClass(cls):
        _do_cleanup(cls.original_units_registry)
def cleanup(func):
    """Decorator: run *func*, then restore figures and the units registry.

    Cleanup runs in a ``finally`` block, so state is restored even when the
    wrapped test raises.
    """
    @functools.wraps(func)
    def wrapped_function(*args, **kwargs):
        original_units_registry = matplotlib.units.registry.copy()
        try:
            func(*args, **kwargs)
        finally:
            _do_cleanup(original_units_registry)
    return wrapped_function
def check_freetype_version(ver):
    """Return True when the installed freetype version is acceptable.

    *ver* may be None (any version is acceptable), a single version
    string (exact match required), or a (min, max) pair of version
    strings defining an inclusive range.
    """
    if ver is None:
        return True
    from distutils import version
    if isinstance(ver, six.string_types):
        ver = (ver, ver)
    lower, upper = (version.StrictVersion(v) for v in ver)
    installed = version.StrictVersion(ft2font.__freetype_version__)
    return lower <= installed <= upper
class ImageComparisonTest(CleanupTest):
    """Generated nose test class comparing saved figures against baselines.

    The class attributes ``_func``, ``_baseline_images``, ``_extensions``,
    ``_tol``, ``_freetype_version``, ``_remove_text`` and ``_savefig_kwarg``
    are filled in by the ``image_comparison`` decorator via ``type()``.
    """
    @classmethod
    def setup_class(cls):
        CleanupTest.setup_class()
        # Run the figure-generating function once per class rather than once
        # per output format.
        cls._func()

    @staticmethod
    def remove_text(figure):
        # Blank out the title and all tick labels so the comparison is less
        # sensitive to font-rendering differences across systems.
        figure.suptitle("")
        for ax in figure.get_axes():
            ax.set_title("")
            ax.xaxis.set_major_formatter(ticker.NullFormatter())
            ax.xaxis.set_minor_formatter(ticker.NullFormatter())
            ax.yaxis.set_major_formatter(ticker.NullFormatter())
            ax.yaxis.set_minor_formatter(ticker.NullFormatter())
            try:
                # 3D axes also carry a z axis; plain 2D axes raise
                # AttributeError here, which is expected and ignored.
                ax.zaxis.set_major_formatter(ticker.NullFormatter())
                ax.zaxis.set_minor_formatter(ticker.NullFormatter())
            except AttributeError:
                pass

    def test(self):
        # Generator test: yields one comparison sub-test per open figure and
        # per requested output extension.
        baseline_dir, result_dir = _image_directories(self._func)

        for fignum, baseline in zip(plt.get_fignums(), self._baseline_images):
            for extension in self._extensions:
                will_fail = not extension in comparable_formats()
                if will_fail:
                    fail_msg = 'Cannot compare %s files on this system' % extension
                else:
                    fail_msg = 'No failure expected'

                orig_expected_fname = os.path.join(baseline_dir, baseline) + '.' + extension
                if extension == 'eps' and not os.path.exists(orig_expected_fname):
                    # Fall back to a pdf baseline when no eps baseline exists.
                    orig_expected_fname = os.path.join(baseline_dir, baseline) + '.pdf'
                # Baselines are copied under result_dir so failures leave both
                # images side by side for inspection.
                expected_fname = make_test_filename(os.path.join(
                    result_dir, os.path.basename(orig_expected_fname)), 'expected')
                actual_fname = os.path.join(result_dir, baseline) + '.' + extension
                if os.path.exists(orig_expected_fname):
                    shutil.copyfile(orig_expected_fname, expected_fname)
                else:
                    will_fail = True
                    fail_msg = 'Do not have baseline image %s' % expected_fname

                @knownfailureif(
                    will_fail, fail_msg,
                    known_exception_class=ImageComparisonFailure)
                def do_test():
                    figure = plt.figure(fignum)

                    if self._remove_text:
                        self.remove_text(figure)

                    figure.savefig(actual_fname, **self._savefig_kwarg)
                    plt.close(figure)

                    err = compare_images(expected_fname, actual_fname,
                                         self._tol, in_decorator=True)

                    try:
                        if not os.path.exists(expected_fname):
                            raise ImageComparisonFailure(
                                'image does not exist: %s' % expected_fname)

                        if err:
                            raise ImageComparisonFailure(
                                'images not close: %(actual)s vs. %(expected)s '
                                '(RMS %(rms).3f)'%err)
                    except ImageComparisonFailure:
                        # A freetype mismatch explains rendering differences,
                        # so demote the failure to a known failure.
                        if not check_freetype_version(self._freetype_version):
                            raise KnownFailureTest(
                                "Mismatched version of freetype.  Test requires '%s', you have '%s'" %
                                (self._freetype_version, ft2font.__freetype_version__))
                        raise

                yield (do_test,)
def image_comparison(baseline_images=None, extensions=None, tol=13,
                     freetype_version=None, remove_text=False,
                     savefig_kwarg=None):
    """
    call signature::

      image_comparison(baseline_images=['my_figure'], extensions=None)

    Compare images generated by the test with those specified in
    *baseline_images*, which must correspond else an
    ImageComparisonFailure exception will be raised.

    Keyword arguments:

      *baseline_images*: list
        A list of strings specifying the names of the images generated
        by calls to :meth:`matplotlib.figure.savefig`.

      *extensions*: [ None | list ]
        If *None*, default to all supported extensions.
        Otherwise, a list of extensions to test. For example ['png','pdf'].

      *tol*: (default 13)
        The RMS threshold above which the test is considered failed.

      *freetype_version*: str or tuple
        The expected freetype version or range of versions for this
        test to pass.

      *remove_text*: bool
        Remove the title and tick text from the figure before
        comparison.  This does not remove other, more deliberate,
        text, such as legends and annotations.

      *savefig_kwarg*: dict
        Optional arguments that are passed to the savefig method.
    """
    if baseline_images is None:
        raise ValueError('baseline_images must be specified')

    if extensions is None:
        # default extensions to test
        extensions = ['png', 'pdf', 'svg']

    if savefig_kwarg is None:
        # default no kwargs to savefig
        savefig_kwarg = dict()

    def compare_images_decorator(func):
        # We want to run the setup function (the actual test function
        # that generates the figure objects) only once for each type
        # of output file.  The only way to achieve this with nose
        # appears to be to create a test class with "setup_class" and
        # "teardown_class" methods.  Creating a class instance doesn't
        # work, so we use type() to actually create a class and fill
        # it with the appropriate methods.
        name = func.__name__
        # For nose 1.0, we need to rename the test function to
        # something without the word "test", or it will be run as
        # well, outside of the context of our image comparison test
        # generator.
        func = staticmethod(func)
        func.__get__(1).__name__ = str('_private')
        new_class = type(
            name,
            (ImageComparisonTest,),
            {'_func': func,
             '_baseline_images': baseline_images,
             '_extensions': extensions,
             '_tol': tol,
             '_freetype_version': freetype_version,
             '_remove_text': remove_text,
             '_savefig_kwarg': savefig_kwarg})
        return new_class
    return compare_images_decorator
def _image_directories(func):
    """
    Compute the baseline and result image directories for testing *func*.
    Create the result directory if it doesn't exist.
    """
    module_name = func.__module__
    # mods = module_name.split('.')
    # mods.pop(0) # <- will be the name of the package being tested (in
    # most cases "matplotlib")
    # assert mods.pop(0) == 'tests'
    # subdir = os.path.join(*mods)
    subdir = module_name

    import imp

    def find_dotted_module(module_name, path=None):
        """A version of imp which can handle dots in the module name"""
        res = None
        for sub_mod in module_name.split('.'):
            try:
                # The tuple assignment also binds `file` and `path` for the
                # next iteration; `res` keeps the full (file, path, desc)
                # triple of the last component found.
                res = file, path, _ = imp.find_module(sub_mod, path)
                path = [path]
                if file is not None:
                    file.close()
            except ImportError:
                # assume namespace package
                path = sys.modules[sub_mod].__path__
                res = None, path, None
        return res

    # res[1] is the filesystem path of the module (or its package __path__).
    mod_file = find_dotted_module(func.__module__)[1]
    basedir = os.path.dirname(mod_file)

    baseline_dir = os.path.join(basedir, 'baseline_images', subdir)
    result_dir = os.path.abspath(os.path.join('result_images', subdir))

    if not os.path.exists(result_dir):
        cbook.mkdirs(result_dir)

    return baseline_dir, result_dir
|
ruohoruotsi/librosa
|
tests/mpl_ic.py
|
Python
|
isc
| 11,814
|
[
"Brian"
] |
4bbefb01035e659bd609e4a0d14f1c3bf64c2fd302fdf01bbcac50d155370fe0
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os
import shutil
class Mfem(Package):
    """Free, lightweight, scalable C++ library for finite element methods."""
    tags = ['FEM', 'finite elements', 'high-order', 'AMR', 'HPC']

    homepage = 'http://www.mfem.org'
    url = 'https://github.com/mfem/mfem'

    maintainers = ['goxberry', 'tzanio', 'markcmiller86', 'acfisher',
                   'v-dobrev']

    # Recommended mfem builds to test when updating this file: see the shell
    # script 'test_builds.sh' in the same directory as this file.

    # mfem is downloaded from a URL shortener at request of upstream
    # author Tzanio Kolev <tzanio@llnl.gov>.  See here:
    #     https://github.com/mfem/mfem/issues/53
    #
    # The following procedure should be used to verify security when a
    # new version is added:
    #
    # 1. Verify that no checksums on old versions have changed.
    #
    # 2. Verify that the shortened URL for the new version is listed at:
    #    http://mfem.org/download/
    #
    # 3. Use http://getlinkinfo.com or similar to verify that the
    #    underlying download link for the latest version has the
    #    prefix: http://mfem.github.io/releases
    #
    # If this quick verification procedure fails, additional discussion
    # will be required to verify the new version.

    # 'develop' is a special version that is always larger (or newer) than any
    # other version.
    version('develop',
            git='https://github.com/mfem/mfem', branch='master')
    version('3.4.0',
            '4e73e4fe0482636de3c5dc983cd395839a83cb16f6f509bd88b053e8b3858e05',
            url='https://bit.ly/mfem-3-4', extension='.tar.gz',
            preferred=True)
    version('3.3.2',
            'b70fa3c5080b9ec514fc05f4a04ff74322b99ac4ecd6d99c229f0ed5188fc0ce',
            url='https://goo.gl/Kd7Jk8', extension='.tar.gz')
    version('laghos-v1.0', git='https://github.com/mfem/mfem',
            tag='laghos-v1.0')
    version('3.3',
            'b17bd452593aada93dc0fee748fcfbbf4f04ce3e7d77fdd0341cc9103bcacd0b',
            url='http://goo.gl/Vrpsns', extension='.tar.gz')
    version('3.2',
            '2938c3deed4ec4f7fd5b5f5cfe656845282e86e2dcd477d292390058b7b94340',
            url='http://goo.gl/Y9T75B', extension='.tar.gz')
    version('3.1',
            '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57',
            url='http://goo.gl/xrScXn', extension='.tar.gz')

    # ---- Build variants ----------------------------------------------------
    variant('static', default=True,
            description='Build static library')
    variant('shared', default=False,
            description='Build shared library')
    variant('mpi', default=True,
            description='Enable MPI parallelism')
    # Can we make the default value for 'metis' to depend on the 'mpi' value?
    variant('metis', default=True,
            description='Enable METIS support')
    # TODO: The 'hypre' variant is the same as 'mpi', we may want to remove it.
    #       For now, keep the 'hypre' variant while ignoring its setting. This
    #       is done to preserve compatibility with other packages that refer
    #       to it, e.g. xSDK.
    variant('hypre', default=True,
            description='Required for MPI parallelism')
    variant('openmp', default=False,
            description='Enable OpenMP parallelism')
    variant('threadsafe', default=False,
            description=('Enable thread safe features.'
                         ' Required for OpenMP.'
                         ' May cause minor performance issues.'))
    variant('superlu-dist', default=False,
            description='Enable MPI parallel, sparse direct solvers')
    # Placeholder for STRUMPACK, support added in mfem v3.3.2:
    # variant('strumpack', default=False,
    #         description='Enable support for STRUMPACK')
    variant('suite-sparse', default=False,
            description='Enable serial, sparse direct solvers')
    variant('petsc', default=False,
            description='Enable PETSc solvers, preconditioners, etc.')
    variant('sundials', default=False,
            description='Enable Sundials time integrators')
    variant('pumi', default=False,
            description='Enable functionality based on PUMI')
    variant('mpfr', default=False,
            description='Enable precise, 1D quadrature rules')
    variant('lapack', default=False,
            description='Use external blas/lapack routines')
    variant('debug', default=False,
            description='Build debug instead of optimized version')
    variant('netcdf', default=False,
            description='Enable Cubit/Genesis reader')
    variant('conduit', default=False,
            description='Enable binary data I/O using Conduit')
    variant('gzstream', default=True,
            description='Support zip\'d streams for I/O')
    variant('gnutls', default=False,
            description='Enable secure sockets using GnuTLS')
    variant('libunwind', default=False,
            description='Enable backtrace on error support using Libunwind')
    variant('timer', default='auto',
            values=('auto', 'std', 'posix', 'mac', 'mpi'),
            description='Timing functions to use in mfem::StopWatch')
    variant('examples', default=False,
            description='Build and install examples')
    variant('miniapps', default=False,
            description='Build and install miniapps')

    # ---- Constraints between variants and versions -------------------------
    conflicts('+shared', when='@:3.3.2')
    conflicts('~static~shared')
    conflicts('~threadsafe', when='+openmp')
    conflicts('+netcdf', when='@:3.1')
    conflicts('+superlu-dist', when='@:3.1')
    conflicts('+gnutls', when='@:3.1')
    conflicts('+gzstream', when='@:3.2')
    conflicts('+mpfr', when='@:3.2')
    conflicts('+petsc', when='@:3.2')
    conflicts('+sundials', when='@:3.2')
    conflicts('+pumi', when='@:3.3.2')
    conflicts('timer=mac', when='@:3.3.0')
    conflicts('timer=mpi', when='@:3.3.0')
    conflicts('~metis+mpi', when='@:3.3.0')
    conflicts('+metis~mpi', when='@:3.3.0')
    conflicts('+conduit', when='@:3.3.2')
    conflicts('+superlu-dist', when='~mpi')
    conflicts('+petsc', when='~mpi')
    conflicts('+pumi', when='~mpi')
    conflicts('timer=mpi', when='~mpi')
    conflicts('+pumi', when='+shared')

    # ---- Dependencies ------------------------------------------------------
    depends_on('mpi', when='+mpi')
    depends_on('hypre@2.10.0:2.13.999', when='@:3.3.999+mpi')
    depends_on('hypre', when='@3.4:+mpi')
    depends_on('metis', when='+metis')
    depends_on('blas', when='+lapack')
    depends_on('lapack', when='+lapack')
    depends_on('sundials@2.7.0', when='@:3.3.0+sundials~mpi')
    depends_on('sundials@2.7.0+mpi+hypre', when='@:3.3.0+sundials+mpi')
    depends_on('sundials@2.7.0:', when='@3.3.2:+sundials~mpi')
    depends_on('sundials@2.7.0:+mpi+hypre', when='@3.3.2:+sundials+mpi')
    depends_on('pumi', when='+pumi')
    depends_on('suite-sparse', when='+suite-sparse')
    depends_on('superlu-dist', when='+superlu-dist')
    # The PETSc tests in MFEM will fail if PETSc is not configured with
    # SuiteSparse and MUMPS. On the other hand, if we require the variants
    # '+suite-sparse+mumps' of PETSc, the xsdk package concretization fails.
    depends_on('petsc@3.8:+mpi+double+hypre', when='+petsc')
    # Recommended when building outside of xsdk:
    # depends_on('petsc@3.8:+mpi+double+hypre+suite-sparse+mumps',
    #            when='+petsc')
    depends_on('mpfr', when='+mpfr')
    depends_on('netcdf', when='+netcdf')
    depends_on('libunwind', when='+libunwind')
    depends_on('zlib', when='+gzstream')
    depends_on('gnutls', when='+gnutls')
    depends_on('conduit@0.3.1:', when='+conduit')
    depends_on('conduit+mpi', when='+conduit+mpi')

    patch('mfem_ppc_build.patch', when='@3.2:3.3.0 arch=ppc64le')
    patch('mfem-3.4.patch', when='@3.4.0')
    patch('mfem-3.3-3.4-petsc-3.9.patch',
          when='@3.3.0:3.4.0,develop +petsc ^petsc@3.9.0:')

    phases = ['configure', 'build', 'install']

    def setup_environment(self, spack_env, run_env):
        # Make sure a pre-existing MFEM installation cannot leak into this
        # build through these environment variables.
        spack_env.unset('MFEM_DIR')
        spack_env.unset('MFEM_BUILD_DIR')

    #
    # Note: Although MFEM does support CMake configuration, MFEM
    # development team indicates that vanilla GNU Make is the
    # preferred mode of configuration of MFEM and the mode most
    # likely to be up to date in supporting *all* of MFEM's
    # configuration options. So, don't use CMake
    #
    def configure(self, spec, prefix):
        """Run 'make config' with MFEM_* options derived from the spec."""

        def yes_no(varstr):
            # Translate presence of a variant in the spec to make-style YES/NO.
            return 'YES' if varstr in self.spec else 'NO'

        # We need to add rpaths explicitly to allow proper export of link flags
        # from within MFEM.

        # Similar to spec[pkg].libs.ld_flags but prepends rpath flags too.
        def ld_flags_from_LibraryList(libs_list):
            flags = ['-Wl,-rpath,%s' % dir for dir in libs_list.directories]
            flags += [libs_list.ld_flags]
            return ' '.join(flags)

        # Build rpath/-L/-l flags from explicit directory and library names.
        def ld_flags_from_dirs(pkg_dirs_list, pkg_libs_list):
            flags = ['-Wl,-rpath,%s' % dir for dir in pkg_dirs_list]
            flags += ['-L%s' % dir for dir in pkg_dirs_list]
            flags += ['-l%s' % lib for lib in pkg_libs_list]
            return ' '.join(flags)

        # Search lib64/lib for a shared, then static, library; return an
        # empty LibraryList when the library is absent.
        def find_optional_library(name, prefix):
            for shared in [True, False]:
                for path in ['lib64', 'lib']:
                    lib = find_libraries(name, join_path(prefix, path),
                                         shared=shared, recursive=False)
                    if lib:
                        return lib
            return LibraryList([])

        metis5_str = 'NO'
        if ('+metis' in spec) and spec['metis'].satisfies('@5:'):
            metis5_str = 'YES'

        options = [
            'PREFIX=%s' % prefix,
            'MFEM_USE_MEMALLOC=YES',
            'MFEM_DEBUG=%s' % yes_no('+debug'),
            # NOTE: env['CXX'] is the spack c++ compiler wrapper. The real
            # compiler is defined by env['SPACK_CXX'].
            'CXX=%s' % env['CXX'],
            'MFEM_USE_LIBUNWIND=%s' % yes_no('+libunwind'),
            'MFEM_USE_GZSTREAM=%s' % yes_no('+gzstream'),
            'MFEM_USE_METIS=%s' % yes_no('+metis'),
            'MFEM_USE_METIS_5=%s' % metis5_str,
            'MFEM_THREAD_SAFE=%s' % yes_no('+threadsafe'),
            'MFEM_USE_MPI=%s' % yes_no('+mpi'),
            'MFEM_USE_LAPACK=%s' % yes_no('+lapack'),
            'MFEM_USE_SUPERLU=%s' % yes_no('+superlu-dist'),
            'MFEM_USE_SUITESPARSE=%s' % yes_no('+suite-sparse'),
            'MFEM_USE_SUNDIALS=%s' % yes_no('+sundials'),
            'MFEM_USE_PETSC=%s' % yes_no('+petsc'),
            'MFEM_USE_PUMI=%s' % yes_no('+pumi'),
            'MFEM_USE_NETCDF=%s' % yes_no('+netcdf'),
            'MFEM_USE_MPFR=%s' % yes_no('+mpfr'),
            'MFEM_USE_GNUTLS=%s' % yes_no('+gnutls'),
            'MFEM_USE_OPENMP=%s' % yes_no('+openmp'),
            'MFEM_USE_CONDUIT=%s' % yes_no('+conduit')]

        cxxflags = spec.compiler_flags['cxxflags']
        if cxxflags:
            # The cxxflags are set by the spack c++ compiler wrapper. We also
            # set CXXFLAGS explicitly, for clarity, and to properly export the
            # cxxflags in the variable MFEM_CXXFLAGS in config.mk.
            options += ['CXXFLAGS=%s' % ' '.join(cxxflags)]
        if '~static' in spec:
            options += ['STATIC=NO']
        if '+shared' in spec:
            options += ['SHARED=YES', 'PICFLAG=%s' % self.compiler.pic_flag]

        if '+mpi' in spec:
            options += ['MPICXX=%s' % spec['mpi'].mpicxx]
            hypre = spec['hypre']
            # The hypre package always links with 'blas' and 'lapack'.
            all_hypre_libs = hypre.libs + hypre['lapack'].libs + \
                hypre['blas'].libs
            options += [
                'HYPRE_OPT=-I%s' % hypre.prefix.include,
                'HYPRE_LIB=%s' % ld_flags_from_LibraryList(all_hypre_libs)]

        if '+metis' in spec:
            options += [
                'METIS_OPT=-I%s' % spec['metis'].prefix.include,
                'METIS_LIB=%s' %
                ld_flags_from_dirs([spec['metis'].prefix.lib], ['metis'])]

        if '+lapack' in spec:
            lapack_blas = spec['lapack'].libs + spec['blas'].libs
            options += [
                # LAPACK_OPT is not used
                'LAPACK_LIB=%s' % ld_flags_from_LibraryList(lapack_blas)]

        if '+superlu-dist' in spec:
            lapack_blas = spec['lapack'].libs + spec['blas'].libs
            options += [
                'SUPERLU_OPT=-I%s -I%s' %
                (spec['superlu-dist'].prefix.include,
                 spec['parmetis'].prefix.include),
                'SUPERLU_LIB=-L%s -L%s -lsuperlu_dist -lparmetis %s' %
                (spec['superlu-dist'].prefix.lib,
                 spec['parmetis'].prefix.lib,
                 ld_flags_from_LibraryList(lapack_blas))]

        if '+suite-sparse' in spec:
            ss_spec = 'suite-sparse:' + self.suitesparse_components
            options += [
                'SUITESPARSE_OPT=-I%s' % spec[ss_spec].prefix.include,
                'SUITESPARSE_LIB=%s' %
                ld_flags_from_LibraryList(spec[ss_spec].libs)]

        if '+sundials' in spec:
            sun_spec = 'sundials:' + self.sundials_components
            options += [
                'SUNDIALS_OPT=%s' % spec[sun_spec].headers.cpp_flags,
                'SUNDIALS_LIB=%s' %
                ld_flags_from_LibraryList(spec[sun_spec].libs)]

        if '+petsc' in spec:
            # options += ['PETSC_DIR=%s' % spec['petsc'].prefix]
            options += [
                'PETSC_OPT=%s' % spec['petsc'].headers.cpp_flags,
                'PETSC_LIB=%s' %
                ld_flags_from_LibraryList(spec['petsc'].libs)]

        if '+pumi' in spec:
            options += ['PUMI_DIR=%s' % spec['pumi'].prefix]

        if '+netcdf' in spec:
            options += [
                'NETCDF_OPT=-I%s' % spec['netcdf'].prefix.include,
                'NETCDF_LIB=%s' %
                ld_flags_from_dirs([spec['netcdf'].prefix.lib], ['netcdf'])]

        if '+gzstream' in spec:
            if "@:3.3.2" in spec:
                # Older releases only take the zlib root directory.
                options += ['ZLIB_DIR=%s' % spec['zlib'].prefix]
            else:
                options += [
                    'ZLIB_OPT=-I%s' % spec['zlib'].prefix.include,
                    'ZLIB_LIB=%s' %
                    ld_flags_from_LibraryList(spec['zlib'].libs)]

        if '+mpfr' in spec:
            options += [
                'MPFR_OPT=-I%s' % spec['mpfr'].prefix.include,
                'MPFR_LIB=%s' %
                ld_flags_from_dirs([spec['mpfr'].prefix.lib], ['mpfr'])]

        if '+gnutls' in spec:
            options += [
                'GNUTLS_OPT=-I%s' % spec['gnutls'].prefix.include,
                'GNUTLS_LIB=%s' %
                ld_flags_from_dirs([spec['gnutls'].prefix.lib], ['gnutls'])]

        if '+libunwind' in spec:
            libunwind = spec['libunwind']
            headers = find_headers('libunwind', libunwind.prefix.include)
            headers.add_macro('-g')
            libs = find_optional_library('libunwind', libunwind.prefix)
            # When mfem uses libunwind, it also needs 'libdl'.
            libs += LibraryList(find_system_libraries('libdl'))
            options += [
                'LIBUNWIND_OPT=%s' % headers.cpp_flags,
                'LIBUNWIND_LIB=%s' % ld_flags_from_LibraryList(libs)]

        if '+openmp' in spec:
            options += ['OPENMP_OPT=%s' % self.compiler.openmp_flag]

        # Map the 'timer' variant values to MFEM_TIMER_TYPE ids; 'auto'
        # leaves the choice to MFEM's own configure logic.
        timer_ids = {'std': '0', 'posix': '2', 'mac': '4', 'mpi': '6'}
        timer = spec.variants['timer'].value
        if timer != 'auto':
            options += ['MFEM_TIMER_TYPE=%s' % timer_ids[timer]]

        if '+conduit' in spec:
            conduit = spec['conduit']
            headers = HeaderList(find(conduit.prefix.include, 'conduit.hpp',
                                      recursive=True))
            conduit_libs = ['libconduit', 'libconduit_relay',
                            'libconduit_blueprint']
            libs = find_libraries(conduit_libs, conduit.prefix.lib,
                                  shared=('+shared' in conduit))
            libs += LibraryList(find_system_libraries('libdl'))
            if '+hdf5' in conduit:
                hdf5 = conduit['hdf5']
                headers += find_headers('hdf5', hdf5.prefix.include)
                libs += hdf5.libs
            options += [
                'CONDUIT_OPT=%s' % headers.cpp_flags,
                'CONDUIT_LIB=%s' % ld_flags_from_LibraryList(libs)]

        make('config', *options, parallel=False)
        make('info', parallel=False)

    def build(self, spec, prefix):
        """Compile the MFEM library."""
        make('lib')

    @run_after('build')
    def check_or_test(self):
        # Running 'make check' or 'make test' may fail if MFEM_MPIEXEC or
        # MFEM_MPIEXEC_NP are not set appropriately.
        if not self.run_tests:
            # check we can build ex1 (~mpi) or ex1p (+mpi).
            make('-C', 'examples', 'ex1p' if ('+mpi' in self.spec) else 'ex1',
                 parallel=False)
            # make('check', parallel=False)
        else:
            make('all')
            make('test', parallel=False)

    def install(self, spec, prefix):
        """Install the library and, optionally, examples and miniapps."""
        make('install', parallel=False)

        # TODO: The way the examples and miniapps are being installed is not
        # perfect. For example, the makefiles do not work.

        install_em = ('+examples' in spec) or ('+miniapps' in spec)
        if install_em and ('+shared' in spec):
            make('examples/clean', 'miniapps/clean')
            # This is a hack to get the examples and miniapps to link with the
            # installed shared mfem library:
            with working_dir('config'):
                os.rename('config.mk', 'config.mk.orig')
                shutil.copyfile(str(self.config_mk), 'config.mk')
                shutil.copystat('config.mk.orig', 'config.mk')

        if '+examples' in spec:
            make('examples')
            install_tree('examples', join_path(prefix, 'examples'))

        if '+miniapps' in spec:
            make('miniapps')
            install_tree('miniapps', join_path(prefix, 'miniapps'))

        if install_em:
            install_tree('data', join_path(prefix, 'data'))

    @property
    def suitesparse_components(self):
        """Return the SuiteSparse components needed by MFEM."""
        ss_comps = 'umfpack,cholmod,colamd,amd,camd,ccolamd,suitesparseconfig'
        if self.spec.satisfies('@3.2:'):
            # KLU and BTF are only used by MFEM 3.2 and newer.
            ss_comps = 'klu,btf,' + ss_comps
        return ss_comps

    @property
    def sundials_components(self):
        """Return the SUNDIALS components needed by MFEM."""
        sun_comps = 'arkode,cvode,nvecserial,kinsol'
        if '+mpi' in self.spec:
            sun_comps += ',nvecparhyp,nvecparallel'
        return sun_comps

    @property
    def headers(self):
        """Export the main mfem header, mfem.hpp.
        """
        hdrs = HeaderList(find(self.prefix.include, 'mfem.hpp',
                               recursive=False))
        return hdrs or None

    @property
    def libs(self):
        """Export the mfem library file.
        """
        libs = find_libraries('libmfem', root=self.prefix.lib,
                              shared=('+shared' in self.spec), recursive=False)
        return libs or None

    @property
    def config_mk(self):
        """Export the location of the config.mk file.
        This property can be accessed using spec['mfem'].package.config_mk
        """
        dirs = [self.prefix, self.prefix.share.mfem]
        for d in dirs:
            f = join_path(d, 'config.mk')
            if os.access(f, os.R_OK):
                return FileList(f)
        # Fall back to a recursive search under the install prefix.
        return FileList(find(self.prefix, 'config.mk', recursive=True))

    @property
    def test_mk(self):
        """Export the location of the test.mk file.
        This property can be accessed using spec['mfem'].package.test_mk.
        In version 3.3.2 and newer, the location of test.mk is also defined
        inside config.mk, variable MFEM_TEST_MK.
        """
        dirs = [self.prefix, self.prefix.share.mfem]
        for d in dirs:
            f = join_path(d, 'test.mk')
            if os.access(f, os.R_OK):
                return FileList(f)
        # Fall back to a recursive search under the install prefix.
        return FileList(find(self.prefix, 'test.mk', recursive=True))
|
matthiasdiener/spack
|
var/spack/repos/builtin/packages/mfem/package.py
|
Python
|
lgpl-2.1
| 21,451
|
[
"NetCDF"
] |
f34172ba624097c302ff5457244c9824dab62be832c8de7f1c7b098f1da42e97
|
#!/usr/bin/env python
# find_differential_primers.py
#
# A Python script that identifies pairs of forward and reverse primers which
# are capable of amplifying either individual organisms, or a particular
# family of organisms, from a set of genome sequences. Primers are expected
# to be located within CDS features, in an attempt to maximise sequence
# stability of the primers.
#
# The script reads from a configuration file containing sequence names and,
# at a minimum, the location of a complete genome sequence. Optionally, the
# configuration file may also indicate:
# - the location of a GenBank file containing CDS feature locations,
# or an equivalent output file from the Prodigal genefinder
# (http://compbio.ornl.gov/prodigal/)
# - the locations on the genome, and sequences of, primers predicted in
# EMBOSS ePrimer3 output format
# (http://emboss.bioinformatics.nl/cgi-bin/emboss/help/eprimer3)
#
# The first step of the script, if no primer file is specified, is to use
# the sequence file as the basis for a call to EMBOSS ePrimer3
# (http://emboss.bioinformatics.nl/cgi-bin/emboss/help/eprimer3), which must
# be installed and either on the $PATH, or its location specified at the
# command line. This will generate an output file with the same stem as the
# sequence file, but with the extension '.eprimer3'. Some ePrimer3 settings,
# such as the number of primers to find, are command-line options.
#
# If no CDS feature file is specified, and the --noCDS flag is not set,
# the script will attempt first to use Prodigal
# (http://compbio.ornl.gov/prodigal/) to predict CDS locations, placing the
# output in the same directory as the sequence source. If Prodigal cannot be
# found, a warning will be given, and the script will proceed as if the
# --noCDS flag is set. If this flag is set, then all primers are carried
# through to a query with the EMBOSS PrimerSearch package
# (http://emboss.bioinformatics.nl/cgi-bin/emboss/help/primersearch) against
# all other sequences in the dataset. If the flag is not set, then all
# primers that are not located within a CDS feature are excluded from the
# PrimerSearch input. To enable this, the PrimerSearch input is written to
# an intermediate file with the same stem as the input sequence, but the
# extension '.primers'.
#
# A run of PrimerSearch is carried out with every set of primers against
# all other sequences in the dataset. The output of this search is written to
# a file with the following naming convention:
# <query>_primers_vs_<target>.primersearch
# Where <query> is the name given to the query sequence in the config file, and
# <target> is the name given to the target sequence in the config file. This
# step is not carried out if the --noprimersearch flag is set. When this flag
# is set, the script will look for the corresponding PrimerSearch output in
# the same directory as the sequence file, and will report an error if it is
# not present.
#
# Finally, the script uses the PrimerSearch results to identify primers that
# are unique to each query sequence, and to each family named in the config
# file. These are reported in files with the following naming convention:
# <query>_specific_primers.eprimer3
# <family>_specific_primers.primers
# We use ePrimer3 format for the family-specific primers, even though the
# start and end positions are meaningless, as they will amplify at different
# sites in each family member. However, the source sequence is indicated in a
# comment line, and the primer sequences and T_m/GC% values should be the same,
# regardless.
# Primers that are universal to all sequences in the sample are written in
# ePrimer3 format to the file:
# universal_primers.eprimer3
# This file has the same caveats as the family-specific file above.
#
# (c) The James Hutton Institute 2011
# Authors: Leighton Pritchard, Benjamin Leopold, Michael Robeson
#
# Contact:
# leighton.pritchard@hutton.ac.uk
#
# Leighton Pritchard,
# Information and Computing Sciences,
# James Hutton Institute,
# Errol Road,
# Invergowrie,
# Dundee,
# DD6 9LH,
# Scotland,
# UK
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# script version
# should match r"^__version__ = '(?P<version>[^']+)'$" for setup.py
__version__ = '0.1.2'
###
# IMPORTS
import logging
import logging.handlers
import multiprocessing
import os
import subprocess
import sys
import time
import traceback
import re
from collections import defaultdict # Syntactic sugar
from optparse import OptionParser # Cmd-line parsing
try:
from Bio import SeqIO # Parsing biological sequence data
from Bio.Blast.Applications import NcbiblastnCommandline
from Bio.Blast import NCBIXML # BLAST XML parser
from Bio.Emboss.Applications import Primer3Commandline, \
PrimerSearchCommandline
from Bio.Emboss import Primer3, PrimerSearch # EMBOSS parsers
from Bio.GenBank import _FeatureConsumer # For GenBank locations
from Bio.Seq import Seq # Represents a sequence
from Bio.SeqRecord import SeqRecord # Represents annotated record
from Bio.SeqFeature import SeqFeature # Represents annotated record
except ImportError:
sys.stderr.write("Biopython required for script, but not found (exiting)")
sys.exit(1)
try:
from bx.intervals.cluster import ClusterTree # Interval tree building
except ImportError:
sys.stderr.write("bx-python required for script, but not found (exiting)")
sys.exit(1)
###
# CLASSES
# Class describing an organism's genome, and associated data.
class GenomeData(object):
    """ Describes an organism's genome, and has attributes:
    name - short, unique (not enforced) identification string
    families - list of family membership strings, or None
    seqfilename - location of representative genome sequence file
    ftfilename - location of GBK/Prodigal feature file
    primerfilename - location of ePrimer3 format primers file
    primersearchfilename - location of PrimerSearch format primers file
    primers - dictionary collection of Bio.Emboss.Primer3.Primer
              objects, keyed by primer name
    sequence - SeqRecord of the genome sequence (None until loaded)
    Exposed methods are:
    load_sequence, write_primers, get_unique_primers,
    get_family_unique_primers, get_primers_amplify_count,
    filter_primers, filter_primers_oligo, filter_primers_gc_3prime,
    concatenate_sequences
    """
    def __init__(self, name, families=None, seqfilename=None,
                 ftfilename=None, primerfilename=None,
                 primersearchfilename=None):
        """ Expects at minimum a name to identify the organism. Optionally
            filenames describing the location of sequence, feature, and
            primer data may be specified, along with a family classification.
            name - short, unique (not enforced) identification string
            families - comma-separated string of family memberships
            seqfilename - location of representative genome sequence file
            ftfilename - location of GBK/Prodigal feature file
            primerfilename - location of ePrimer3 format primers file
            primersearchfilename - location of PrimerSearch format primers
                                   file
            Rather hackily, passing '-' to any of the keyword arguments also
            sets them to None; this is to aid in config file parsing, and
            is a wee bit ugly.
        """
        self.name = name                     # Short identifier
        # '-' is the config-file placeholder for "no value".  Guard against
        # families=None as well: the original called None.split(',') here.
        self.families = families.split(',') if \
            families and families != '-' else None
        self.seqfilename = seqfilename if seqfilename != '-' else None
        self.ftfilename = ftfilename if ftfilename != '-' else None
        self.primerfilename = primerfilename if primerfilename != '-' \
            else None
        self.primersearchfilename = primersearchfilename if\
            primersearchfilename != '-' else None
        self.primers = {}        # Dict of Primer objects, keyed by name
        self.sequence = None     # Will hold genome sequence
        self.load_sequence()

    def load_sequence(self):
        """ Load the sequence defined in self.seqfilename into memory. We
            assume it's FASTA format. This can then be used to calculate
            amplicons when loading primers in.
            Exits the script (status 1) if the file cannot be parsed.
        """
        if self.seqfilename is None:
            return
        # Plain 'r' gives universal newlines on Python 3; 'rU' was
        # removed in Python 3.11
        try:
            with open(self.seqfilename) as seqfh:
                self.sequence = SeqIO.read(seqfh, 'fasta')
        except ValueError:
            logger.error("Loading sequence file %s failed",
                         self.seqfilename)
            logger.error(last_exception())
            sys.exit(1)

    def write_primers(self):
        """ Write the primer pairs in self.primers out to file in an
            appropriate format for PrimerSearch. If the filename is not
            already defined, the filestem of the source sequence file is
            used for the output file, with the extension '.primers'.
            Returns None; the number of primers written is logged.
        """
        # Define output filename, if not already defined
        if self.primersearchfilename is None:
            self.primersearchfilename = \
                os.path.splitext(self.seqfilename)[0] + '.primers'
        time_start = time.time()
        logger.info("Writing primers to file %s ...",
                    self.primersearchfilename)
        # Context manager guarantees the handle is closed
        with open(self.primersearchfilename, 'w') as outfh:
            outfh.write("# Primers for %s\n" % self.name)
            outfh.write("# Automatically generated by "
                        "find_differential_primers\n")
            for primers in self.primers.values():
                outfh.write("%s\t%s\t%s\n" %
                            (primers.name, primers.forward_seq,
                             primers.reverse_seq))
        if not len(self.primers):
            logger.warning("WARNING: no primers written to %s!",
                           self.primersearchfilename)
        logger.info("... wrote %d primers to %s (%.3fs)",
                    len(self.primers),
                    self.primersearchfilename, time.time() - time_start)

    def get_unique_primers(self, cds_overlap=False,
                           oligovalid=False,
                           blastfilter=False):
        """ Returns a list of primers that have the .amplifies_organism
            attribute, but where this is an empty set.
            If cds_overlap is True, then this list is restricted to those
            primers whose .cds_overlap attribute is also True.
            NOTE: the oligovalid parameter is accepted for backward
            compatibility but is not used; oligo filtering is controlled
            by the global options.hybridprobe flag.
        """
        return self.get_primers_amplify_count(0, cds_overlap, blastfilter)

    def _filter_primer_list(self, primerlist, cds_overlap, blastfilter):
        """ Apply the shared, option-controlled filter cascade (CDS
            overlap, 3` GC, internal oligo, BLAST, single product) to the
            passed primer list, logging counts at each stage, and return
            the surviving primers.
        """
        if cds_overlap:
            primerlist = [p for p in primerlist if p.cds_overlap]
            logger.info("[%s] %d primers after CDS filter",
                        self.name, len(primerlist))
        if options.filtergc3prime:
            primerlist = [p for p in primerlist if p.gc3primevalid]
            logger.info("[%s] %d primers after GC 3` filter",
                        self.name, len(primerlist))
        if options.hybridprobe:
            primerlist = [p for p in primerlist if p.oligovalid]
            logger.info("[%s] %d primers after oligo filter",
                        self.name, len(primerlist))
        if blastfilter:
            primerlist = [p for p in primerlist if p.blastpass]
            logger.info("[%s] %d primers after BLAST filter",
                        self.name, len(primerlist))
        if options.single_product:
            primerlist = [p for p in primerlist if
                          p.negative_control_amplimers == 1]
            logger.info("[%s] %d primers after single_product filter",
                        self.name, len(primerlist))
        logger.info("[%s] returning %d primers",
                    self.name, len(primerlist))
        return primerlist

    def get_family_unique_primers(self, family_members, cds_overlap=False,
                                  blastfilter=False):
        """ Returns a list of primers that have the .amplifies_organism
            attribute, and where the set of organisms passed in
            family_members is the same as that in .amplifies_organism,
            with the addition of self.name.
            If cds_overlap is True, then this list is restricted to those
            primers whose .cds_overlap attribute is also True.
        """
        primerlist = [p for p in self.primers.values()
                      if family_members ==
                      set([self.name]).union(p.amplifies_organism)]
        logger.info("[%s] %d family primers",
                    self.name, len(primerlist))
        return self._filter_primer_list(primerlist, cds_overlap,
                                        blastfilter)

    def get_primers_amplify_count(self, count, cds_overlap=False,
                                  blastfilter=False):
        """ Returns a list of primers that have the .amplifies_organism
            attribute and where the length of this set is equal to the
            passed count.
            If cds_overlap is True, then this list is restricted to those
            primers whose .cds_overlap attribute is also True.
        """
        primerlist = [p for p in self.primers.values() if
                      count == len(p.amplifies_organism)]
        logger.info("[%s] %d family primers that amplify %d orgs",
                    self.name, len(primerlist), count)
        return self._filter_primer_list(primerlist, cds_overlap,
                                        blastfilter)

    # Filter primers on the basis of CDS feature overlap
    def filter_primers(self, psizemin):
        """ Takes the minimum size of an amplified
            region, and then uses a ClusterTree to find clusters of CDS
            and primer regions that overlap by this minimum size.
            There is a possibility that, by stacking primer regions, some
            of the reported overlapping primers may in fact not overlap
            CDS regions directly, so this method may overreport primers.
            - psizemin (int): minimum size of an amplified region
        """
        # Load in the feature data. This is done using either SeqIO for
        # files with the .gbk extension, or an ad hoc parser for
        # .prodigalout prediction files
        time_start = time.time()
        logger.info("Loading feature data from %s ...", self.ftfilename)
        extension = os.path.splitext(self.ftfilename)[-1]
        if extension == '.gbk':                          # GenBank
            # BUGFIX: the original collected a *list* of records here, so
            # the .features access below always failed.  Sequences are
            # concatenated upstream, so we expect (and use) one record.
            with open(self.ftfilename) as ftfh:
                seqrecord = list(SeqIO.parse(ftfh, 'genbank'))[0]
        elif extension == '.prodigalout':
            seqrecord = parse_prodigal_features(self.ftfilename)
        else:
            raise IOError("Expected .gbk or .prodigalout file extension")
        logger.info("... loaded %d features ...", len(seqrecord.features))
        # Use a ClusterTree as an interval tree to identify those
        # primers that overlap with features. By setting the minimum
        # overlap to the minimum size for a primer region, we ensure that
        # we capture every primer that overlaps a CDS feature by this
        # amount, but we may also extend beyond the CDS by stacking
        # primers, in principle.
        logger.info("... adding CDS feature locations to ClusterTree ...")
        ctree = ClusterTree(-psizemin, 2)
        # Loop over CDS features and add them to the tree with ID '-1'.
        # This allows us to easily separate the features from primers
        # when reviewing clusters.
        for feature in [f for f in seqrecord.features if f.type == 'CDS']:
            ctree.insert(feature.location.nofuzzy_start,
                         feature.location.nofuzzy_end, -1)
        # ClusterTree requires us to identify elements on the tree by
        # integers, so we have to relate each primer added to an integer
        # in a temporary list of the self.primers values
        logger.info("... adding primer locations to cluster tree ...")
        aux = {}
        for i, primer in enumerate(self.primers.values()):
            ctree.insert(primer.forward_start,
                         primer.reverse_start + primer.reverse_length, i)
            aux[i] = primer
        # Now we find the overlapping regions, extracting all element ids
        # that are not -1. These are the indices for aux, and we modify
        # the .cds_overlap attribute of those primers directly
        logger.info("... finding overlapping primers ...")
        overlap_primer_ids = set()      # CDS overlap primers
        for (_start, _end, ids) in ctree.getregions():
            primer_ids = set([i for i in ids if i != -1])  # non-ft ids
            overlap_primer_ids = overlap_primer_ids.union(primer_ids)
        logger.info("... %d primers overlap CDS features (%.3fs)",
                    len(overlap_primer_ids), time.time() - time_start)
        for i in overlap_primer_ids:
            aux[i].cds_overlap = True

    # Filter primers on the basis of internal oligo characteristics
    def filter_primers_oligo(self):
        """ Loops over the primer pairs in this GenomeData object and
            marks primer.oligovalid as False if the internal oligo
            corresponds to any of the following criteria:
            - G at 5` end or 3` end
            - two or more counts of 'CC'
            - G in second position at 5` end
        """
        time_start = time.time()
        logger.info("Filtering %s primers on internal oligo...",
                    self.name)
        invalidcount = 0
        for primer in self.primers.values():
            oligo = primer.internal_seq
            primer.oligovalid = not(oligo.startswith('G')
                                    or oligo.endswith('G')
                                    or oligo[1:-1].count('CC')
                                    > 1 or oligo[1] == 'G')
            if not primer.oligovalid:
                invalidcount += 1
        logger.info("... %d primers failed (%.3fs)", invalidcount,
                    time.time() - time_start)

    # Filter primers on the basis of GC content at 3` end
    def filter_primers_gc_3prime(self):
        """ Loops over the primer pairs in the passed GenomeData object
            and, if either primer has more than 2 G+C in the last five
            nucleotides, sets the .gc3primevalid flag to False.
        """
        time_start = time.time()
        logger.info("Filtering %s primers on 3` GC content ...", self.name)
        invalidcount = 0
        for primer in self.primers.values():
            fseq, rseq = primer.forward_seq[-5:], primer.reverse_seq[-5:]
            # BUGFIX: the reverse-primer test previously counted G in
            # fseq rather than rseq
            if (fseq.count('C') + fseq.count('G') > 2) or \
                    (rseq.count('C') + rseq.count('G') > 2):
                primer.gc3primevalid = False
                invalidcount += 1
        logger.info("... %d primers failed (%.3fs)", invalidcount,
                    time.time() - time_start)

    # Concatenate multiple fragments of a genome to a single file
    def concatenate_sequences(self):
        """ Takes a GenomeData object and concatenates sequences with the
            spacer sequence NNNNNCATCCATTCATTAATTAATTAATGAATGAATGNNNNN
            (this contains start and stop codons in all frames, to cap
            individual sequences). We write this data out to a new file.
            For filename convention, we just add '_concatenated' to the
            end of the sequence filestem, and use the '.fas' extension.
            Returns the output filename.
        """
        # Spacer contains start and stop codons in all six frames
        spacer = 'NNNNNCATCCATTCATTAATTAATTAATGAATGAATGNNNNN'
        time_start = time.time()
        logger.info("Concatenating sequences from %s ...", self.seqfilename)
        # str(s.seq) works on both old and new Biopython; the old .data
        # attribute was removed from modern Seq objects
        with open(self.seqfilename) as seqfh:
            catseq = spacer.join([str(s.seq) for s in
                                  SeqIO.parse(seqfh, 'fasta')])
        newseq = SeqRecord(Seq(catseq),
                           id=self.name + "_concatenated",
                           description="%s, concatenated with spacers" %
                           self.name)
        outfilename = ''.join([os.path.splitext(self.seqfilename)[0],
                               '_concatenated', '.fas'])
        with open(outfilename, 'w') as outfh:
            SeqIO.write([newseq], outfh, 'fasta')
        logger.info("... wrote concatenated data to %s (%.3fs)",
                    outfilename, time.time() - time_start)
        return outfilename

    def __str__(self):
        """ Pretty string description of object contents
        """
        outstr = ['GenomeData object: %s' % self.name]
        # Guard against families=None (list(None) raises TypeError)
        outstr.append('Families: %s' % (list(self.families)
                                        if self.families else None))
        outstr.append('Sequence file: %s' % self.seqfilename)
        outstr.append('Feature file: %s' % self.ftfilename)
        outstr.append('Primers file: %s' % self.primerfilename)
        outstr.append('PrimerSearch file: %s' % self.primersearchfilename)
        outstr.append('Primers: %d' % len(self.primers))
        if len(self.primers):
            outstr.append('Primers overlapping CDS: %d' %
                          len([p for p in self.primers.values() if
                               p.cds_overlap]))
        return os.linesep.join(outstr) + os.linesep
###
# FUNCTIONS
# Parse command-line options
def parse_cmdline():
    """ Parse command line, accepting args obtained from sys.argv.
        Returns a (options, arguments, parser) tuple.
    """
    parser = OptionParser("usage: %prog [options] arg")
    # All options as (option strings, add_option keyword arguments); a
    # data-driven table keeps the many declarations compact and uniform.
    # Order is preserved, so --help output matches the original listing.
    specs = [
        (("-i", "--infile"),
         dict(dest="filename", action="store", default=None,
              help="location of configuration file")),
        (("-o", "--outdir"),
         dict(dest="outdir", action="store",
              default="differential_primer_results",
              help="directory for output files")),
        (("--numreturn",),
         dict(dest="numreturn", action="store", default=20, type="int",
              help="number of primers to find")),
        (("--hybridprobe",),
         dict(dest="hybridprobe", action="store_true", default=False,
              help="generate internal oligo as a hybridisation probe")),
        (("--filtergc3prime",),
         dict(dest="filtergc3prime", action="store_true", default=False,
              help="allow no more than two GC at the 3` end of primers")),
        (("--single_product",),
         dict(dest="single_product", action="store", default=None,
              help="location of FASTA sequence file containing "
                   "sequences from which a sequence-specific "
                   "primer must amplify exactly one product.")),
        (("--prodigal",),
         dict(dest="prodigal_exe", action="store", default="prodigal",
              help="location of Prodigal executable")),
        (("--eprimer3",),
         dict(dest="eprimer3_exe", action="store", default="eprimer3",
              help="location of EMBOSS eprimer3 executable")),
        (("--blast_exe",),
         dict(dest="blast_exe", action="store", default="blastn",
              help="location of BLASTN/BLASTALL executable")),
        (("--blastdb",),
         dict(dest="blastdb", action="store", default=None,
              help="location of BLAST database")),
        (("--useblast",),
         dict(dest="useblast", action="store_true", default=False,
              help="use existing BLAST results")),
        (("--nocds",),
         dict(dest="nocds", action="store_true", default=False,
              help="do not restrict primer prediction to CDS")),
        (("--noprodigal",),
         dict(dest="noprodigal", action="store_true", default=False,
              help="do not carry out Prodigal prediction step")),
        (("--noprimer3",),
         dict(dest="noprimer3", action="store_true", default=False,
              help="do not carry out ePrimer3 prediction step")),
        (("--noprimersearch",),
         dict(dest="noprimersearch", action="store_true", default=False,
              help="do not carry out PrimerSearch step")),
        (("--noclassify",),
         dict(dest="noclassify", action="store_true", default=False,
              help="do not carry out primer classification step")),
        # Primer oligo size/Tm/GC constraints
        (("--osize",),
         dict(dest="osize", action="store", default=20, type="int",
              help="optimal size for primer oligo")),
        (("--minsize",),
         dict(dest="minsize", action="store", default=18, type="int",
              help="minimum size for primer oligo")),
        (("--maxsize",),
         dict(dest="maxsize", action="store", default=22, type="int",
              help="maximum size for primer oligo")),
        (("--otm",),
         dict(dest="otm", action="store", default=59, type="int",
              help="optimal melting temperature for primer oligo")),
        (("--mintm",),
         dict(dest="mintm", action="store", default=58, type="int",
              help="minimum melting temperature for primer oligo")),
        (("--maxtm",),
         dict(dest="maxtm", action="store", default=60, type="int",
              help="maximum melting temperature for primer oligo")),
        (("--ogcpercent",),
         dict(dest="ogcpercent", action="store", default=55, type="int",
              help="optimal %GC for primer oligo")),
        (("--mingc",),
         dict(dest="mingc", action="store", default=30, type="int",
              help="minimum %GC for primer oligo")),
        (("--maxgc",),
         dict(dest="maxgc", action="store", default=80, type="int",
              help="maximum %GC for primer oligo")),
        # Amplified product size constraints
        (("--psizeopt",),
         dict(dest="psizeopt", action="store", default=100, type="int",
              help="optimal size for amplified region")),
        (("--psizemin",),
         dict(dest="psizemin", action="store", default=50, type="int",
              help="minimum size for amplified region")),
        (("--psizemax",),
         dict(dest="psizemax", action="store", default=150, type="int",
              help="maximum size for amplified region")),
        (("--maxpolyx",),
         dict(dest="maxpolyx", action="store", default=3, type="int",
              help="maximum run of repeated nucleotides in primer")),
        (("--mismatchpercent",),
         dict(dest="mismatchpercent", action="store", default=10,
              type="int",
              help="allowed percentage mismatch in primersearch")),
        # Internal (hybridisation) oligo constraints
        (("--oligoosize",),
         dict(dest="oligoosize", action="store", default=20, type="int",
              help="optimal size for internal oligo")),
        (("--oligominsize",),
         dict(dest="oligominsize", action="store", default=13, type="int",
              help="minimum size for internal oligo")),
        (("--oligomaxsize",),
         dict(dest="oligomaxsize", action="store", default=30, type="int",
              help="maximum size for internal oligo")),
        (("--oligootm",),
         dict(dest="oligootm", action="store", default=69, type="int",
              help="optimal melting temperature for internal oligo")),
        (("--oligomintm",),
         dict(dest="oligomintm", action="store", default=68, type="int",
              help="minimum melting temperature for internal oligo")),
        (("--oligomaxtm",),
         dict(dest="oligomaxtm", action="store", default=70, type="int",
              help="maximum melting temperature for internal oligo")),
        (("--oligoogcpercent",),
         dict(dest="oligoogcpercent", action="store", default=55,
              type="int",
              help="optimal %GC for internal oligo")),
        (("--oligomingc",),
         dict(dest="oligomingc", action="store", default=30, type="int",
              help="minimum %GC for internal oligo")),
        (("--oligomaxgc",),
         dict(dest="oligomaxgc", action="store", default=80, type="int",
              help="maximum %GC for internal oligo")),
        (("--oligomaxpolyx",),
         dict(dest="oligomaxpolyx", action="store", default=3, type="int",
              help="maximum run of repeated nt in internal oligo")),
        # Scheduling, housekeeping and logging
        (("--cpus",),
         dict(dest="cpus", action="store",
              default=multiprocessing.cpu_count(), type="int",
              help="number of CPUs to use in multiprocessing")),
        (("--sge",),
         dict(dest="sge", action="store_true", default=False,
              help="use SGE job scheduler")),
        (("--clean",),
         dict(dest="clean", action="store_true", default=False,
              help="clean up old output files before running")),
        (("--cleanonly",),
         dict(dest="cleanonly", action="store_true", default=False,
              help="clean up old output files and exit")),
        (("-l", "--logfile"),
         dict(dest="logfile", action="store", default=None,
              help="script logfile location")),
        (("-v", "--verbose"),
         dict(dest="verbose", action="store_true", default=False,
              help="report progress to log")),
        (("--debug",),
         dict(dest="debug", action="store_true", default=False,
              help="report extra progress to log for debugging")),
        (("--keep_logs",),
         dict(dest="keep_logs", action="store_true", default=False,
              help="store log files from each process")),
        (("--log_dir",),
         dict(dest="log_dir", action="store", default=None,
              help="store called process log files in this directory")),
    ]
    for optnames, optkwargs in specs:
        parser.add_option(*optnames, **optkwargs)
    (optsparsed, argsparsed) = parser.parse_args()
    return (optsparsed, argsparsed, parser)
# Report last exception as string
def last_exception():
    """ Return the most recent exception, formatted as a traceback
        string suitable for logging.
    """
    # sys.exc_info() yields (type, value, traceback); unpack directly
    # into the formatter and join the resulting lines
    return ''.join(traceback.format_exception(*sys.exc_info()))
# Create a list of GenomeData objects corresponding to config file entries
def create_gd_from_config(filename):
    """ Parses data from a configuration file into a list of GenomeData
        objects.
        Returns a list of GenomeData objects.

        Each line of the config file describes a single genome.
        The config file format is six tab-separated columns, where columns
        may be separated by multiple tabs. 'Empty' data values are
        indicated by the '-' symbol, and these are converted into None
        objects in parsing.
        Comment lines start with '#', as in Python.
        The six columns are:
        1) Genome name
        2) Genome family
        3) Location of FASTA format sequence data
        4) Location of GENBANK/PRODIGAL format feature data
        5) Location of EPRIMER3 format primer data
        6) Location of PRIMERSEARCH input format primer data

        - filename: path to the configuration file

        Raises ValueError (via tuple unpacking) if a line does not have
        exactly six non-empty columns.
    """
    time_start = time.time()
    logger.info("Creating list of genomes from config file %s ...", filename)
    gd_list = []                     # Hold GenomeData objects
    # Plain 'r' mode: 'rU' was removed in Python 3.11, and universal
    # newlines are the default for text-mode reads.  The context manager
    # ensures the handle is closed.
    with open(filename) as cfgfh:
        # Ignore blank lines and comments...
        for line in [l.strip() for l in cfgfh
                     if l.strip() and not l.startswith('#')]:
            # Split data and create new GenomeData object, adding it
            # to the list
            data = [e.strip() for e in line.split('\t') if e.strip()]
            name, family, sfile, ffile, pfile, psfile = tuple(data)
            gd_list.append(GenomeData(name, family, sfile, ffile,
                                      pfile, psfile))
            logger.info("... created GenomeData object for %s ...", name)
            logger.info(gd_list[-1])
    logger.info("... created %d GenomeData objects (%.3fs)",
                len(gd_list), time.time() - time_start)
    return gd_list
# Check whether each GenomeData object has multiple sequence and, if so,
# concatenate them sensibly, resetting feature and primer file locations to
# None
def check_single_sequence(gd_list):
    """ Loops over the GenomeData objects in the passed list and, where
        the sequence file contains multiple sequences, concatenates them
        into a single sequence using a spacer that facilitates
        gene-finding. As this process changes feature and primer
        locations, the ftfilename and primerfilename attributes are reset
        to None, and these are recalculated later on in the script, where
        necessary.

        - gd_list: list of GenomeData objects, modified in place
    """
    time_start = time.time()
    logger.info("Checking for multiple sequences ...")
    for gd_obj in gd_list:
        # Verify that the sequence file contains a single sequence;
        # plain 'r' replaces the removed 'rU' mode, and the context
        # manager closes the handle
        with open(gd_obj.seqfilename) as seqfh:
            seqdata = list(SeqIO.parse(seqfh, 'fasta'))
        if len(seqdata) != 1:
            logger.info("... %s describes multiple sequences ...",
                        gd_obj.seqfilename)
            gd_obj.seqfilename = gd_obj.concatenate_sequences()
            logger.info("... clearing feature and primer file locations ...")
            # Locations are now invalid for the concatenated sequence
            (gd_obj.ftfilename, gd_obj.primerfilename,
             gd_obj.primersearchfilename) = \
                (None, None, None)
    logger.info("... checked %d GenomeData objects (%.3fs)",
                len(gd_list), time.time() - time_start)
# Check for each GenomeData object in a passed list, the existence of
# the feature file, and create one using Prodigal if it doesn't exist already
def check_ftfilenames(gd_list):
    """ Loop over the GenomeData objects in gd_list and, for each object
        without a usable feature file, build a Prodigal command line to
        predict CDS features; the jobs are then run in parallel, either
        locally via multiprocessing or through SGE.
    """
    logger.info("Checking and predicting features for GenomeData files ...")
    # Select the objects whose feature file is missing or undefined;
    # predefined feature files are not validated here
    no_features = [gd_obj for gd_obj in gd_list
                   if gd_obj.ftfilename is None or
                   not os.path.isfile(gd_obj.ftfilename)]
    logger.info("... %d GenomeData objects have no feature file ...",
                len(no_features))
    logger.info("... running %d Prodigal jobs to predict CDS ...",
                len(no_features))
    # Build one Prodigal command line per object needing prediction
    commandlines = []
    for gd_obj in no_features:
        filestem = os.path.splitext(gd_obj.seqfilename)[0]
        gd_obj.ftfilename = filestem + '.prodigalout'
        proteinfilename = filestem + '.features'
        commandline = "%s -a %s < %s > %s" % (options.prodigal_exe,
                                              proteinfilename,
                                              gd_obj.seqfilename,
                                              gd_obj.ftfilename)
        commandlines.append(commandline +
                            log_output(gd_obj.name + ".prodigal"))
    logger.info("... Prodigal jobs to run:")
    logger.info("Running:\n" + "\n".join(commandlines))
    # Dispatch to the requested scheduler
    if options.sge:
        sge_run(commandlines)
    else:
        multiprocessing_run(commandlines)
# Check whether GenomeData objects have a valid primer definition file
def check_primers(gd_list):
    """ Loop over GenomeData objects in the passed gd_list and, if they
        have a defined primerfilename attribute, attempt to parse it. If
        this is successful, do nothing. If it fails with an IOError, set
        the primerfilename attribute to None.

        - gd_list: list of GenomeData objects, modified in place
    """
    logger.info("Checking ePrimer3 output files ...")
    for gd_obj in [g for g in gd_list if g.primerfilename]:
        try:
            # Context manager closes the handle (the original leaked it);
            # plain 'r' replaces the removed 'rU' mode
            with open(gd_obj.primerfilename) as primerfh:
                Primer3.read(primerfh)
            logger.info("... %s primer file %s OK ...",
                        gd_obj.name, gd_obj.primerfilename)
        except IOError:
            logger.info("... %s primer file %s not OK ...",
                        gd_obj.name, gd_obj.primerfilename)
            gd_obj.primerfilename = None
# Check for each GenomeData object in a passed list, the existence of
# the ePrimer3 file, and create one using ePrimer3 if it doesn't exist already
def predict_primers(gd_list, emboss_version):
    """ Loop over the GenomeData objects in gd_list and, where no primer
        file is specified, build an EMBOSS ePrimer3 command line to
        predict primers; the jobs are then run in parallel, either
        locally via multiprocessing or through SGE.

        - gd_list: list of GenomeData objects
        - emboss_version: EMBOSS version string (e.g. '6.6.0'); the
          optimal-Tm parameter name changed at v6.5, so the version
          selects which attribute to set
    """
    logger.info("Checking and predicting primers for GenomeData files ...")
    # Only GenomeData objects without a defined primer file need a job;
    # the validity of predefined primer files is not tested here
    gds_no_primers = [g for g in gd_list if g.primerfilename is None]
    logger.info("... %d GenomeData objects have no primer file ...",
                len(gds_no_primers))
    # Fixed copy-paste in the next message: these jobs predict primers,
    # not CDS
    logger.info("... running %d ePrimer3 jobs to predict primers ...",
                len(gds_no_primers))
    # Create command-lines to run ePrimer3
    clines = []
    for gd_obj in gds_no_primers:
        # Create ePrimer3 command-line.
        cline = Primer3Commandline(cmd=options.eprimer3_exe)
        cline.sequence = gd_obj.seqfilename
        cline.auto = True
        cline.osize = "%d" % options.osize            # Optimal primer size
        cline.minsize = "%d" % options.minsize        # Min primer size
        cline.maxsize = "%d" % options.maxsize        # Max primer size
        # Optimal primer Tm option name is dependent on EMBOSS version
        if float('.'.join(emboss_version.split('.')[:2])) >= 6.5:
            cline.opttm = "%d" % options.otm          # Optimal primer Tm
        else:
            cline.otm = "%d" % options.otm
        cline.mintm = "%d" % options.mintm            # Min primer Tm
        cline.maxtm = "%d" % options.maxtm            # Max primer Tm
        cline.ogcpercent = "%d" % options.ogcpercent  # Optimal primer %GC
        cline.mingc = "%d" % options.mingc            # Min primer %GC
        cline.maxgc = "%d" % options.maxgc            # Max primer %GC
        cline.psizeopt = "%d" % options.psizeopt      # Optimal product size
        # Longest polyX run in primer
        cline.maxpolyx = "%d" % options.maxpolyx
        # Allowed product sizes
        cline.prange = "%d-%d" % (options.psizemin, options.psizemax)
        # Number of primers to predict
        cline.numreturn = "%d" % options.numreturn
        cline.hybridprobe = options.hybridprobe   # Predict internal oligo?
        # Internal oligo parameters; we use EMBOSS v6 parameter names here
        cline.osizeopt = "%d" % options.oligoosize
        cline.ominsize = "%d" % options.oligominsize
        cline.omaxsize = "%d" % options.oligomaxsize
        cline.otmopt = "%d" % options.oligootm
        cline.otmmin = "%d" % options.oligomintm
        cline.otmmax = "%d" % options.oligomaxtm
        cline.ogcopt = "%d" % options.oligoogcpercent
        cline.ogcmin = "%d" % options.oligomingc
        cline.ogcmax = "%d" % options.oligomaxgc
        cline.opolyxmax = "%d" % options.oligomaxpolyx
        cline.outfile = os.path.splitext(gd_obj.seqfilename)[0] + '.eprimer3'
        gd_obj.primerfilename = cline.outfile
        clines.append(str(cline) + log_output(gd_obj.name + ".eprimer3"))
    logger.info("... ePrimer3 jobs to run:")
    logger.info("Running:\n" + '\n'.join(clines))
    # Parallelise jobs, locally or via SGE
    if not options.sge:
        multiprocessing_run(clines)
    else:
        sge_run(clines)
# Load primers from ePrimer3 files into each GenomeData object
def load_primers(gd_list):
    """ Load primer data from an ePrimer3 output file into a dictionary
        of Bio.Emboss.Primer3.Primer objects (keyed by primer name) in a
        GenomeData object, for each such object in the passed list.
        Each primer object is given a new ad hoc attribute 'cds_overlap'
        which takes a Boolean, indicating whether the primer is found
        wholly within a CDS defined in the GenomeData object's feature
        file; this status is determined using an interval tree approach.

        - gd_list: list of GenomeData objects, modified in place

        Raises IOError if a GenomeData object's primer file is undefined
        or missing.
    """
    logger.info("Loading primers, %sfiltering on CDS overlap",
                'not ' if options.nocds else '')
    # Load in the primers, assigning False to a new, ad hoc attribute
    # called cds_overlap in each
    for gd_obj in gd_list:
        logger.info("... loading primers into %s from %s ...",
                    gd_obj.name, gd_obj.primerfilename)
        # Explicit existence check (the original relied on a TypeError
        # from os.path.isfile(None), and missing files only surfaced at
        # open() time)
        if gd_obj.primerfilename is None or \
                not os.path.isfile(gd_obj.primerfilename):
            raise IOError("Primer file %s does not exist." %
                          gd_obj.primerfilename)
        # Plain 'r' replaces the removed 'rU' mode; the context manager
        # closes the handle
        with open(gd_obj.primerfilename) as primerfh:
            primers = Primer3.read(primerfh).primers
        # Add primer pairs to the gd.primers dictionary
        primercount = 0
        for primer in primers:
            primercount += 1
            primer.cds_overlap = False           # default state
            primer.name = "%s_primer_%04d" % (gd_obj.name, primercount)
            primer.amplifies_organism = set()    # Organisms amplified
            primer.amplifies_family = set()      # Organism families amplified
            primer.gc3primevalid = True          # Passes GC 3` test
            primer.oligovalid = True             # Oligo passes filter
            primer.blastpass = True              # Primers pass BLAST screen
            gd_obj.primers.setdefault(primer.name, primer)
            # Record the amplified region; primer coordinates are
            # 1-based, Python slices are 0-based
            primer.amplicon = \
                gd_obj.sequence[primer.forward_start - 1:
                                primer.reverse_start - 1 +
                                primer.reverse_length]
            primer.amplicon.description = primer.name
        logger.info("... loaded %d primers into %s ...",
                    len(gd_obj.primers), gd_obj.name)
        # Now that the primers are in the GenomeData object, we can filter
        # them on location, if necessary
        if not options.nocds:
            gd_obj.filter_primers(options.psizemin)
        # We also filter primers on the basis of GC presence at the 3` end
        if options.filtergc3prime:
            gd_obj.filter_primers_gc_3prime()
        # Filter primers on the basis of internal oligo characteristics
        if options.hybridprobe:
            gd_obj.filter_primers_oligo()
# Screen passed GenomeData primers against BLAST database
def blast_screen(gd_list):
    """ Run the three-stage BLAST screen over the passed GenomeData list:
        1) write a FASTA file per organism containing every primer's
           forward and reverse sequence;
        2) query the named BLAST database with each file via local
           BLASTN (BLAST+, not legacy BLAST), parallelised by our own
           multiprocessing/SGE machinery rather than BLAST's built-in
           threading, so the same command-lines work under SGE;
        3) parse the XML output, marking primers that make hits as
           having failed the BLAST filter (.blastpass = False).
    """
    # Each stage consumes the same GenomeData list in sequence
    for stage in (build_blast_input, run_blast, parse_blast):
        stage(gd_list)
# Write BLAST input files for each GenomeData object
def build_blast_input(gd_list):
    """ Loops over each GenomeData object in the list, and writes forward
        and reverse primer sequences out in FASTA format to a file with
        filename derived from the GenomeData object name. The filename is
        stored on the object as .blastinfilename.

        - gd_list: list of GenomeData objects, modified in place
    """
    time_start = time.time()
    logger.info("Writing files for BLAST input ...")
    for gd_obj in gd_list:
        gd_obj.blastinfilename =\
            os.path.join(os.path.split(gd_obj.seqfilename)[0],
                         "%s_BLAST_input.fas" % gd_obj.name)
        seqrecords = []
        for name, primer in gd_obj.primers.items():
            seqrecords.append(SeqRecord(Seq(primer.forward_seq),
                                        id=name + '_forward'))
            seqrecords.append(SeqRecord(Seq(primer.reverse_seq),
                                        id=name + '_reverse'))
        logger.info("... writing %s ...", gd_obj.blastinfilename)
        # Context manager closes the output handle (the original passed
        # an anonymous open() to SeqIO.write and leaked it)
        with open(gd_obj.blastinfilename, 'w') as outfh:
            SeqIO.write(seqrecords, outfh, 'fasta')
    logger.info("... done (%.3fs)", time.time() - time_start)
# Run BLAST screen for each GenomeData object
def run_blast(gd_list):
    """Run a BLASTN screen for each GenomeData object in gd_list.

    Builds one blastn command line per object, querying the primer
    FASTA file (.blastinfilename) against options.blastdb and writing
    XML (outfmt=5) output to <name>_BLAST_output.xml next to the
    sequence file.  Jobs are executed in parallel via multiprocessing,
    or via SGE when options.sge is set.
    """
    logger.info("Compiling BLASTN command-lines ...")
    clines = []
    for gd_obj in gd_list:
        outdir = os.path.split(gd_obj.seqfilename)[0]
        gd_obj.blastoutfilename = os.path.join(
            outdir, "%s_BLAST_output.xml" % gd_obj.name)
        cline = NcbiblastnCommandline(query=gd_obj.blastinfilename,
                                      db=options.blastdb,
                                      task='blastn',  # default: MEGABLAST
                                      out=gd_obj.blastoutfilename,
                                      num_alignments=1,
                                      num_descriptions=1,
                                      outfmt=5,
                                      perc_identity=90,
                                      ungapped=True)
        clines.append(str(cline) + log_output(gd_obj.name + ".blastn"))
    logger.info("... BLASTN+ jobs to run:")
    logger.info("Running:\n" + '\n'.join(clines))
    if options.sge:
        sge_run(clines)
    else:
        multiprocessing_run(clines)
# Parse BLAST output for each GenomeData object
def parse_blast(gd_list):
    """Parse BLASTN XML output for each GenomeData object in gd_list.

    Parsing is farmed out to a multiprocessing pool, one job per
    output file.  Each job returns the names of primers that hit the
    screening database; the corresponding Primer objects then get
    .blastpass set to False.
    """
    time_start = time.time()
    logger.info("Parsing BLASTN output with multiprocessing ...")
    # Use multiprocessing directly (rather than the shared
    # command-line runner) so the XML parsing itself is parallelised.
    pool = multiprocessing.Pool(processes=options.cpus)
    async_results = [pool.apply_async(process_blastxml,
                                      (gd_obj.blastoutfilename,
                                       gd_obj.name))
                     for gd_obj in gd_list]
    pool.close()
    pool.join()
    # Index the GenomeData objects by name so each returned primer
    # name can be routed back to its owning object.
    by_name = {}
    for gd_obj in gd_list:
        by_name.setdefault(gd_obj.name, gd_obj)
    failcount = 0
    for matches in (result.get() for result in async_results):
        for primer_name in matches:
            # Primer names are '<objectname>_primer_<n>_<direction>';
            # the prefix before '_primer_' identifies the owner.
            owner = by_name[primer_name.split('_primer_')[0]]
            owner.primers[primer_name].blastpass = False
            failcount += 1
    logger.info("... %d primers failed BLAST screen ...", failcount)
    logger.info("... multiprocessing BLAST parsing complete (%.3fs)",
                time.time() - time_start)
# BLAST XML parsing function for multiprocessing
def process_blastxml(filename, name):
    """Return the set of primer names whose query hit the database.

    Takes a BLAST XML output file and a process name (for logging).
    The database is used as a screen, so *any* sufficiently strong hit
    disqualifies a primer; BLAST+ reports hits in quality order, so
    only the top alignment needs checking.  A hit counts when the HSP
    identities cover at least 90% of the query length (alignments are
    ungapped, so identities/query_letters is a valid fraction).

    The trailing '_forward'/'_reverse' suffix is stripped from the
    query name, so the returned set contains primer-pair names.
    """
    time_start = time.time()
    logger.info("[process name: %s] Parsing BLAST XML ...", name)
    # Set of queries that hit the database
    matching_primers = set()
    recordcount = 0
    try:
        # FIX: 'r' replaces the 'rU' mode, which was deprecated in
        # Python 3 and removed in 3.11 (universal newlines are now the
        # default); the context manager also closes the handle, which
        # the original leaked.
        with open(filename, 'r') as blastfh:
            for record in NCBIXML.parse(blastfh):
                recordcount += 1  # Increment our count of matches
                # Add the query to the failing/matching set when the
                # top hit is identical over >= 90% of the query.
                if len(record.alignments):
                    identities = \
                        float(record.alignments[0].hsps[0].identities) / \
                        float(record.query_letters)
                    if 0.9 <= identities:
                        matching_primers.add('_'.join(
                            record.query.split('_')[:-1]))
        logger.info("[process name: %s] Parsed %d records",
                    name, recordcount)
    except IOError:
        logger.info("[process name: %s] Error reading BLAST XML file", name)
    logger.info("[process name: %s] Time spent in process: (%.3fs)",
                name, time.time() - time_start)
    return matching_primers
# A function for parsing features from Prodigal output
def parse_prodigal_features(filename):
    """Parse CDS features from Prodigal 'GenBank'-style output.

    Prodigal's output is close to, but not compatible with, GenBank
    format as understood by SeqIO.read(), so features are extracted
    line-by-line (see seqrecord_parse) and attached to a SeqRecord
    carrying a dummy sequence, emulating SeqIO.read() output as far
    as possible.
    """
    record = SeqRecord(None)  # record gets a dummy sequence
    # FIX: 'r' replaces the 'rU' mode (removed in Python 3.11); the
    # context manager closes the handle, which the original leaked.
    with open(filename, 'r') as handle:
        # init feature list from file parsing
        record.features = seqrecord_parse(handle)
    return record
# Parse record features from the lines of prodigal or genbank format file
def seqrecord_parse(filehandle):
    """Return CDS SeqFeatures parsed from Prodigal/GenBank header lines.

    Still necessary because Prodigal's GenBank output is not
    SeqIO.read() friendly.  Only lines mentioning "CDS" are
    considered; each contributes one feature.
    """
    features = []
    for line in filehandle:
        if not re.search("CDS", line):
            continue
        fields = [field.strip() for field in line.split()]
        # Last field is the location string, first is the feature type.
        feature = gb_string_to_feature(fields[-1])
        feature.type = fields[0]
        features.append(feature)
    return features
# Parse record features from sequence file, using SeqIO
def seqrecord_parse_seqio(filehandle, seqformat):
    """Collect per-record feature lists using SeqIO.parse.

    NOTE: currently unused.  The latest Prodigal output is *closer*
    to GenBank format but still not close enough for SeqIO to recover
    the genome features; kept pending updates to Prodigal or SeqIO.
    """
    features = []
    for record in SeqIO.parse(filehandle, seqformat):
        logger.debug("record seq: [%s]...", record.seq[0:12])
        features.append(record.features)
    return features
# Code (admittedly hacky) from Brad Chapman to parse a GenBank command line
def gb_string_to_feature(content, use_fuzziness=True):
    """Convert a GenBank location string into a SeqFeature.

    Adapted from code by Brad Chapman.  NOTE(review): this leans on
    Biopython's private _FeatureConsumer API (_cur_feature), so it may
    break across Biopython versions — confirm against the installed
    version.

    :param content: GenBank-format location string (e.g. '123..456')
    :param use_fuzziness: honour fuzzy location markers when parsing
    :return: SeqFeature with its location set from `content`
    """
    consumer = _FeatureConsumer(use_fuzziness)
    consumer._cur_feature = SeqFeature()
    consumer.location(content)
    return consumer._cur_feature
# Run PrimerSearch all-against-all on a list of GenomeData objects
def primersearch(gd_list):
    """Run all-against-all EMBOSS PrimerSearch over gd_list.

    For every ordered pair of distinct GenomeData objects, builds a
    PrimerSearch command line querying the source object's primers
    against the target's sequence.  Output files are named
    <query name>_vs_<target name>.primersearch and written alongside
    the query's sequence file; each query object accumulates its
    output paths in .primersearch_output.  Jobs run in parallel via
    multiprocessing, or SGE when options.sge is set.
    """
    logger.info("Constructing all-against-all PrimerSearch runs " +
                "for %d objects ...", len(gd_list))
    clines = []  # command lines, one per (query, target) pair
    for query_gd in gd_list:
        query_gd.primersearch_output = []
        for target_gd in gd_list:
            if query_gd != target_gd:  # no self-comparison
                outdir = os.path.split(query_gd.seqfilename)[0]
                outfilename = os.path.join(
                    outdir,
                    "%s_vs_%s.primersearch" % (query_gd.name,
                                               target_gd.name))
                query_gd.primersearch_output.append(outfilename)
                # Assemble the PrimerSearch invocation
                cline = PrimerSearchCommandline()
                cline.auto = True
                cline.seqall = target_gd.seqfilename
                cline.infile = query_gd.primersearchfilename
                cline.outfile = outfilename
                cline.mismatchpercent = options.mismatchpercent
                clines.append(str(cline) +
                              log_output(os.path.basename(outfilename)))
    logger.info("... PrimerSearch jobs to run: ...")
    logger.info("Running:\n" + '\n'.join(clines))
    if options.sge:
        sge_run(clines)
    else:
        multiprocessing_run(clines)
# Load in existing PrimerSearch output
def load_existing_primersearch_results(gd_list):
    """Associate existing PrimerSearch output files with GenomeData objects.

    Scans the directory of each object's sequence file for
    '<name>*.primersearch' files, records them on the object's
    .primersearch_output list, and returns a list of
    (object name, filename) tuples for all files found.
    """
    time_start = time.time()
    logger.info("Locating existing PrimerSearch input files ...")
    primersearch_results = []
    for gd_obj in gd_list:
        gd_obj.primersearch_output = []
        filedir = os.path.split(gd_obj.seqfilename)[0]
        primersearch_files = [f for f in os.listdir(filedir) if
                              os.path.splitext(f)[-1] == '.primersearch' and
                              f.startswith(gd_obj.name)]
        for filename in primersearch_files:
            logger.info("... found %s for %s ...", filename, gd_obj.name)
            fullpath = os.path.join(filedir, filename)
            gd_obj.primersearch_output.append(fullpath)
            # BUGFIX: previously nothing was ever appended to
            # primersearch_results, so the count logged below was
            # always zero and the return value documented in the
            # original docstring was never produced.
            primersearch_results.append((gd_obj.name, fullpath))
    logger.info("... found %d PrimerSearch input files (%.3fs)",
                len(primersearch_results), time.time() - time_start)
    return primersearch_results
# Run primersearch to find whether and where the predicted primers amplify
# our negative target (the one we expect exactly one match to)
def find_negative_target_products(gd_list):
    """Run PrimerSearch against the negative-control target sequence.

    Uses each GenomeData object's predicted primers as queries against
    options.single_product (PrimerSearch happily accepts
    multi-sequence FASTA files).  Output goes to
    <name>_negative_control.primersearch next to the query's sequence
    file and is recorded in .primersearch_output.  Jobs run in
    parallel via multiprocessing, or SGE when options.sge is set.
    """
    logger.info("Constructing negative control PrimerSearch runs " +
                "for %d objects ...", len(gd_list))
    clines = []  # one command line per query object
    for query_gd in gd_list:
        query_gd.primersearch_output = []
        outfilename = os.path.join(
            os.path.split(query_gd.seqfilename)[0],
            "%s_negative_control.primersearch" % query_gd.name)
        query_gd.primersearch_output.append(outfilename)
        # Assemble the PrimerSearch invocation
        cline = PrimerSearchCommandline()
        cline.auto = True
        cline.seqall = options.single_product
        cline.infile = query_gd.primersearchfilename
        cline.outfile = outfilename
        cline.mismatchpercent = options.mismatchpercent
        clines.append(str(cline) + log_output(os.path.basename(outfilename)))
    logger.info("... PrimerSearch jobs to run: ...")
    logger.info("Running:\n" + '\n'.join(clines))
    if options.sge:
        sge_run(clines)
    else:
        multiprocessing_run(clines)
# Classify the primers in a list of GenomeData objects according to the
# other sequences that they amplify
def classify_primers(gd_list):
    """Classify primers by which sequences and families they amplify.

    Loads each GenomeData object's PrimerSearch output.  When
    PrimerSearch reports that a primer amplifies a target genome in
    the dataset, the target's name and families are added to the
    corresponding Primer's .amplifies_organism and .amplifies_family
    sets.  For negative-control output, the number of amplimers is
    stored on the primer as .negative_control_amplimers.
    """
    time_start = time.time()
    logger.info("Classifying primers by PrimerSearch results ...")
    # Convenience dictionary, keying each GenomeData object by name.
    # It must be fully populated before any PrimerSearch output is
    # processed below.
    gddict = {}
    for gd_obj in gd_list:
        gddict.setdefault(gd_obj.name, gd_obj)
    # Parse the PrimerSearch output, updating the primer contents of
    # the appropriate GenomeData object for each set of results
    for gd_obj in gd_list:
        logger.info("... GenomeData for %s ...", gd_obj.name)
        for filename in gd_obj.primersearch_output:
            logger.info("... processing %s ...", filename)
            # Target organism name is encoded in the output filename
            targetname = \
                os.path.splitext(os.path.split(
                    filename)[-1])[0].split('_vs_')[-1]
            # Only classify amplimers to sequences in the gdlist
            # dataset; this avoids recording matches to sequences we
            # are not considering, which would artificially lower the
            # specificity counts.
            if targetname in gddict:
                # FIX: 'r' replaces the 'rU' mode (removed in Python
                # 3.11); the context manager also closes the handle,
                # which the original leaked.
                with open(filename, 'r') as psfh:
                    psdata = PrimerSearch.read(psfh)
                # A non-empty amplifier entry means the primer
                # amplifies the target: record organism and families.
                for pname, pdata in psdata.amplifiers.items():
                    if len(pdata):
                        gd_obj.primers[pname].amplifies_organism.add(
                            targetname)
                        for family in gddict[targetname].families:
                            gd_obj.primers[pname].amplifies_family.add(
                                family)
            # Consider the negative control primersearch output
            elif 'negative_control' in filename:
                with open(filename, 'r') as psfh:
                    psdata = PrimerSearch.read(psfh)
                # Note each primer's amplimer count as an attribute
                for pname, pdata in psdata.amplifiers.items():
                    gd_obj.primers[pname].negative_control_amplimers =\
                        len(pdata)
                    logger.info("Found %d amplimers in negative control",
                                len(pdata))
        logger.info("... processed %d Primersearch results for %s ...",
                    len(gd_obj.primersearch_output), gd_obj.name)
    logger.info("... processed PrimerSearch results (%.3fs)",
                time.time() - time_start)
# Write analysis data to files
def write_report(gd_list, blastfilter):
    """Write summary tables and ePrimer3 files for the analysis.

    Writes a tab-separated table (differential_primer_results.tab in
    options.outdir) summarising the distribution of unique,
    family-unique, and universal (for this set) primers amongst the
    GenomeData objects in gd_list, plus the locations of the files
    containing the data used to generate the information.

    In addition, writes the following files in ePrimer3 format:
    i)   <query_name>_specific.eprimer3 - unique primers for each
         query GenomeData object
    ii)  <family>_specific.eprimer3 - unique primers for each family
         in the GenomeData set
    iii) universal_primers.eprimer3 - primers that amplify all
         members of the GenomeData set
    Matching *_amplicons.fas FASTA files are written alongside.

    :param gd_list: list of GenomeData objects to report on
    :param blastfilter: whether to respect the BLAST-screen flag when
        selecting primers
    """
    time_start = time.time()
    logger.info("Creating summary output ...")
    # First we need to generate a dictionary of GenomeData object names,
    # keyed by family
    families = defaultdict(set)
    for gd_obj in gd_list:
        for family in gd_obj.families:
            families[family].add(gd_obj.name)
    # Rectify nocds flag
    cds_overlap = not options.nocds
    # Check whether output directory exists and, if not, create it
    if not os.path.isdir(options.outdir):
        os.mkdir(options.outdir)
    # Open output file, and write header
    outfh = open(os.path.join(options.outdir,
                              'differential_primer_results.tab'), 'w')
    outfh.write(os.linesep.join([
        "# Summary information table",
        "# Generated by find_differential_primers",
        "# Columns in the table:",
        "# 1) Query organism ID",
        "# 2) Query organism families",
        "# 3) Count of organism-unique primers",
        "# 4) Count of universal primers",
        "# 5) Query sequence filename",
        "# 6) Query feature filename",
        "# 7) Query ePrimer3 primers filename"]) + '\n')
    # Write data for each GenomeData object
    other_org_count = len(gd_list) - 1  # Amplifications for 'universal' set
    # We store 'universal' primers in their own list, and family-specific
    # primers in a dictionary, keyed by family
    all_universal_primers = []
    family_specific_primers = defaultdict(list)
    # Loop over each GenomeData object and populate family-specific and
    # universal primer collections, as well as organism-specific and
    # summary information
    for gd_obj in gd_list:
        logger.info('\n'.join([
            "... writing data for %s ..." % gd_obj.name,
            "... cds_overlap: %s ..." % cds_overlap,
            "... gc3primevalid: %s ..." % options.filtergc3prime,
            "... oligovalid: %s ..." % options.hybridprobe,
            "... blastpass: %s ..." % blastfilter,
            "... single_product %s ..." % (options.single_product is
                                           not None),
            "... retrieving primer pairs ...",
            "... finding strain-specific primers for %s ..." % gd_obj.name
        ]))
        unique_primers = gd_obj.get_unique_primers(cds_overlap, blastfilter)
        logger.info("... finding family-specific primers for %s ...",
                    gd_obj.name)
        family_unique_primers = {}
        for family in gd_obj.families:
            logger.info("Checking family: %s" % family)
            logger.info("families[%s]: %s" % (family, families[family]))
            family_unique_primers[family] = \
                gd_obj.get_family_unique_primers(families[family], cds_overlap,
                                                 blastfilter)
            family_specific_primers[family] += family_unique_primers[family]
            logger.info("family_unique_primers[%s]: %d" %
                        (family, len(family_unique_primers[family])))
            logger.info("family_specific_primers[%s]: %d" %
                        (family, len(family_specific_primers[family])))
        logger.info("... finding universal primers for %s ...", gd_obj.name)
        universal_primers = \
            gd_obj.get_primers_amplify_count(other_org_count, cds_overlap,
                                             blastfilter)
        all_universal_primers.extend(universal_primers)
        # Write summary data to file
        outfh.write('\t'.join([gd_obj.name, ','.join(gd_obj.families),
                               str(len(unique_primers)),
                               str(len(universal_primers)),
                               str(gd_obj.seqfilename),
                               str(gd_obj.ftfilename),
                               str(gd_obj.primerfilename)]) + '\n')
        # Write organism-specific primers to file
        write_eprimer3(unique_primers,
                       os.path.join(options.outdir,
                                    "%s_specific_primers.eprimer3" %
                                    gd_obj.name), gd_obj.seqfilename)
        # Write organism-specific amplicons to file
        SeqIO.write([p.amplicon for p in unique_primers],
                    os.path.join(options.outdir,
                                 "%s_specific_amplicons.fas" % gd_obj.name),
                    'fasta')
    outfh.close()
    # Write universal primers to file.
    # NOTE(review): this uses `universal_primers`, i.e. the universal
    # set computed for the *last* GenomeData object in the loop above;
    # `all_universal_primers` is accumulated but never used.  If the
    # per-object universal sets can differ, `all_universal_primers`
    # may have been intended here — confirm against expected output.
    write_eprimer3(universal_primers,
                   os.path.join(options.outdir, "universal_primers.eprimer3"),
                   '', append=True)
    # Write universal amplicons to file.
    # NOTE(review): this open() handle is never explicitly closed.
    SeqIO.write([p.amplicon for p in universal_primers],
                open(os.path.join(options.outdir,
                                  "universal_amplicons.fas"), 'w'),
                'fasta')
    # Write family-specific primers to files
    outfh = open(os.path.join(options.outdir,
                              'differential_primer_results-families.tab'),
                 'w')
    outfh.write(os.linesep.join([
        "# Summary information table",
        "# Generated by find_differential_primers",
        "# Columns in the table:",
        "# 1) Family",
        "# 2) Count of family-specific primers",
        "# 3) Family-specific primer file",
        "# 4) Family-specific amplicon file"]) + '\n')
    for family, primers in family_specific_primers.items():
        outstr = [family, str(len(primers))]
        fname = os.path.join(options.outdir,
                             "%s_family-specific_primers.eprimer3" %
                             family)
        write_eprimer3(primers, fname, '')
        outstr.append(fname)
        # Write family-specific amplicons to file.
        # NOTE(review): this open() handle is never explicitly closed.
        fname = os.path.join(options.outdir,
                             "%s_family-specific_amplicons.fas" %
                             family)
        SeqIO.write([p.amplicon for p in primers], open(fname, 'w'), 'fasta')
        outstr.append(fname)
        outfh.write('\t'.join(outstr) + '\n')
    # Being tidy...
    outfh.close()
    logger.info("... data written (%.3fs)", time.time() - time_start)
# Write ePrimer3 format primer file
def write_eprimer3(primers, filename, sourcefilename, append=False):
    """Write the passed primers to filename in ePrimer3-compatible format.

    Each primer pair is written with its product size, a FORWARD
    PRIMER line, a REVERSE PRIMER line and, where an internal oligo
    was designed (primer has an internal_start attribute), an
    INTERNAL OLIGO line.  sourcefilename is echoed on the per-primer
    comment line.  Set append=True to add to an existing file rather
    than overwrite it.
    """
    logger.info("Writing %d primer pairs to %s ...", len(primers), filename)
    filemode = 'a' if append else 'w'  # Do we append or write anew?
    with open(filename, filemode) as outfh:
        # Section header
        outfh.write(os.linesep.join([
            "# EPRIMER3 PRIMERS %s " % filename,
            "# Start Len Tm GC% Sequence",
            os.linesep]) + '\n')
        for primercount, primer in enumerate(primers, start=1):
            outfh.write("# %s %s\n" % (primer.name, sourcefilename))
            outfh.write("%-4d PRODUCT SIZE: %d\n" % (primercount,
                                                     primer.size))
            outfh.write(" FORWARD PRIMER %-9d %-3d %.02f %.02f %s\n" %
                        (primer.forward_start, primer.forward_length,
                         primer.forward_tm, primer.forward_gc,
                         primer.forward_seq))
            outfh.write(" REVERSE PRIMER %-9d %-3d %.02f %.02f %s\n" %
                        (primer.reverse_start, primer.reverse_length,
                         primer.reverse_tm, primer.reverse_gc,
                         primer.reverse_seq))
            if hasattr(primer, 'internal_start'):
                outfh.write(" INTERNAL OLIGO %-9d %-3d %.02f %.02f %s\n" %
                            (primer.internal_start, primer.internal_length,
                             primer.internal_tm, primer.internal_gc,
                             primer.internal_seq))
            outfh.write(os.linesep * 3)
# Run the passed list of command-lines using a multiprocessing.Pool
def multiprocessing_run(clines):
    """Run the passed command lines in parallel via multiprocessing.Pool.

    Each command line is executed with subprocess.call (through the
    shell, except on win32), with stderr piped.  The callback receives
    the command's integer return status: when options.verbose is set
    the status is logged immediately via multiprocessing_callback;
    otherwise statuses are collected into `completed`.

    NOTE(review): when options.verbose is set, `completed` is never
    populated, so the "Completed:" log line below lists nothing —
    confirm whether that is intended.

    :param clines: iterable of shell command-line strings to run
    """
    time_start = time.time()
    logger.info("Running %d jobs with multiprocessing ...",
                len(clines))
    pool = multiprocessing.Pool(processes=options.cpus)  # create process pool
    completed = []
    # Choose how return statuses are handled: log each immediately
    # (verbose), or accumulate them for a single summary log.
    if options.verbose:
        callback_fn = multiprocessing_callback
    else:
        callback_fn = completed.append
    for cline in clines:
        # Each job is subprocess.call(cline, stderr=PIPE, shell=...);
        # shell=True everywhere except Windows, so the redirection
        # suffix appended by log_output() is honoured.
        pool.apply_async(subprocess.call,
                         (str(cline), ),
                         {'stderr': subprocess.PIPE,
                          'shell': sys.platform != "win32"},
                         callback=callback_fn)
    pool.close()  # Run jobs
    pool.join()
    logger.info("Completed:\n" + '\n'.join([str(e) for e in completed]))
    logger.info("... all multiprocessing jobs ended (%.3fs)",
                time.time() - time_start)
# Add a multiprocessing callback function here
def multiprocessing_callback(val):
    """Verbose callback for multiprocessing runs.

    Receives a job's return status and logs completion when it is
    zero, or an error otherwise (a nonzero status indicates the
    underlying command failed).
    """
    if val == 0:
        logger.info("... multiprocessing run completed (status: %s) ...", val)
    else:
        logger.error("... problem with multiprocessing run (status: %s) ...",
                     val)
# Clean output for each GenomeData object in the passed list
def clean_output(gd_list):
    """Remove generated output files for each GenomeData object.

    Deletes .eprimer3, .primers, .prodigalout, .primersearch and .xml
    files from the directory containing each object's sequence file.
    """
    time_start = time.time()
    logger.info("Cleaning up output files for GenomeData objects ...")
    # Extensions of generated files that are safe to delete.
    # BUGFIX: '.primers' previously appeared as 'primers' (no leading
    # dot), so it never matched os.path.splitext() output and .primers
    # files were never cleaned up.
    extensions = ['.eprimer3', '.primers', '.prodigalout',
                  '.primersearch', '.xml']
    # Loop over each GenomeData object, and remove each output file
    for gd_obj in gd_list:
        seqdir = os.path.split(gd_obj.seqfilename)[0]
        for filename in [f for f in os.listdir(seqdir)
                         if os.path.splitext(f)[-1] in extensions]:
            abspath = os.path.join(seqdir, filename)
            logger.info("... deleting %s ...", abspath)
            os.remove(abspath)  # You can never go back after this point
    logger.info("... done (%.3fs)", time.time() - time_start)
# construct str to concat on end of cline if option.keep_logs is set
def log_output(filename):
    """Return a shell redirection suffix for capturing a job's stderr.

    When options.keep_logs is set, returns ' 2> <path>.log'; the log
    goes into options.log_dir when that is also set, otherwise next to
    the working directory under the bare filename.  Returns an empty
    string when logs are not being kept.
    """
    extension = ".log"
    redirect = " 2> "
    if not options.keep_logs:
        return ""
    if options.log_dir:
        return redirect + os.path.join(options.log_dir, filename) + extension
    return redirect + filename + extension
# run list of command-line jobs with SGE
def sge_run(*args):
    """Placeholder for compiling and running command lines on SGE.

    Not yet implemented; always raises NotImplementedError.
    """
    raise NotImplementedError
###
# SCRIPT
if __name__ == '__main__':
    # Parse cmd-line
    options, arguments, optparser = parse_cmdline()
    # Set up logging, and modify loglevel according to whether we need
    # verbosity or not
    # err_handler points to sys.stderr
    # err_handler_file points to a logfile, if named
    logger = logging.getLogger('find_differential_primers.py')
    logger.setLevel(logging.DEBUG)
    err_handler = logging.StreamHandler(sys.stderr)
    err_formatter = logging.Formatter('%(levelname)s: %(message)s')
    err_handler.setFormatter(err_formatter)
    if options.logfile is not None:
        try:
            # The file handler logs at INFO regardless of verbosity,
            # so the logfile always captures the full run record.
            logstream = open(options.logfile, 'w')
            err_handler_file = logging.StreamHandler(logstream)
            err_handler_file.setFormatter(err_formatter)
            err_handler_file.setLevel(logging.INFO)
            logger.addHandler(err_handler_file)
        except IOError:
            logger.error("Could not open %s for logging",
                         options.logfile)
            sys.exit(1)
    # Console verbosity: INFO when --verbose, WARNING otherwise
    if options.verbose:
        err_handler.setLevel(logging.INFO)
    else:
        err_handler.setLevel(logging.WARNING)
    logger.addHandler(err_handler)
    logger.info('# find_differential_primers.py logfile')
    logger.info('# Run: %s', time.asctime())
    # Report arguments, if verbose
    logger.info(options)
    logger.info(arguments)
    # Create our GenomeData objects. If there is no configuration file
    # specified, raise an error and exit. Otherwise we end up with a list
    # of GenomeData objects that are populated only with the data from the
    # config file
    if options.filename is None:
        optparser.print_help()
        raise IOError("No configuration file specified")
    gdlist = create_gd_from_config(options.filename)
    # If the user wants to clean the directory before starting, do so
    if options.clean or options.cleanonly:
        clean_output(gdlist)
        if options.cleanonly:
            sys.exit(0)
    # It is possible that the sequence file for a GenomeData object might
    # be a multi-sequence file describing scaffolds or contigs. We create a
    # concatenated sequence to facilitate further analyses, if this is the
    # case. Where a sequence needs to be concatenated, this will affect the
    # placement of features and/or primers, so any specified files are
    # reset to None
    check_single_sequence(gdlist)
    # What EMBOSS version is available? This is important as the ePrimer3
    # command-line changes in v6.6.0, which is awkward for the Biopython
    # interface.
    # NOTE(review): on Python 3, subprocess.check_output returns bytes,
    # so embossversion is a bytes object here — confirm that downstream
    # consumers expect that.
    embossversion = \
        subprocess.check_output("embossversion",
                                stderr=subprocess.PIPE,
                                shell=sys.platform != "win32").strip()
    logger.info("EMBOSS version reported as: %s", embossversion)
    # We need to check the existence of a prescribed feature file and, if
    # there is not one, create it. We don't bother if the --nocds flag is set.
    if not (options.nocds or options.noprodigal):
        logger.info("--nocds option not set: " +
                    "Checking existence of features...")
        check_ftfilenames(gdlist)
    elif options.nocds:
        logger.warning("--nocds option set: Not checking or " +
                       "creating feature files")
    else:
        logger.warning("--noprodigal option set: Not predicting new CDS")
    # We need to check for the existence of primer sequences for the organism
    # and, if they do not exist, create them using ePrimer3. If the
    # --noprimer3 flag is set, we do not create new primers, but even if the
    # --noprimersearch flag is set, we still need to check whether the
    # primer files are valid
    if not options.noprimer3:
        logger.info("--noprimer3 flag not set: Predicting new primers")
        check_primers(gdlist)
        predict_primers(gdlist, embossversion)
    else:
        logger.warning("--noprimer3 flag set: Not predicting new primers")
    # With a set of primers designed for the organism, we can load them into
    # the GenomeData object, filtering for those present only in the CDS,
    # if required. This step is necessary, whether or not a new ePrimer3
    # prediction is made. We also filter on GC content at the primer 3' end,
    # if required.
    logger.info("Loading primers...")
    load_primers(gdlist)
    # At this point, we can check our primers against a prescribed BLAST
    # database. How we filter these depends on the user's preference.
    # We screen against BLAST here so that we can flag an attribute on
    # each primer to say whether or not it passed the BLAST screen.
    if options.blastdb and not options.useblast:
        logger.info("--blastdb options set: BLAST screening primers...")
        blast_screen(gdlist)
    elif options.useblast:
        logger.warning("--useblast option set: " +
                       "using existing BLAST results...")
    else:
        logger.warning("No BLAST options set, not BLAST screening primers...")
    # Having a set of (potentially CDS-filtered) primers for each organism,
    # we then scan these primers against each of the other organisms in the
    # set, using the EMBOSS PrimerSearch package
    # (http://embossgui.sourceforge.net/demo/manual/primersearch.html)
    # Now we have all the data we need to run PrimerSearch in an all-vs-all
    # manner, so make a cup of tea, put your feet up, and do the comparisons
    # with EMBOSS PrimerSearch
    if options.noprimersearch:
        logger.warning("--noprimersearch flag set: Not running PrimerSearch")
        # Load the appropriate primersearch output files for each
        # GenomeData object
        load_existing_primersearch_results(gdlist)
    else:
        logger.info("--noprimersearch flag not set: Running PrimerSearch")
        # We write input for PrimerSearch ignoring all the filters; this lets
        # us turn off PrimerSearch and rerun the analysis with alternative
        # filter settings
        for gd in gdlist:
            gd.write_primers()
        # Run PrimerSearch
        primersearch(gdlist)
    # If the --single_product option is specified, we load in the sequence
    # file to which the passed argument refers, and filter the primer
    # sequences on the basis of how many amplification products are produced
    # from these sequences. We expect exactly one amplification product per
    # primer set, if it's not degenerate on the target sequence
    # (note that this filter is meaningless for family-specific primers)
    if options.single_product:
        find_negative_target_products(gdlist)
        # NOTE(review): the message below mentions --blastdb, but this
        # branch is guarded by --single_product; the text appears to be
        # copied from the BLAST block above — confirm intent.
        logger.info("--blastdb options set: BLAST screening primers...")
        blast_screen(gdlist)
    # Now we classify the primer sets according to which sequences they amplify
    if not options.noclassify:
        logger.info("Classifying primers and writing output files ...")
        # Classify the primers in each GenomeData object according to
        # the organisms and families that they amplify, using the
        # PrimerSearch results.
        classify_primers(gdlist)
        # All the data has been loaded and processed, so we can now create our
        # plaintext summary report of the number of unique, family-unique and
        # universal primers in each of the organisms
        write_report(gdlist, (options.blastdb is not None or options.useblast))
|
alainr85/bio_primers
|
find_differential_primers/find_differential_primers.py
|
Python
|
gpl-3.0
| 83,235
|
[
"BLAST",
"Biopython"
] |
34b1b8419fe51782860983c91c840f4f25e9dc9101b4689070ccb3a8184dc494
|
"""
=================================
Gaussian Mixture Model Ellipsoids
=================================
Plot the confidence ellipsoids of a mixture of two Gaussians with EM
and variational Dirichlet process.
Both models have access to five components with which to fit the
data. Note that the EM model will necessarily use all five components
while the DP model will effectively only use as many as are needed for
a good fit. This is a property of the Dirichlet Process prior. Here we
can see that the EM model splits some components arbitrarily, because it
is trying to fit too many components, while the Dirichlet Process model
adapts it number of state automatically.
This example doesn't show it, as we're in a low-dimensional space, but
another advantage of the Dirichlet process model is that it can fit
full covariance matrices effectively even when there are less examples
per cluster than there are dimensions in the data, due to
regularization properties of the inference algorithm.
"""
import itertools
import numpy as np
from scipy import linalg
import matplotlib.pyplot as plt
import matplotlib as mpl
import IPython
from sklearn import mixture
# Number of samples per component
n_samples = 500
# Generate random sample, two components
np.random.seed(0)
C = np.array([[0., -0.1], [1.7, .4]])
X = np.r_[np.dot(np.random.randn(n_samples, 2), C),
.7 * np.random.randn(n_samples, 2) + np.array([-6, 3])]
# Fit a mixture of Gaussians with EM using five components
gmm = mixture.GMM(n_components=5, covariance_type='full')
gmm.fit(X)
# Fit a Dirichlet process mixture of Gaussians using five components
dpgmm = mixture.DPGMM(n_components=5, covariance_type='full')
dpgmm.fit(X)
IPython.embed()
color_iter = itertools.cycle(['r', 'g', 'b', 'c', 'm'])
for i, (clf, title) in enumerate([(gmm, 'GMM'),
(dpgmm, 'Dirichlet Process GMM')]):
splot = plt.subplot(2, 1, 1 + i)
Y_ = clf.predict(X)
IPython.embed()
for i, (mean, covar, color) in enumerate(zip(
clf.means_, clf._get_covars(), color_iter)):
v, w = linalg.eigh(covar)
u = w[0] / linalg.norm(w[0])
# as the DP will not use every component it has access to
# unless it needs it, we shouldn't plot the redundant
# components.
if not np.any(Y_ == i):
continue
plt.scatter(X[Y_ == i, 0], X[Y_ == i, 1], .8, color=color)
# Plot an ellipse to show the Gaussian component
angle = np.arctan(u[1] / u[0])
angle = 180 * angle / np.pi # convert to degrees
ell = mpl.patches.Ellipse(mean, v[0], v[1], 180 + angle, color=color)
ell.set_clip_box(splot.bbox)
ell.set_alpha(0.5)
splot.add_artist(ell)
plt.xlim(-10, 10)
plt.ylim(-3, 6)
plt.xticks(())
plt.yticks(())
plt.title(title)
plt.show()
|
BerkeleyAutomation/vtsc
|
scripts/examples/plot_gmm.py
|
Python
|
mit
| 2,870
|
[
"Gaussian"
] |
30d3eff81a9b3f3edd550c5e95ceee850be439e534b2ad7bfe4ce58495bf91f0
|
import pandas
import os
import os.path
##--------------------------------------------------------------------
## Simulation suite
## - "swissface"
## - "fluxnet"
## - "fluxnet2015"
## - "fluxnet_cnmodel"
## - "gcme"
## - "campi"
## - "campi_cmodel"
## - "fluxnet_fixalloc"
## - "atkin"
## - "atkinfull"
## - "olson"
## - "olson_cmodel"
##--------------------------------------------------------------------
## For global simulations, set simsuite to 'global'.
## This links NetCDF input files from directories mirrored locally from
## /work/bstocker/labprentice/data on Imperial's HPC CX1 server into the
## input directory structure required for SOFUN.
##--------------------------------------------------------------------
## For an example simulation (simulation name 'EXAMPLE_global'), set
## simsuite to 'example'. This should work after cloning this repo
## from github.
##--------------------------------------------------------------------
## Selected simulation suite; determines which site information file is read.
simsuite = 'fluxnet2015'
##--------------------------------------------------------------------
## Get site/experiment names
##--------------------------------------------------------------------
filnam_siteinfo_csv = '../input_' + simsuite + '_sofun/experiments_' + simsuite + '_sofun.csv'
## 'siteinfo' stays None unless the CSV was actually read; the sitelist is
## only written in that case. Previously the script fell through and
## iterated an undefined 'siteinfo' (NameError) when the file was missing,
## and it never closed the output file handle.
siteinfo = None
if os.path.exists( filnam_siteinfo_csv ):
    print('reading site information file ' + filnam_siteinfo_csv + ' ...')
    siteinfo = pandas.read_csv( filnam_siteinfo_csv )
elif simsuite == 'example':
    print('Executing single example simulation...')
else:
    print('site info file does not exist: ' + filnam_siteinfo_csv)
if siteinfo is not None:
    print('writing file sitelist.txt')
    ## One experiment name per line.
    with open('sitelist.txt', 'w') as fil:
        for index, row in siteinfo.iterrows():
            fil.write( row['expname'] + '\n' )
|
stineb/sofun
|
get_sitelist_simsuite.py
|
Python
|
lgpl-2.1
| 1,708
|
[
"NetCDF"
] |
659699c2402fed697dc0701740c246a7fa8057f311fa074ff606dceadd53f952
|
''' <h1> Library for specular magnetic x-ray reflectivity</h1>
The magnetic reflectivity is calculated according to: S.A. Stephanov and S.K Shina PRB 61 15304.
Note: The documentation is not updated from the interdiff model!
<h2>Classes</h2>
<h3>Layer</h3>
<code> Layer(b = 0.0, d = 0.0, f = 0.0+0.0J, dens = 1.0, magn_ang = 0.0, magn = 0.0, sigma = 0.0)</code>
<dl>
<dt><code><b>d</b></code></dt>
<dd>The thickness of the layer in AA (Angstroms = 1e-10m)</dd>
<dt><code><b>f</b></code></dt>
<dd>The x-ray scattering length per formula unit in electrons. To be strict it is the
number of Thompson scattering lengths for each formula unit.</dd>
<dt><code><b>dens</b></code></dt>
<dd>The density of formula units in units per Angstroms. Note the units!</dd>
<dt><code><b>sigmai</b></code></dt>
<dd>The root mean square <em>interdiffusion</em> of the top interface of the layer in Angstroms.</dd>
<dt><code><b>sigmar</b></code></dt>
<dd>The root mean square <em>roughness</em> of the top interface of the layer in Angstroms.</dd>
</dl>
<h3>Stack</h3>
<code> Stack(Layers = [], Repetitions = 1)</code>
<dl>
<dt><code><b>Layers</b></code></dt>
<dd>A <code>list</code> consisting of <code>Layer</code>s in the stack
the first item is the layer closest to the bottom</dd>
<dt><code><b>Repetitions</b></code></dt>
<dd>The number of repetitions of the stack</dd>
</dl>
<h3>Sample</h3>
<code> Sample(Stacks = [], Ambient = Layer(), Substrate = Layer(), eta_z = 10.0,
eta_x = 10.0, h = 1.0)</code>
<dl>
<dt><code><b>Stacks</b></code></dt>
<dd>A <code>list</code> consisting of <code>Stack</code>s in the stacks
the first item is the layer closest to the bottom</dd>
<dt><code><b>Ambient</b></code></dt>
<dd>A <code>Layer</code> describing the Ambient (environment above the sample).
Only the scattering lengths and density of the layer is used.</dd>
<dt><code><b>Substrate</b></code></dt>
<dd>A <code>Layer</code> describing the substrate (environment below the sample).
Only the scattering lengths, density and roughness of the layer is used.</dd>
<dt><code><b>eta_z</b></code></dt>
<dd>The out-of plane (vertical) correlation length of the roughness
in the sample. Given in AA. </dd>
<dt><code><b>eta_x</b></code></dt>
<dd>The in-plane global correlation length (it is assumed equal for all layers).
Given in AA.</dd>
<dt><code><b>h</b></code></dt>
<dd>The jaggedness parameter, should be between 0 and 1.0. This describes
how jagged the interfaces are. This is also a global parameter for all
interfaces.</dd>
</dl>
<h3>Instrument</h3>
<code>Instrument(wavelength = 1.54, coords = 'tth',
I0 = 1.0 res = 0.001, restype = 'no conv', respoints = 5, resintrange = 2,
beamw = 0.01, footype = 'no corr', samplelen = 10.0, taylor_n = 1)</code>
<dl>
<dt><code><b>wavelength</b></code></dt>
<dd>The wavelength of the radiation given in AA (Angstroms)</dd>
<dt><code><b>coords</b></code></dt>
<dd>The coordinates of the data given to the SimSpecular function.
The available alternatives are: 'q' or 'tth'. Alternatively the numbers
0 (q) or 1 (tth) can be used.</dd>
<dt><code><b>I0</b></code></dt>
<dd>The incident intensity (a scaling factor)</dd>
<dt><code><b>Ibkg</b></code></dt>
<dd>The background intensity. Added as a constant value to the calculated
reflectivity</dd>
<dt><code><b>res</b></code></dt>
<dd>The resolution of the instrument given in the coordinates of
<code>coords</code>. This assumes a gaussian resolution function and
<code>res</code> is the standard deviation of that gaussian.</dd>
<dt><code><b>restype</b></code></dt>
<dd>Describes the type of the resolution calculated. One of the alternatives:
'no conv', 'fast conv', 'full conv and varying res.' or 'fast conv + varying res.'.
The respective numbers 0-3 also work. Note that fast convolution only allows
a single value into res whereas the others can also take an array with the
same length as the x-data (varying resolution)</dd>
<dt><code><b>respoints</b></code></dt>
<dd>The number of points to include in the resolution calculation. This is only
used for 'full conv and varying res.' and 'fast conv + varying res'</dd>
<dt><code><b>resintrange</b></code></dt>
<dd>Number of standard deviations to integrate the resolution function times
the reflectivity over</dd>
<dt><code><b>footype</b></code></dt>
<dd>Which type of footprint correction is to be applied to the simulation.
One of: 'no corr', 'gauss beam' or 'square beam'. Alternatively,
the number 0-2 are also valid. The different choices are self explanatory.
</dd>
<dt><code><b>beamw</b></code></dt>
<dd>The width of the beam given in mm. For 'gauss beam' it should be
the standard deviation. For 'square beam' it is the full width of the beam.</dd>
<dt><code><b>samplelen</b></code></dt>
<dd>The length of the sample given in mm</dd>
<dt><code><b>taylor_n</b></code></dt>
<dd>The number terms taken into account in the taylor expansion of
the fourier integral of the correlation function. More terms more accurate
calculation but also much slower.</dd>
'''
import lib.xrmr
import lib.edm_slicing as edm
try:
import lib.paratt_weave as Paratt
except StandardError,S:
print 'Not using inline c code for reflectivity calcs - can not import module'
print S
import lib.paratt as Paratt
from numpy import *
from scipy.special import erf
from lib.instrument import *
# Susceptibility magnitudes below these thresholds are treated as
# non-magnetic (mag_limit) or as magnetization along y only (mpy_limit);
# both are used when classifying slabs in compose_sld.
mag_limit = 1e-8
mpy_limit = 1e-8
# Preamble to define the parameters needed for the models outlined below:
ModelID='StephanovXRMR'
# Automatic loading of parameters possible by including this list
__pars__ = ['Layer', 'Stack', 'Sample', 'Instrument']
# Used for making choices in the GUI; the list index doubles as the
# integer alternative accepted by the simulation functions below.
instrument_string_choices = {'coords': ['q','tth'],
                             'restype': ['no conv', 'fast conv',
                                         'full conv and varying res.',
                                         'fast conv + varying res.'],
                             'footype': ['no corr', 'gauss beam',
                                         'square beam'],
                             'pol':['circ+','circ-','tot', 'ass', 'sigma', 'pi'],
                             'theory': ['full', 'non-anisotropic'],
                             #'compress':['yes', 'no'],
                             #'slicing':['yes', 'no'],
                             }
# Default values for every Instrument parameter; keys must match the
# choices/units/groups dictionaries below.
InstrumentParameters={'wavelength':1.54,'coords':'tth','I0':1.0,'res':0.001,\
'restype':'no conv','respoints':5,'resintrange':2,'beamw':0.01,'footype': 'no corr',\
'samplelen':10.0, 'Ibkg': 0.0, 'pol':'circ+', 'theory':'full',}
# Coordinates=1 => twotheta input
# Coordinates=0 => Q input
# Res: stddev of resolution
# ResType 0: No resolution convolution
#         1: Fast convolution
#         2: Full convolution + varying resolution
#         3: Fast convolution, varying resolution
# ResPoints: Number of points for the convolution, only valid for ResolutionType=2
# ResIntrange: Number of standard deviations to integrate over, default 2
# Parameters for footprint corrections
# Footype: 0: No corrections for footprint
#          1: Correction for Gaussian beam => Beamw given in mm and stddev
#          2: Correction for square profile => Beamw given in full width mm
# Samplen = Sample length in mm.
#
#
# Grouping of the instrument parameters as shown in the GUI.
InstrumentGroups = [('General', ['wavelength', 'coords', 'I0', 'Ibkg']),
                    ('Resolution', ['restype', 'res', 'respoints', 'resintrange']),
                    ('XRMR', ['pol', 'theory']),
                    ('Footprint', ['footype', 'beamw', 'samplelen',]),
                    ]
# Display units for each instrument parameter.
InstrumentUnits={'wavelength':'AA','coords':'','I0':'arb.','res':'[coord]',
                 'restype':'','respoints':'pts.','resintrange': '[coord]',
                 'beamw':'mm','footype': '',\
                 'samplelen':'mm', 'Ibkg': 'arb.', 'pol':'',
                 'theory':'',}
# Default values for every Layer parameter. The tiny imaginary offsets
# (1e-20J) keep the complex scattering factors away from exact zero.
LayerParameters = {'dens':1.0, 'd':0.0, 'f': (0.0 + 1e-20J),
                   'fr':(0.0 + 1e-20J),
                   'fm1':(0.0 + 1e-20J), 'fm2':(0.0 + 1e-20J),
                   'phi_m': 0.0, 'theta_m': 0.0, 'resdens': 1.0,
                   'resmag': 1.0,
                   'sigma_c': 0.0, 'sigma_m': 0.0, 'mag':1.0,
                   'dmag_l': 1.0, 'dmag_u': 1.0, 'dd_m':0.0,
                   'b': 1e-20J
                   #'dtheta_l': 0.0, 'dtheta_u':0.0, 'dphi_l':0.0, 'dphi_u':0.0,
                   }
# Display units for each layer parameter.
LayerUnits = {'dens':'at./AA^3', 'd':'AA', 'f': 'el.',
              'fr':'el.',
              'fm1':'el./mu_B', 'fm2':'el./mu_B',
              'phi_m': 'deg.', 'theta_m': 'deg.', 'resdens': 'rel.',
              'resmag': 'rel.',
              'sigma_c': 'AA', 'sigma_m': 'AA', 'mag': 'mu_B',
              'dmag_l': 'rel.', 'dmag_u': 'rel.', 'dd_m':'AA',
              'b': 'fm'
              #'dtheta_l': 0.0, 'dtheta_u':0.0, 'dphi_l':0.0, 'dphi_u':0.0,
              }
# Grouping of the layer parameters as shown in the GUI.
LayerGroups = [('Scatt. len.', ['b', 'f', 'fr', 'fm1', 'fm2']),
               ('Magnetism', ['mag', 'resmag', 'phi_m','theta_m']),
               ('Misc.', ['sigma_c', 'dens', 'resdens', 'd']),
               ('Interf. Mag. Mom.', ['dmag_l', 'dmag_u', 'sigma_m', 'dd_m'])
               ]
#('Interf. Mag. Ang.', ('dtheta_l', 'dtheta_u', 'dphi_l', 'dphi_u'))
StackParameters = {'Layers':[], 'Repetitions':1}
# Sample-level options controlling profile slicing and recompression in
# compose_sld (see that function for how each value is used).
SampleParameters = {'Stacks':[], 'Ambient':None, 'Substrate':None,
                    'compress':'yes', 'slicing':'no', 'slice_depth':1.0,
                    'sld_mult':4.0, 'sld_buffer': 20.0, 'sld_delta': 5.0,
                    'dsld_max':0.1, 'dsld_offdiag_max':0.1,
                    }
SampleGroups = [['Slicing', [ 'slicing', 'slice_depth', 'sld_mult', 'sld_buffer',
                              'sld_delta']],
                ['Compression', ['compress', 'dsld_max', 'dsld_offdiag_max']],
                ]
# String alternatives for the sample options above (index = integer form).
sample_string_choices = {'compress':['yes', 'no'],
                         'slicing':['yes', 'no'],
                         }
# A buffer to save previous calculations for spin-flip calculations.
# reflectivity_xmag stores the result of lib.xrmr.do_calc here together
# with the inputs that produced it, so repeated evaluations with unchanged
# parameters can reuse W instead of recomputing it.
class Buffer:
    # Cached reflection amplitude matrix from lib.xrmr.do_calc.
    W = None
    # Resolved layer parameters used to compute W (cache key).
    parameters = None
    # sin(theta) values used to compute W (cache key).
    g_0 = None
    # Instrument coordinate setting used (cache key).
    coords = None
    # Wavelength used (cache key).
    wavelength = None
def Specular(TwoThetaQz, sample, instrument):
    '''Simulate the specular magnetic x-ray reflectivity.

    Parameters:
        TwoThetaQz  array of scan positions, interpreted as two-theta
                    (deg.) or q (1/AA) depending on instrument.coords.
        sample      the Sample object to simulate.
        instrument  the Instrument object with the measurement settings.

    Returns the reflectivity array scaled by I0 plus Ibkg, except for the
    asymmetry channel ('ass'), which is a normalized ratio and is returned
    without footprint correction, scaling or background.

    Raises ValueError if instrument.coords has an invalid value.
    '''
    # preamble to get it working with my class interface
    restype = instrument.getRestype()
    weight = None
    if restype == 2 or restype == instrument_string_choices['restype'][2]:
        # Full convolution: expand each point into a resolution vector with
        # associated weights used later by correct_reflectivity.
        (TwoThetaQz, weight) = ResolutionVector(TwoThetaQz[:],
                                                instrument.getRes(),
                                                instrument.getRespoints(),
                                                range=instrument.getResintrange())
    if instrument.getCoords() == 1 or \
            instrument.getCoords() == instrument_string_choices['coords'][1]:
        theta = TwoThetaQz / 2
    elif instrument.getCoords() == 0 or \
            instrument.getCoords() == instrument_string_choices['coords'][0]:
        theta = arcsin(TwoThetaQz / 4 / pi * instrument.getWavelength()) * 180. / pi
    else:
        # Previously an unknown coords value fell through and caused a
        # NameError on theta below; fail explicitly like the other setters.
        raise ValueError('Variable coords has an unvalid value')
    R = reflectivity_xmag(sample, instrument, theta)
    pol = instrument.getPol()
    if pol != 3 and pol != instrument_string_choices['pol'][3]:
        # FootprintCorrections
        foocor = footprint_correction(instrument, theta)
        R = correct_reflectivity(R, instrument, foocor, TwoThetaQz, weight)
        return R * instrument.getI0() + instrument.getIbkg()
    else:
        # Asymmetry is a ratio: neutralize the footprint correction and skip
        # the I0/Ibkg scaling.
        foocor = footprint_correction(instrument, theta) * 0 + 1.0
        R = correct_reflectivity(R, instrument, foocor, TwoThetaQz, weight)
        return R
def OffSpecular(TwoThetaQz, ThetaQx, sample, instrument):
    '''Off-specular scattering; not available for this model.'''
    raise NotImplementedError('Off specular calculations are not implemented for magnetic x-ray reflectivity')
def SLD_calculations(z, sample, inst):
    '''Calculate the susceptibility (scattering length density) profile.

    Parameters:
        z       unused; the depth grid is rebuilt from the sliced layer
                structure returned by compose_sld (kept for the
                SimulationFunctions interface).
        sample  the Sample object.
        inst    the Instrument object (only the wavelength is used).

    Returns a dict with the real parts of the susceptibility tensor
    elements converted to scattering length density units, plus the depth
    positions under key 'z'.

    Note: removed dead code that built interleaved copies of sl_c/sl_m1/
    sl_m2 (new_size, sl_cp, sl_m1p, sl_m2p) which were never used in the
    returned dict.
    '''
    lamda = inst.getWavelength()
    d, sl_c, sl_m1, sl_m2, M, chi, non_mag, mpy = compose_sld(sample, inst, array([0.0,]))

    # Duplicate every value so each slab contributes two points (top and
    # bottom), giving a step profile when plotted against z.
    def interleave(a):
        new_a = zeros(len(a)*2, dtype = complex128)
        new_a[::2] = a
        new_a[1::2] = a
        return new_a
    chi = [[interleave(c) for c in ch] for ch in chi]

    # Matching step-profile depth grid: each slab contributes its top and
    # bottom interface position.
    z = zeros(len(d)*2)
    z[::2] = cumsum(r_[0, d[:-1]])
    z[1::2] = cumsum(r_[d])

    # chi = lamda^2*re/pi * sld  =>  divide by that factor to convert back.
    re = 2.8179402894e-5
    c = 1/(lamda**2*re/pi)
    return {'sl_xx':chi[0][0].real*c, 'sl_xy':chi[0][1].real*c, 'sl_xz':chi[0][2].real*c,
            'sl_yy':chi[1][1].real*c,'sl_yz':chi[1][2].real*c,'sl_zz':chi[2][2].real*c,
            'z':z}
def compose_sld(sample, instrument, theta):
    '''Build the depth profile of scattering lengths and the susceptibility
    tensor chi for the sample.

    Depending on sample.slicing the profile is either sliced into thin
    laminae through lib.edm_slicing (and optionally recompressed when
    sample.compress is 'yes') or built directly from the discrete layers.

    Returns (d, sl_c, sl_m1, sl_m2, M, chi, non_mag, mpy): slab
    thicknesses, charge and magnetic scattering length densities, the
    magnetization direction vectors, the susceptibility tensor elements,
    and boolean masks for non-magnetic slabs and slabs whose magnetic
    off-diagonal terms are small enough to treat as magnetization along y.
    '''
    lamda = instrument.getWavelength()
    parameters = sample.resolveLayerParameters()
    dens = array(parameters['dens'], dtype = float64)
    resdens = array(parameters['resdens'], dtype = float64)
    resmag = array(parameters['resmag'], dtype = float64)
    mag = array(parameters['mag'], dtype = float64)
    dmag_l = array(parameters['dmag_l'], dtype = float64)
    dmag_u = array(parameters['dmag_u'], dtype = float64)
    dd_m = array(parameters['dd_m'], dtype = float64)
    #print [type(f) for f in parameters['f']]
    # The tiny complex offsets keep the scattering factors away from an
    # exact zero.
    f = array(parameters['f'], dtype = complex128) + (1-1J)*1e-20
    fr = array(parameters['fr'], dtype = complex128) + (1-1J)*1e-20
    fm1 = array(parameters['fm1'], dtype = complex128) + (1-1J)*1e-20
    fm2 = array(parameters['fm2'], dtype = complex128) + (1-1J)*1e-20
    d = array(parameters['d'], dtype = float64)
    # Charge and magnetic scattering length densities per layer.
    sl_c = dens*(f + resdens*fr)
    sl_m1 = dens*resdens*resmag*fm1
    sl_m2 = dens*resdens*resmag*fm2 #mag is multiplied in later
    #g_0 = sin(theta*pi/180.0)
    # Magnetization direction unit vector from the polar angles phi_m/theta_m.
    phi = array(parameters['phi_m'], dtype = float64)*pi/180.0
    theta_m = array(parameters['theta_m'], dtype = float64)*pi/180.0
    M = c_[cos(theta_m)*cos(phi), cos(theta_m)*sin(phi), sin(theta_m)]
    #print M
    sigma_c = array(parameters['sigma_c'], dtype = float64)
    # Magnetic roughness adds in quadrature to the chemical roughness.
    sigma_m = sqrt(array(parameters['sigma_m'], dtype = float64)**2 + sigma_c**2)
    #print A, B
    #print type(sample.getSld_buffer())
    if sample.getSlicing() == sample_string_choices['slicing'][0]:
        # Slicing enabled: sample the continuous (erf) profile in laminae
        # of thickness dz.
        dz = sample.getSlice_depth()
        reply= edm.create_profile_cm(d[1:-1], sigma_c[:-1].real, sigma_m[:-1].real,
                                     [edm.erf_profile]*len(sl_c),
                                     [edm.erf_interf]*len(sigma_c[:]),
                                     dmag_l, dmag_u, mag, dd_m,
                                     dz = dz, mult = sample.getSld_mult(),
                                     buffer = sample.getSld_buffer(),
                                     delta = sample.getSld_delta())
        z, comp_prof, mag_prof = reply
        # Collapse the per-layer composition/magnetization profiles into
        # total densities per slice.
        sl_c_lay = comp_prof*sl_c[:, newaxis]
        sl_c = sl_c_lay.sum(0)
        sl_m1_lay = comp_prof*mag_prof*sl_m1[:, newaxis]
        sl_m1 = sl_m1_lay.sum(0)
        sl_m2_lay = comp_prof*mag_prof*sl_m2[:, newaxis]
        sl_m2 = sl_m2_lay.sum(0)
        #print comp_prof.shape, sl_m1_lay.shape, sl_c_lay.shape
        # Broadcast the per-layer magnetization direction onto every slice.
        M = rollaxis(array((ones(comp_prof.shape)*M[:,0][:, newaxis],
                            ones(comp_prof.shape)*M[:,1][:, newaxis],
                            ones(comp_prof.shape)*M[:,2][:, newaxis])),0, 3)
        #print 'M', M
        #print M[...,1].shape
        re = 2.8179402894e-5
        A = -lamda**2*re/pi*sl_c_lay
        B = lamda**2*re/pi*sl_m1_lay
        C = lamda**2*re/pi*sl_m2_lay
        g_0 = sin(theta*pi/180.0)
        #M = c_[ones(sl_c.shape), zeros(sl_c.shape), zeros(sl_c.shape)]
        chi, non_mag, mpy = lib.xrmr.create_chi(g_0, lamda, A, 0.0*A,
                                                B, C, M, d)
        # Sum the per-layer contributions of each tensor element per slice.
        chi = tuple([c.sum(0) for c in chi[0] + chi[1] + chi[2]])
        #print chi[0]
        #M = c_[(M[:,0][:,newaxis]*sl_m1_tmp).sum(0)/sl_m1,
        #       (M[:,1][:,newaxis]*sl_m1_tmp).sum(0)/sl_m1,
        #       (M[:,2][:,newaxis]*sl_m1_tmp).sum(0)/sl_m1
        #       ].real
        #print M
        #print sl_m2
        #print sigma_c, sigma_m, A, B, d
        #print 'Uncompressed:', z.shape
        if sample.getCompress() == sample_string_choices['compress'][0]:
            #Compressing the profile..
            #z, pdens_c, pdens_m = edm.compress_profile2(z, sl_c, sl_m1, sample.getDsld_max())
            dsld_max = sample.getDsld_max()
            dchi_max = dsld_max*lamda**2*re/pi
            dsld_offdiag_max = sample.getDsld_offdiag_max()
            dchi_od_max = dsld_offdiag_max*lamda**2*re/pi
            #z, pdens = edm.compress_profile_n(z, (sl_c, sl_m1, sl_m2),
            #                                 (dsld_max, dsld_max, dsld_max))
            #sl_c, sl_m1, sl_m2 = pdens
            #print chi[0].shape
            # NOTE(review): the tolerance tuple mixes dchi_max and dsld_max
            # for the diagonal elements (positions 0, 4, 8) — looks
            # inconsistent, confirm which scale is intended.
            index, z = edm.compress_profile_index_n(z, chi,
                                                    (dchi_max, dchi_od_max, dchi_od_max,
                                                     dchi_od_max, dsld_max, dchi_od_max,
                                                     dchi_od_max, dchi_od_max, dsld_max))
            reply = edm.create_compressed_profile((sl_c, sl_m1, sl_m2) +
                                                  chi,
                                                  index)
            sl_c, sl_m1, sl_m2, chi_xx, chi_xy, chi_xz, chi_yx, chi_yy, chi_yz, chi_zx, chi_zy, chi_zz = reply
            # Slabs whose magnetic (off-diagonal) terms are negligible.
            non_mag = ((abs(chi_xy) < mag_limit)
                       *(abs(chi_xz) < mag_limit)
                       *(abs(chi_yz) < mag_limit))
            mpy = (abs(chi_yz) < mpy_limit)*(abs(chi_xy) < mpy_limit)*bitwise_not(non_mag)
            #print mpy
            chi = ((chi_xx, chi_xy, chi_xz),(chi_yx, chi_yy, chi_yz),(chi_zx, chi_zy, chi_zz))
        else:
            (chi_xx, chi_xy, chi_xz, chi_yx, chi_yy, chi_yz,chi_zx, chi_zy, chi_zz) = chi
            non_mag = ((abs(chi_xy) < mag_limit)
                       *(abs(chi_xz) < mag_limit)
                       *(abs(chi_yz) < mag_limit))
            # Force the last slab (substrate side) to be treated as
            # non-magnetic.
            non_mag[-1] = True
            mpy = (abs(chi_yz) < mpy_limit)*(abs(chi_xy) < mpy_limit)*bitwise_not(non_mag)
            chi = ((chi_xx, chi_xy, chi_xz),(chi_yx, chi_yy, chi_yz),(chi_zx, chi_zy, chi_zz))
        # Slab thicknesses from the (possibly compressed) z grid; the final
        # 1 is a dummy thickness for the semi-infinite substrate.
        d = r_[z[1:] - z[:-1],1]
        #print 'Compressed: ', z.shape, sl_c.shape
        #print 'WARNING: M is ignored!'
    else:
        # No slicing: build chi directly from the discrete layers.
        #print 'test'
        re = 2.8179402894e-5
        A = -lamda**2*re/pi*sl_c
        B = lamda**2*re/pi*sl_m1
        C = lamda**2*re/pi*sl_m2
        g_0 = sin(theta*pi/180.0)
        #M = c_[ones(sl_c.shape), zeros(sl_c.shape), zeros(sl_c.shape)]
        chi, non_mag, mpy = lib.xrmr.create_chi(g_0, lamda, A, 0.0*A,
                                                B, C, M, d)
        #chi = ((chi_xx, chi_xy, chi_xz),(chi_yx, chi_yy, chi_yz),(chi_zx, chi_zy, chi_zz))
        #sl_c = pdens_c
        #sl_m1 = pdens_m
        #sl_m2 = sl_m1*0
        #print d.shape, A.shape
        #print A, B
        #M = c_[ones(sl_c.shape), zeros(sl_c.shape), zeros(sl_c.shape)]
        #print 'Sl_m2: ', sl_m2, 'END'
    return d, sl_c, sl_m1, sl_m2, M, chi, non_mag, mpy
def reflectivity_xmag(sample, instrument, theta):
    '''Calculate the magnetic x-ray reflectivity for incidence angles theta
    (deg.) for the given polarization channel and theory level.

    theory 'full' uses the anisotropic matrix formalism in lib.xrmr (with
    the module-level Buffer caching the amplitude matrix W between calls);
    theory 'non-anisotropic' approximates each circular channel with a
    scalar Parratt calculation on an effective refractive index.

    Raises ValueError for invalid pol or theory settings.
    '''
    lamda = instrument.getWavelength()
    parameters = sample.resolveLayerParameters()
    d, sl_c, sl_m1, sl_m2, M, chi, non_mag, mpy = compose_sld(sample, instrument, theta)
    #re = 2.8179402894e-5
    #A = -lamda**2*re/pi*sl_c
    #B = lamda**2*re/pi*sl_m1
    #C = lamda**2*re/pi*sl_m2
    g_0 = sin(theta*pi/180.0)
    #print A[::-1], B[::-1], d[::-1], M[::-1], lamda, g_0
    theory = instrument.getTheory()
    # Full theory
    if theory == 0 or theory == instrument_string_choices['theory'][0]:
        # Recompute W only when any cache key (parameters, coords, angles,
        # wavelength) changed since the last call.
        if (Buffer.parameters != parameters or Buffer.coords != instrument.getCoords()
            or any(not_equal(Buffer.g_0,g_0)) or Buffer.wavelength != lamda):
            #W = lib.xrmr.calc_refl(g_0, lamda, A[::-1], 0.0*A[::-1], B[::-1], C[::-1], M[::-1], d[::-1])
            #print 'Calc W'
            W = lib.xrmr.do_calc(g_0, lamda, chi, d, non_mag, mpy)
            Buffer.W = W
            Buffer.parameters = parameters.copy()
            Buffer.coords = instrument.getCoords()
            Buffer.g_0 = g_0.copy()
            Buffer.wavelength = lamda
        else:
            #print 'Reusing W'
            W = Buffer.W
        # Basis change from the sigma/pi amplitudes in W to the circular
        # (+/-) basis Wc.
        trans = ones(W.shape, dtype = complex128); trans[0,1] = 1.0J; trans[1,1] = -1.0J; trans = trans/sqrt(2)
        Wc = lib.xrmr.dot2(trans, lib.xrmr.dot2(W, lib.xrmr.inv2(trans)))
        #Different polarization channels:
        pol = instrument.getPol()
        if pol == 0 or pol == instrument_string_choices['pol'][0]:
            # circ +
            R = abs(Wc[0,0])**2 + abs(Wc[0,1])**2
        elif pol == 1 or pol == instrument_string_choices['pol'][1]:
            # circ -
            R = abs(Wc[1,1])**2 + abs(Wc[1,0])**2
        elif pol == 2 or pol == instrument_string_choices['pol'][2]:
            # tot
            R = (abs(W[0,0])**2 + abs(W[1,0])**2 + abs(W[0,1])**2 + abs(W[1,1])**2)/2
        elif pol == 3 or pol == instrument_string_choices['pol'][3]:
            # ass (asymmetry: difference over sum of the two channels)
            R = 2*(W[0,0]*W[0,1].conj() + W[1,0]*W[1,1].conj()).imag/(abs(W[0,0])**2 + abs(W[1,0])**2 + abs(W[0,1])**2 + abs(W[1,1])**2)
        elif pol == 4 or pol == instrument_string_choices['pol'][4]:
            # sigma
            R = abs(W[0,0])**2 + abs(W[0,1])**2
        elif pol == 5 or pol == instrument_string_choices['pol'][5]:
            # pi
            R = abs(W[1,0])**2 + abs(W[1,1])**2
        else:
            raise ValueError('Variable pol has an unvalid value')
    # Simplified theory
    elif theory == 1 or theory == instrument_string_choices['theory'][1]:
        pol = instrument.getPol()
        re = 2.82e-13*1e2/1e-10
        if pol == 0 or pol == instrument_string_choices['pol'][0]:
            # circ +
            # Effective scalar index from chi_xx with the magnetic circular
            # dichroism term chi_zy projected on the beam direction.
            chi_temp = chi[0][0][:,newaxis] - 1.0J*chi[2][1][:,newaxis]*cos(theta*pi/180)
            n = 1 + chi_temp/2.0
            #print n.shape, theta.shape, d.shape
            # NOTE(review): this branch passes a scalar lamda where the
            # others pass lamda*ones(theta.shape) — confirm Refl_nvary2
            # accepts both.
            R = Paratt.Refl_nvary2(theta, lamda, n, d, zeros(d.shape))
        elif pol == 1 or pol == instrument_string_choices['pol'][1]:
            # circ -
            chi_temp = chi[0][0][:,newaxis] + 1.0J*chi[2][1][:,newaxis]*cos(theta*pi/180)
            n = 1 + chi_temp/2.0
            R = Paratt.Refl_nvary2(theta, lamda*ones(theta.shape), n, d, zeros(d.shape))
        elif pol == 2 or pol == instrument_string_choices['pol'][2]:
            # tot: average of the two circular channels
            chi_temp = chi[0][0][:,newaxis] + 1.0J*chi[2][1][:,newaxis]*cos(theta*pi/180)
            n = 1 + chi_temp/2.0
            Rm = Paratt.Refl_nvary2(theta, lamda*ones(theta.shape), n, d, zeros(d.shape))
            chi_temp = chi[0][0][:,newaxis] - 1.0J*chi[2][1][:,newaxis]*cos(theta*pi/180)
            n = 1 + chi_temp/2.0
            Rp = Paratt.Refl_nvary2(theta, lamda*ones(theta.shape), n, d, zeros(d.shape))
            R = (Rp + Rm)/2.0
            #raise ValueError('Variable pol has an unvalid value')
        elif pol == 3 or pol == instrument_string_choices['pol'][3]:
            # ass: normalized difference of the two circular channels
            chi_temp = chi[0][0][:,newaxis] + 1.0J*chi[2][1][:,newaxis]*cos(theta*pi/180)
            n = 1 + chi_temp/2.0
            Rm = Paratt.Refl_nvary2(theta, lamda*ones(theta.shape), n, d, zeros(d.shape))
            chi_temp = chi[0][0][:,newaxis] - 1.0J*chi[2][1][:,newaxis]*cos(theta*pi/180)
            n = 1 + chi_temp/2.0
            Rp = Paratt.Refl_nvary2(theta, lamda*ones(theta.shape), n, d, zeros(d.shape))
            R = (Rp - Rm)/(Rp + Rm)
            #raise ValueError('Variable pol has an unvalid value')
        else:
            raise ValueError('Variable pol has an unvalid value')
    else:
        raise ValueError('Variable theory has an unvalid value')
    return R
def footprint_correction(instrument, theta):
    '''Return the footprint intensity correction factor for the incidence
    angles theta (deg.), according to instrument.footype: no correction,
    Gaussian beam profile or square beam profile.

    Raises ValueError for an invalid footype setting.
    '''
    footype = instrument.getFootype()
    beamw = instrument.getBeamw()
    samlen = instrument.getSamplelen()
    # Each setting can be given as an integer or as its string alternative.
    if footype == 0 or footype == instrument_string_choices['footype'][0]:
        return 1.0
    if footype == 1 or footype == instrument_string_choices['footype'][1]:
        # Gaussian beam: sample extends samlen/2 on each side of the center.
        return GaussIntensity(theta, samlen/2.0, samlen/2.0, beamw)
    if footype == 2 or footype == instrument_string_choices['footype'][2]:
        return SquareIntensity(theta, samlen, beamw)
    raise ValueError('Variable footype has an unvalid value')
def correct_reflectivity(R, instrument, foocor, TwoThetaQz, weight):
    '''Apply the footprint correction foocor and the resolution convolution
    selected by instrument.restype to the reflectivity R.

    weight is the resolution-vector weighting produced in Specular and is
    only used for the full-convolution mode.

    Raises ValueError for an invalid restype setting.
    '''
    restype = instrument.getRestype()
    # Accept both integer values and their string alternatives.
    no_conv = restype == 0 or restype == instrument_string_choices['restype'][0]
    fast_conv = restype == 1 or restype == instrument_string_choices['restype'][1]
    full_conv = restype == 2 or restype == instrument_string_choices['restype'][2]
    fast_var = restype == 3 or restype == instrument_string_choices['restype'][3]
    corrected = R[:]*foocor
    if no_conv:
        return corrected
    if fast_conv:
        return ConvoluteFast(TwoThetaQz, corrected, instrument.getRes(),
                             range=instrument.getResintrange())
    if full_conv:
        return ConvoluteResolutionVector(TwoThetaQz, corrected, weight)
    if fast_var:
        return ConvoluteFastVar(TwoThetaQz, corrected, instrument.getRes(),
                                range=instrument.getResintrange())
    raise ValueError('Variable restype has an unvalid value')
# Simulation entry points exposed to the GUI/model framework.
SimulationFunctions = {'Specular':Specular,\
                       'OffSpecular':OffSpecular,\
                       'SLD': SLD_calculations}
import lib.refl as Refl
# Generate the Instrument/Layer/Stack/Sample classes from the parameter
# dictionaries defined above.
(Instrument, Layer, Stack, Sample) = Refl.MakeClasses(InstrumentParameters,\
    LayerParameters,StackParameters,\
    SampleParameters, SimulationFunctions, ModelID)
if __name__=='__main__':
    pass
|
joshp123/genx
|
models/xmag.py
|
Python
|
gpl-3.0
| 26,324
|
[
"Gaussian"
] |
b4c6f57683121ea79158c3381eecec98220f8d4d026f019ec60dd4bcb3feed53
|
#!/usr/bin/env python
#Copyright (C) 2013 by Glenn Hickey
#
#Released under the MIT license, see LICENSE.txt
import unittest
import sys
import os
import argparse
import logging
import itertools
import copy
from teHmm.common import runShellCommand
from teHmm.common import runParallelShellCommands
from teHmm.track import TrackList
from pybedtools import BedTool, Interval
from teHmm.common import addLoggingOptions, setLoggingFromOptions, logger
from teHmm.common import getLogLevelString
""" This script automates evaluating the hmm te model by doing training,
parsing, comparing back to truth, and summarizing the results in a table all
in one. It can run the same logic on multiple input beds at once in parallel
(by using, say, a wildcard argument for inBeds. It also optionally repeats the
evaluation for subsets of the input tracks.
Independent processes are run in parallel using Python's process pool with the
maximum number of parallel processes limited by the --numProc argument
"""
def main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Train, evalaute, then compare hmm model on input")
parser.add_argument("trainingTracksInfo", help="Path of Tracks Info file "
"containing paths to genome annotation tracks used "
"for training")
parser.add_argument("outputDir", help="directory to write output")
parser.add_argument("inBeds", nargs="*", help="list of training beds")
parser.add_argument("--evalTracksInfo", help="Path of Tracks Info file "
"containing paths to genome annotation tracks used"
" for evaluation (only need if different from"
" trainingTracksInfo", default=None)
parser.add_argument("--numProc", help="Max number of processors to use",
type=int, default=1)
parser.add_argument("--allTrackCombinations", help="Rerun with all"
" possible combinations of tracks from the input"
" tracksInfo file. Note that this number gets big"
" pretty fast.", action = "store_true", default= False)
parser.add_argument("--emStates", help="By default the supervised mode"
" of teHmmTrain is activated. This option overrides"
" that and uses the EM mode and the given number of "
"states instead", type=int, default=None)
parser.add_argument("--cross", help="Do 50/50 cross validation by training"
" on first half input and validating on second",
action="store_true", default=False)
parser.add_argument("--emFac", help="Normalization factor for weighting"
" emission probabilities because when there are "
"many tracks, the transition probabilities can get "
"totally lost. 0 = no normalization. 1 ="
" divide by number of tracks. k = divide by number "
"of tracks / k", type=int, default=0)
parser.add_argument("--mod", help="Path to trained model. This will "
"bypass the training phase that would normally be done"
" and just skip to the evaluation. Note that the user"
" must make sure that the trained model has the "
"states required to process the input data",
default = None)
parser.add_argument("--iter", help="Number of EM iterations. Needs to be"
" used in conjunction with --emStates to specify EM"
" training",
type = int, default=None)
parser.add_argument("--initTransProbs", help="Path of text file where each "
"line has three entries: FromState ToState Probability"
". This file (all other transitions get probability 0)"
" is used to specifiy the initial transition model."
" The names and number of states will be initialized "
"according to this file (overriding --numStates)",
default = None)
parser.add_argument("--fixTrans", help="Do not learn transition parameters"
" (best used with --initTransProbs)",
action="store_true", default=False)
parser.add_argument("--initEmProbs", help="Path of text file where each "
"line has four entries: State Track Symbol Probability"
". This file (all other emissions get probability 0)"
" is used to specifiy the initial emission model. All "
"states specified in this file must appear in the file"
" specified with --initTransProbs (but not vice versa).",
default = None)
parser.add_argument("--fixEm", help="Do not learn emission parameters"
" (best used with --initEmProbs)",
action="store_true", default=False)
parser.add_argument("--initStartProbs", help="Path of text file where each "
"line has two entries: State Probability"
". This file (all other start probs get probability 0)"
" is used to specifiy the initial start dist. All "
"states specified in this file must appear in the file"
" specified with --initTransProbs (but not vice versa).",
default = None)
parser.add_argument("--fixStart", help="Do not learn start parameters"
" (best used with --initStartProbs)",
action="store_true", default=False)
parser.add_argument("--forceTransProbs",
help="Path of text file where each "
"line has three entries: FromState ToState Probability"
". These transition probabilities will override any "
" learned probabilities after training (unspecified "
"will not be set to 0 in this case. the learned values"
" will be kept, but normalized as needed" ,
default=None)
parser.add_argument("--forceEmProbs", help="Path of text file where each "
"line has four entries: State Track Symbol Probability"
". These "
"emission probabilities will override any learned"
" probabilities after training (unspecified "
"will not be set to 0 in this case. the learned values"
" will be kept, but normalized as needed." ,
default = None)
parser.add_argument("--flatEm", help="Use a flat emission distribution as "
"a baseline. If not specified, the initial emission "
"distribution will be randomized by default. Emission"
" probabilities specified with --initEmpProbs or "
"--forceEmProbs will never be affected by randomizaiton"
". The randomization is important for Baum Welch "
"training, since if two states dont have at least one"
" different emission or transition probability to begin"
" with, they will never learn to be different.",
action="store_true", default=False)
parser.add_argument("--emRandRange", help="When randomly initialzing a"
" multinomial emission distribution, constrain"
" the values to the given range (pair of "
"comma-separated numbers). Overridden by "
"--initEmProbs and --forceEmProbs when applicable."
" Completely overridden by --flatEm (which is equivalent"
" to --emRandRange .5,.5.). Actual values used will"
" always be normalized.", default=None)
parser.add_argument("--mandTracks", help="Mandatory track names for use "
"with --allTrackCombinations in comma-separated list",
default=None)
parser.add_argument("--combinationRange", help="in form MIN,MAX: Only "
"explore track combination in given (closed) range. "
"A more refined version of --allTrackCombinations.",
default=None)
parser.add_argument("--supervised", help="Use name (4th) column of "
"<traingingBed> for the true hidden states of the"
" model. Transition parameters will be estimated"
" directly from this information rather than EM."
" NOTE: The number of states will be determined "
"from the bed.",
action = "store_true", default = False)
parser.add_argument("--segment", help="Input bed files are also used to "
"segment data. Ie teHmmTrain is called with --segment"
" set to the input file. Not currently working with "
" --supervised",
action = "store_true", default=False)
parser.add_argument("--segLen", help="Effective segment length used for"
" normalizing input segments (specifying 0 means no"
" normalization applied) in training", type=int,
default=None)
parser.add_argument("--truth", help="Use specifed file instead of "
"input file(s) for truth comparison. Makes sense"
" when --segment is specified and only one input"
" bed specified", default = None)
parser.add_argument("--eval", help="Bed file used for evaluation. It should"
" cover same region in same order as --truth. Option "
"exists mostly to specify segmentation of --truth",
default=None)
parser.add_argument("--seed", help="Seed for random number generator"
" which will be used to initialize emissions "
"(if --flatEM and --supervised not specified)",
default=None, type=int)
parser.add_argument("--reps", help="Number of training replicates (with "
" different"
" random initializations) to run. The replicate"
" with the highest likelihood will be chosen for the"
" output", default=None, type=int)
parser.add_argument("--numThreads", help="Number of threads to use when"
" running training replicates (see --rep) in parallel.",
type=int, default=None)
parser.add_argument("--emThresh", help="Threshold used for convergence"
" in baum welch training. IE delta log likelihood"
" must be bigger than this number (which should be"
" positive) for convergence", type=float,
default=None)
parser.add_argument("--fit", help="Run fitStateNames.py to automap names"
" before running comparison", action="store_true",
default=False)
parser.add_argument("--fitOpts", help="Options to pass to fitStateNames.py"
" (only effective if used with --fit)", default=None)
parser.add_argument("--saveAllReps", help="Save all replicates (--reps)"
" models to disk, instead of just the best one"
". Format is <outputModel>.repN. There will be "
" --reps -1 such models saved as the best output"
" counts as a replicate. Comparison statistics"
" will be generated for each rep.",
action="store_true", default=False)
parser.add_argument("--maxProb", help="Gaussian distributions and/or"
" segment length corrections can cause probability"
" to *decrease* during BW iteration. Use this option"
" to remember the parameters with the highest probability"
" rather than returning the parameters after the final "
"iteration.", action="store_true", default=False)
parser.add_argument("--maxProbCut", help="Use with --maxProb option to stop"
" training if a given number of iterations go by without"
" hitting a new maxProb", default=None, type=int)
parser.add_argument("--transMatEpsilons", help="By default, epsilons are"
" added to all transition probabilities to prevent "
"converging on 0 due to rounding error only for fully"
" unsupervised training. Use this option to force this"
" behaviour for supervised and semisupervised modes",
action="store_true", default=False)
addLoggingOptions(parser)
args = parser.parse_args()
setLoggingFromOptions(args)
logOps = "--logLevel %s" % getLogLevelString()
if args.logFile is not None:
logOps += " --logFile %s" % args.logFile
if not os.path.exists(args.outputDir):
os.makedirs(args.outputDir)
if args.evalTracksInfo is None:
args.evalTracksInfo = args.trainingTracksInfo
trainingTrackList = TrackList(args.trainingTracksInfo)
evalTrackList = TrackList(args.evalTracksInfo)
checkTrackListCompatible(trainingTrackList, evalTrackList)
sizeRange = (len(trainingTrackList), len(trainingTrackList) + 1)
if args.allTrackCombinations is True:
sizeRange = (1, len(trainingTrackList) + 1)
if args.combinationRange is not None:
toks = args.combinationRange.split(",")
sizeRange = int(toks[0]),int(toks[1]) + 1
logger.debug("manual range (%d, %d) " % sizeRange)
mandTracks = set()
if args.mandTracks is not None:
mandTracks = set(args.mandTracks.split(","))
logger.debug("mandatory set %s" % str(mandTracks))
trainFlags = ""
if args.emStates is not None:
trainFlags += " --numStates %d" % args.emStates
if args.supervised is True:
trainFlags += " --supervised"
if args.segment is True:
raise RuntimeError("--supervised not currently compatible with "
"--segment")
trainFlags += " --emFac %d" % args.emFac
if args.forceEmProbs is not None:
trainFlags += " --forceEmProbs %s" % args.forceEmProbs
if args.iter is not None:
assert args.emStates is not None or args.initTransProbs is not None
trainFlags += " --iter %d" % args.iter
if args.initTransProbs is not None:
trainFlags += " --initTransProbs %s" % args.initTransProbs
if args.initEmProbs is not None:
trainFlags += " --initEmProbs %s" % args.initEmProbs
if args.fixEm is True:
trainFlags += " --fixEm"
if args.initStartProbs is not None:
trainFlags += " --initStartProbs %s" % args.initStartProbs
if args.fixStart is True:
trainFlags += " --fixStart"
if args.forceTransProbs is not None:
trainFlags += " --forceTransProbs %s" % args.forceTransProbs
if args.forceEmProbs is not None:
trainFlags += " --forceEmProbs %s" % args.forceEmProbs
if args.flatEm is True:
trainFlags += " --flatEm"
if args.emRandRange is not None:
trainFlags += " --emRandRange %s" % args.emRandRange
if args.segLen is not None:
trainFlags += " --segLen %d" % args.segLen
if args.seed is not None:
trainFlags += " --seed %d" % args.seed
if args.reps is not None:
trainFlags += " --reps %d" % args.reps
if args.numThreads is not None:
trainFlags += " --numThreads %d" % args.numThreads
if args.emThresh is not None:
trainFlags += " --emThresh %f" % args.emThresh
if args.saveAllReps is True:
trainFlags += " --saveAllReps"
if args.maxProb is True:
trainFlags += " --maxProb"
if args.transMatEpsilons is True:
trainFlags += " --transMatEpsilons"
if args.maxProbCut is not None:
trainFlags += " --maxProbCut %d" % args.maxProbCut
# write out command line for posteriorty's sake
if not os.path.exists(args.outputDir):
os.makedirs(args.outputDir)
cmdPath = os.path.join(args.outputDir, "teHmmBenchmark_cmd.txt")
cmdFile = open(cmdPath, "w")
cmdFile.write(" ".join(argv) + "\n")
cmdFile.close()
#todo: try to get timing for each command
commands = []
rows = dict()
for pn, pList in enumerate(subsetTrackList(trainingTrackList, sizeRange,
mandTracks)):
if len(pList) == len(trainingTrackList):
outDir = args.outputDir
else:
outDir = os.path.join(args.outputDir, "perm%d" % pn)
if not os.path.exists(outDir):
os.makedirs(outDir)
trainingTrackPath = os.path.join(outDir, "training_tracks.xml")
evalTrackPath = os.path.join(outDir, "eval_tracks.xml")
for maskTrack in trainingTrackList.getMaskTracks():
pList.addTrack(copy.deepcopy(maskTrack))
pList.saveXML(trainingTrackPath)
epList = TrackList()
for track in pList:
t = copy.deepcopy(evalTrackList.getTrackByName(track.getName()))
epList.addTrack(t)
for maskTrack in trainingTrackList.getMaskTracks():
epList.addTrack(copy.deepcopy(maskTrack))
epList.saveXML(evalTrackPath)
for inBed in args.inBeds:
base = os.path.basename(inBed)
truthBed = inBed
testBed = inBed
if args.cross is True:
truthBed = os.path.join(outDir,
os.path.splitext(base)[0] +
"_truth_temp.bed")
testBed = os.path.join(outDir,
os.path.splitext(base)[0] +
"_test_temp.bed")
splitBed(inBed, truthBed, testBed)
# train
if args.mod is not None:
modPath = args.mod
command = "ls %s" % modPath
else:
modPath = os.path.join(outDir,
os.path.splitext(base)[0] + ".mod")
command = "teHmmTrain.py %s %s %s %s %s" % (trainingTrackPath,
truthBed,
modPath,
logOps,
trainFlags)
if args.segment is True:
command += " --segment %s" % truthBed
# view
viewPath = os.path.join(outDir,
os.path.splitext(base)[0] + "_view.txt")
command += " && teHmmView.py %s > %s" % (modPath, viewPath)
# evaluate
numReps = 1
if args.reps is not None and args.saveAllReps is True:
numReps = args.reps
assert numReps > 0
missed = 0
# little hack to repeat evaluation for each training replicate
for repNum in xrange(-1, numReps-1):
if repNum == -1:
repSuffix = ""
else:
repSuffix = ".rep%d" % repNum
evalBed = os.path.join(outDir,
os.path.splitext(base)[0] + "_eval.bed" +
repSuffix)
hmmEvalInputBed = testBed
if args.eval is not None:
hmmEvalInputBed = args.eval
bicPath = os.path.join(outDir,
os.path.splitext(base)[0] + "_bic.txt" +
repSuffix)
command += " && teHmmEval.py %s %s %s --bed %s %s --bic %s" % (
evalTrackPath,
modPath + repSuffix,
hmmEvalInputBed,
evalBed,
logOps,
bicPath)
zin = True
if args.segment is True:
command += " --segment"
# fit
compTruth = testBed
if args.truth is not None:
compTruth = args.truth
compareInputBed = evalBed
if args.fit is True:
fitBed = os.path.join(outDir,
os.path.splitext(base)[0] + "_eval_fit.bed" +
repSuffix)
command += " && fitStateNames.py %s %s %s --tl %s" % (compTruth,
evalBed,
fitBed,
evalTrackPath)
if args.fitOpts is not None:
command += " " + args.fitOpts
compareInputBed = fitBed
# compare
compPath = os.path.join(outDir,
os.path.splitext(base)[0] + "_comp.txt" +
repSuffix)
command += " && compareBedStates.py %s %s --tl %s > %s" % (
compTruth,
compareInputBed,
evalTrackPath,
compPath)
# make table row
if repSuffix == "":
rowPath = os.path.join(outDir,
os.path.splitext(base)[0] + "_row.txt")
if inBed in rows:
rows[inBed].append(rowPath)
else:
rows[inBed] = [rowPath]
command += " && scrapeBenchmarkRow.py %s %s %s %s %s" % (
args.trainingTracksInfo,
trainingTrackPath,
evalBed,
compPath,
rowPath)
# remember command
inCmdPath = os.path.join(outDir,
os.path.splitext(base)[0] + "_cmd.txt")
inCmdFile = open(inCmdPath, "w")
inCmdFile.write(command + "\n")
inCmdFile.close()
commands.append(command)
runParallelShellCommands(commands, args.numProc)
writeTables(args.outputDir, rows)
def subsetTrackList(trackList, sizeRange, mandTracks):
    """ Yield a TrackList for every combination of tracks from the input
    list whose size falls in the half-open range sizeRange=(lo, hi).
    Combinations that do not contain every name in mandTracks are skipped.
    For example, input [t1, t2, t3] with the full range generates
    [t1] [t2] [t3] [t1,t2] [t1,t3] [t2,t3] [t1,t2,t3] """
    assert sizeRange[0] > 0
    # never ask for more tracks than the input list actually contains
    lo, hi = sizeRange[0], min(sizeRange[1], len(trackList) + 1)
    indices = [i for i in xrange(len(trackList))]
    for comboSize in xrange(lo, hi):
        for combo in itertools.combinations(indices, comboSize):
            subsetList = TrackList()
            numMandatory = 0
            for trackIdx in combo:
                trackCopy = copy.deepcopy(trackList.getTrackByNumber(trackIdx))
                subsetList.addTrack(trackCopy)
                if trackCopy.getName() in mandTracks:
                    numMandatory += 1
            # only emit combinations containing every mandatory track
            if numMandatory == len(mandTracks):
                yield subsetList
def splitBed(inBed, outBed1, outBed2):
    """ Used for cross validation option.  The first half of the input bed
    gets written to outBed1 and the second half to outBed2.

    A single-line input is written to *both* outputs so that neither half
    ends up empty (behaviour relied on by the --cross option).
    """
    # read the file once instead of opening it twice like before, and use
    # context managers so handles are closed even if a write fails
    with open(inBed, "r") as inFile:
        lines = inFile.readlines()
    numLines = len(lines)
    # explicit floor division: same result in Python 2 and 3
    cutLine = numLines // 2
    with open(outBed1, "w") as outFile1, open(outBed2, "w") as outFile2:
        for lineNo, line in enumerate(lines):
            if numLines == 1 or lineNo < cutLine:
                outFile1.write(line)
            if numLines == 1 or lineNo >= cutLine:
                outFile2.write(line)
def checkTrackListCompatible(trainingTrackList, evalTrackList):
    """ Now that we allow a different trackList to be used for training and
    eval, we need to check to make sure that everything's the same but the
    paths.

    Raises AssertionError if the two lists differ in length, or if any pair
    of corresponding tracks differs in name, number, scale, log-scale or
    distribution. """
    trainingTracks = list(trainingTrackList)
    evalTracks = list(evalTrackList)
    # zip() silently truncates to the shorter sequence, so a differing
    # number of tracks would previously slip through -- check it explicitly
    assert len(trainingTracks) == len(evalTracks)
    for track1, track2 in zip(trainingTracks, evalTracks):
        assert track1.getName() == track2.getName()
        assert track1.getNumber() == track2.getNumber()
        assert track1.getScale() == track2.getScale()
        assert track1.getLogScale() == track2.getLogScale()
        assert track1.getDist() == track2.getDist()
def writeTables(outDir, rows):
    """ Write one CSV table per input bed from the row files that were
    scraped from the output using scrapeBenchmarkRow.py.

    outDir: directory the <bedName>_table.csv files are written to.
    rows:   dict mapping an input bed path to the ordered list of row-file
            paths produced for it; each row file is expected to hold a
            header line followed by a data line.
    """
    for inBed, rowPaths in rows.items():
        name = os.path.splitext(os.path.basename(inBed))[0]
        tablePath = os.path.join(outDir, name + "_table.csv")
        # context managers guarantee the handles are closed even on error
        # (the previous version leaked them if a read or write raised)
        with open(tablePath, "w") as tableFile:
            for i, rowPath in enumerate(rowPaths):
                with open(rowPath, "r") as rowFile:
                    rowLines = rowFile.readlines()
                if i == 0:
                    # the header comes from the first row file only
                    tableFile.write(rowLines[0])
                tableFile.write(rowLines[1])
if __name__ == "__main__":
    # propagate main()'s return value as the process exit code so shell
    # pipelines and batch wrappers can detect failure
    sys.exit(main())
|
glennhickey/teHmm
|
bin/teHmmBenchmark.py
|
Python
|
mit
| 26,622
|
[
"Gaussian"
] |
cf4cd7cab6ef8682d654d6d09abd8022fd934f07ebb96ae5ce787e0199c0fde5
|
#!/usr/bin/env python
import os, sys, glob, gzip, math
from optparse import OptionParser
# Make sure to set the proper PYTHONPATH before running this script:
import FileUtils
import Fasta, AnalyzeSequence, Variant
def getPercentiles(hist = {}, pctiles = [ 1,5,10,25, 50, 75, 90, 95, 99]):
vals = sorted(hist.keys())
cum = {}
prevk = 0
for idx, k in enumerate(vals):
cum[k]=hist[k]
#print "cum:",k,hist[k]
if idx>0:
cum[k] += cum[prevk]
prevk = k
if not cum.has_key(0):
cum[0] = 0
tot = cum[prevk]
iles = pctiles
ilidx = 0
iles_k = [0]*len(iles)
num_pct = len(iles)
for idx, k in enumerate(vals):
if ilidx<num_pct and cum[k]>float(iles[ilidx])/100.0*float(tot):
iles_k[ilidx]=k
ilidx += 1
return iles_k
def parseGLFFileLine(dat={}, isHomozygous = False):
    """ Convert one parsed GLF line (a column-name -> string dict) into a
    GenotypeLikelihood object, or return None when the 'glf' field is
    malformed.  Allele index 0 is the reference, written '*'; the
    non-reference alleles come from the comma-separated 'nref_all' column.
    """
    pos = dat['realigned_position']
    # allele 0 is always the reference allele
    alleles = ['*'] + dat['nref_all'].split(',')
    glfEntries = dat['glf'].split(',')
    # NOTE(review): looked up but never used; the lookup is kept so that a
    # missing 'indidx' key still raises KeyError exactly as before
    indidx = dat['indidx']
    chr = dat['tid']
    genLiks = {}
    for entry in glfEntries:
        fields = entry.split(':')
        if len(fields) != 2:
            return None
        genoStr, likStr = fields
        idxPair = genoStr.split('/')
        if len(idxPair) != 2:
            return None
        i0, i1 = int(idxPair[0]), int(idxPair[1])
        if i0 >= len(alleles) or i1 >= len(alleles):
            return None
        genLiks['%s/%s' % (alleles[i0], alleles[i1])] = likStr
    # NOTE(review): GenotypeLikelihood is not among the imports visible in
    # this file -- confirm it is brought into scope elsewhere
    return GenotypeLikelihood.GenotypeLikelihood(chr=chr, pos=pos,
                                                 genLiks=genLiks,
                                                 miscData=dat,
                                                 isHomozygous=isHomozygous)
def getVCFString(glf = {}, fa = None, maxHPLen = 10, addFilters = [], useFRFilter = 'yes', filterQual = 20):
    """ Build one VCF data line (tab-separated string) for the variant
    call described by the dict *glf*.

    glf:         dict of per-variant fields (pos, chr, nref_all, genotype,
                 post_prob_all, post_prob_geno, coverage counts, ...)
    fa:          reference-sequence object exposing get(chr, pos, length)
    maxHPLen:    homopolymer length above which the hp<N> filter is added
    addFilters:  extra filter tags appended verbatim
    useFRFilter: when 'yes', add the 'fr0' filter if glf['pass_filterFR']
                 is falsy
    filterQual:  quality threshold for the q<N> filter

    Returns the line with columns CHROM POS ID REF ALT QUAL FILTER INFO
    FORMAT and one sample column in GT:GQ format.
    """
    # create VCF string for variant
    #fv.write("#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\t%s\n" % (sampleID))
    filters = []
    if useFRFilter == 'yes':
        if not glf['pass_filterFR']:
            filters.append('fr0')
    pos = int(glf['pos'])
    chr = glf['chr']
    # homopolymer length of the reference around the call site (25 bases
    # of context either side)
    seq = fa.get(chr, pos+1-25,50)
    hplen = AnalyzeSequence.HomopolymerLength(seq=seq, pos = 25)
    #if glf['vartype']=="snp":
    #    altseq = glf['nref_all'][3]
    #    refseq = fa.get(chr,pos+1,1)[0]
    #    report_pos = pos+1
    #
    #else:
    # (dead branch above kept for reference; the block below always runs)
    if True:
        nref_all = glf['nref_all']
        # multi-allelic 'nref_all' values are not expected here
        if nref_all.find(',')!=-1:
            raise NameError('Internal error')
        report_pos = pos
        gtAlleles = glf['genotype'].split('/')
        # scan the genotype alleles plus the non-ref allele for the
        # longest deletion, so REF can span all deleted bases
        max_del_len = 0
        scanAlleles = gtAlleles[:]
        scanAlleles.append(nref_all)
        for gta in scanAlleles:
            var = Variant.Variant(varString = gta)
            if var.type == "del":
                if var.length>max_del_len:
                    max_del_len = var.length
        seqlen = 1+max_del_len# 1 anchor base plus the longest deletion
        refseq = ''.join(fa.get(chr, report_pos, seqlen))
        # construct the ALT sequence for the primary non-ref allele
        vnref = Variant.Variant(varString = nref_all)
        if vnref.type == 'del':
            altseq = refseq[0]+refseq[(1+vnref.length):]
        elif vnref.type == 'ins':
            altseq = refseq[0]+vnref.seq+refseq[1:]
        elif vnref.type == 'snp':
            altseq = refseq[0]+vnref.seq[0]+refseq[2:]
        else:
            raise NameError('Unknown allele')
        # recode genotype: map each genotype allele to its 0-based VCF
        # allele index (0 = REF, 1.. = position in the ALT list)
        altseqs = [altseq]
        altseq_to_type = {}
        altseq_to_type[altseq] = vnref.type
        rec_gta = []
        for gta in gtAlleles:
            g_code = -1
            vnref = Variant.Variant(varString = gta)
            if vnref.type == 'del':
                g_altseq = refseq[0]+refseq[(1+vnref.length):]
            elif vnref.type == 'ins':
                g_altseq = refseq[0]+vnref.seq+refseq[1:]
            elif vnref.type == 'snp':
                g_altseq = refseq[0]+vnref.seq[0]+refseq[2:]
            elif vnref.type == 'ref':
                g_altseq = refseq[:]
                g_code = 0
            else:
                raise NameError('Unknown allele')
            if g_code == -1:
                # new ALT sequences get appended; the code is their
                # 1-based position in the ALT list
                if g_altseq not in altseqs:
                    altseqs.append(g_altseq)
                    altseq_to_type[g_altseq] = vnref.type
                g_code = altseqs.index(g_altseq)+1
            rec_gta.append("%d" % g_code)
        rec_gt = '/'.join(rec_gta)
        # check if there is an indel in the altseqs
        onlySNPs = True
        for alts in altseqs:
            if altseq_to_type[alts] != "snp":
                onlySNPs = False
        # if there are no indels, only SNPs, then move position by one and
        # drop the shared anchor base from REF and every ALT
        if onlySNPs:
            report_pos += 1
            refseq = ''.join(fa.get(chr,report_pos,1))
            tmp_altseqs = altseqs[:]
            altseqs = []
            for alt in tmp_altseqs:
                altseqs.append(alt[1:])
            del tmp_altseqs
    if hplen>maxHPLen:
        filters.append("hp%d" % maxHPLen)
    if float(glf['post_prob_all'])<filterQual:
        filters.append('q%d' % filterQual)
    if addFilters != []:
        filters.extend(addFilters)
    if filters== []:
        filterStr = 'PASS'
    else:
        filterStr = ';'.join(filters)
    infoStr = "DP=%d;NF=%d;NR=%d;HP=%d" % (int(glf['num_hap_reads']), int(glf['num_cover_forward']), int(glf['num_cover_reverse']), hplen)
    #VCF header string
    #fv.write("#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\t%s\n" % (sampleID))
    rstr = "%s\t%s\t.\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (glf['chr'], report_pos, refseq, ','.join(altseqs), "%1.2f" % (glf['post_prob_all']), filterStr, infoStr,"GT:GQ","%s:%d" % (rec_gt, int(glf['post_prob_geno'])))
    return rstr
def processPooledGLFFiles(bamFilesFile = '', glfFilesFile = '', refFile = '', outputVCFFile = '', maxHPLen = 10, minForwardReverse = 1, minDist = 10, dbSNPWindow = 50, newVarCov = False, doNotFilterOnFR = False, filterQual = 20, numSamples = 1, numBamFiles = 1):
coverageRange = [20, 10000]
# read file with glf files
allFiles = []
headerLabels = []
f = open(glfFilesFile,'r')
idx = 0
for line in f.readlines():
idx += 1
dat = line.rstrip("\n").split()
for gf in dat:
if not os.path.exists(gf):
sys.stderr.write("WARNING: GLF file %s does not exist.\n" % gf)
else:
if os.path.splitext(gf)[-1]=='.gz':
fgf = gzip.open(gf,'r')
else:
fgf = open(gf,'r')
line = fgf.readline()
if line == '':
sys.stderr.write("WARNING: GLF file %s is empty.\n" % gf)
else:
d = line.rstrip("\n").split()
if headerLabels == []:
headerLabels = d[:]
allFiles.append(gf)
else:
if d != headerLabels:
sys.stderr.write("Inconsistent header in GLF file %s\n" % gf)
else:
allFiles.append(gf)
fgf.close()
f.close()
fa = Fasta.Fasta(fname = refFile)
# read precall files
# make hash table [pos][variant][fname]
numInds = numSamples
minFreq = 1.0/(float(2*numInds)*5)
nf = 0
try:
realpos_col = headerLabels.index('realigned_position')
var_col = headerLabels.index('nref_all')
# apply filters across individuals
tcFilter = "tc%d" % minDist
col_num_reads = headerLabels.index('num_reads')
col_num_forward_old = headerLabels.index('num_cover_forward')
col_num_reverse_old = headerLabels.index('num_cover_reverse')
col_num_forward = headerLabels.index('var_coverage_forward')
col_num_reverse = headerLabels.index('var_coverage_reverse')
col_post_prob = headerLabels.index('post_prob_variant')
chr_col = headerLabels.index('tid')
idx_col = headerLabels.index('indidx')
ana_col = headerLabels.index('analysis_type')
except ValueError:
raise NameError("GLF files are corrupt. Could not find all required columns.")
pass_filters = {}
varStat = {}
nr = 0
num_pass = 0
# read depth histo
rdhist = {}
for glffile in allFiles:
fglf = FileUtils.FileWithHeader(fname = glffile, mode = 'r', joinChar = ' ')
print "Reading", glffile
done = False
while True:
pos = -1
var = ''
nr += 1
if nr % 10000 == 9999:
print "Number of lines read:",nr+1
num_ind_with_data = 0
tot_coverage = 0
tot_num_forward = 0
tot_num_reverse = 0
tot_num_forward_old = 0
tot_num_reverse_old = 0
skip = False
for fidx in range(0,numBamFiles):
try:
dat = fglf.readlineList()
except IOError:
sys.stderr.write("WARNING: IOError in %s\n" % glffile)
done = True
break
if dat == []:
done = True
break
if dat[realpos_col] == 'NA':
skip = True
break
if dat[ana_col] != "singlevariant":
skip = True
break
if dat[idx_col] != 'NA' and int(dat[idx_col])>= numBamFiles:
raise NameError('Error. Is the number of BAM files correctly specified?')
if pos == -1:
pos = int(dat[realpos_col])
var = dat[var_col]
chr = dat[chr_col]
else:
if int(dat[realpos_col])!=pos:
raise NameError('Inconsistent glf files! Is the number of BAM files correctly specified?')
if int(dat[idx_col]) != fidx:
sys.stderr.write("Error reading this variant: %s %d %s in %s\n" % (chr,pos,var, glffile))
tot_num_forward_old += int(dat[col_num_forward_old])
tot_num_reverse_old += int(dat[col_num_reverse_old])
if fidx == 0:
# only record for first individual
tot_num_forward = int(dat[col_num_forward])
tot_num_reverse = int(dat[col_num_reverse])
numreads = int(dat[col_num_reads])
if numreads>0:
num_ind_with_data += 1
tot_coverage += numreads
if skip:
continue
if done:
break
prob = float(dat[col_post_prob])
freq = float(dat[headerLabels.index('est_freq')])
if rdhist.has_key(tot_coverage):
rdhist[tot_coverage] += 1
else:
rdhist[tot_coverage] = 1
if prob>0.20:
if not varStat.has_key(chr):
varStat[chr] = {}
if not varStat[chr].has_key(pos):
varStat[chr][pos] = {}
# hplen
seq = fa.get(chr, pos+1-25,50)
hplen = AnalyzeSequence.HomopolymerLength(seq=seq, pos = 25)
varStat[chr][pos][var] = {'QUAL':prob,'NF':tot_num_forward, 'NR':tot_num_reverse, 'NFS':tot_num_forward_old, 'NRS':tot_num_reverse_old, 'DP':tot_coverage, 'NS':num_ind_with_data,'AF':freq,'HP':hplen}
del dat
# finished reading this one
fglf.close()
#print "Number of variants passing filters:", num_pass
# apply haplotype coverage and other filters
coverageRange = getPercentiles(rdhist, [1,99])
fqp = 1.0 - math.pow(10.0, -float(filterQual)/10.0)
fqp_str = "q%d" % filterQual
for chr in varStat.keys():
for pos in varStat[chr].keys():
for varseq, var in varStat[chr][pos].iteritems():
filters = []
prob = var['QUAL']
num_ind_with_data = var['NS']
hplen = var['HP']
freq = var['AF']
tot_coverage = var['DP']
tot_num_forward = var['NF']
tot_num_reverse = var['NR']
if prob<fqp:
filters.append(fqp_str)
if (tot_num_forward < minForwardReverse or tot_num_reverse < minForwardReverse) and not doNotFilterOnFR:
filters.append('fr0')
if tot_coverage < coverageRange[0] or tot_coverage>coverageRange[1]:
filters.append('ocr')
if num_ind_with_data<numInds/2:
filters.append('s50')
if hplen>maxHPLen:
filters.append("hp%d" % (maxHPLen))
if freq<minFreq:
filters.append("mf")
if filters == []:
if not pass_filters.has_key(chr):
pass_filters[chr]={}
if not pass_filters[chr].has_key(pos):
pass_filters[chr][pos]=[]
pass_filters[chr][pos].append(varseq)
num_pass += 1
if filters == []:
varStat[chr][pos][varseq]['filter'] = ''
else:
varStat[chr][pos][varseq]['filter'] = ';'.join(filters)
# now visit each chromosome and apply closeness filter
chromosomes = [str(c) for c in range(1,23)]
chromosomes.extend(['X','Y'])
other_chr = list(set(varStat.keys())-set(chromosomes))
chromosomes.extend(other_chr)
# create VCF file
print "Writing VCF"
fv = open(outputVCFFile, 'w')
fv.write("##fileformat=VCFv4.0\n")
fv.write("##source=Dindel\n")
fv.write("##reference=%s\n" % refFile)
fv.write("##INFO=<ID=NS,Number=1,Type=Integer,Description=\"Number of samples with data\">\n")
fv.write("##INFO=<ID=DP,Number=1,Type=Integer,Description=\"Total number of reads in haplotype window\">\n")
fv.write("##INFO=<ID=HP,Number=1,Type=Integer,Description=\"Reference homopolymer tract length\">\n")
fv.write("##INFO=<ID=NFS,Number=1,Type=Integer,Description=\"Number of reads covering non-ref variant site on forward strand\">\n")
fv.write("##INFO=<ID=NRS,Number=1,Type=Integer,Description=\"Number of reads covering non-ref variant site on reverse strand\">\n")
fv.write("##INFO=<ID=NF,Number=1,Type=Integer,Description=\"Number of reads covering non-ref variant on forward strand\">\n")
fv.write("##INFO=<ID=NR,Number=1,Type=Integer,Description=\"Number of reads covering non-ref variant on reverse strand\">\n")
fv.write("##INFO=<ID=AF,Number=-1,Type=Float,Description=\"Allele frequency\">\n")
fv.write("##INFO=<ID=DB,Number=0,Type=Flag,Description=\"dbSNP membership build 129 - type match and indel sequence length match within %d bp\">\n" % dbSNPWindow)
fv.write("##FILTER=<ID=q%d,Description=\"Quality below %d\">\n" % (filterQual, filterQual))
fv.write("##FILTER=<ID=s50,Description=\"Less than 50% of samples have data\">\n")
fv.write("##FILTER=<ID=tc%d,Description=\"Indel site was closer than %d base pairs from another site with higher posterior probability\">\n" % (minDist, minDist))
fv.write("##FILTER=<ID=hp%d,Description=\"Reference homopolymer length was longer than %d\">\n" % (maxHPLen, maxHPLen))
if not doNotFilterOnFR:
fv.write("##FILTER=<ID=fr0,Description=\"Non-ref allele is not covered by at least one read on both strands\">\n")
fv.write("##FILTER=<ID=ocr,Description=\"Number of reads in haplotype window outside coverage range %d %d\">\n" % (coverageRange[0], coverageRange[1]))
fv.write("##FILTER=<ID=mf,Description=\"Too low non-ref allele frequency\">\n")
fv.write("#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n")
for chr in chromosomes:
if not pass_filters.has_key(chr):
continue
# filter out variants that are too close
totSites = 0
positions = sorted(pass_filters[chr].keys())
newPosition = positions[:]
done = False
while not done:
done = True
for p in range(1,len(positions)):
if newPosition[p] != newPosition[p-1] and newPosition[p]-positions[p-1]<=minDist:
newPosition[p]=newPosition[p-1]
done = False
newSites = {}
for p in range(0, len(newPosition)):
newPos = newPosition[p]
pos = positions[p]
if not newSites.has_key(newPos):
newSites[newPos] = {}
if not newSites[newPos].has_key(pos):
newSites[newPos][pos]=[]
for var in varStat[chr][pos].keys():
newSites[newPos][pos].append(var)
print "New number of sites:", len(newSites.keys())
print "Number of sites filtered:",len(pass_filters[chr].keys())-len(newSites.keys())
# select best call for double sites
filtered = []
for newPos in newSites.keys():
old = newSites[newPos].keys()
pos_probs = []
pos_vars = []
pos_pos = []
for oldPos in old:
probs = []
vars = []
max_prob = -1.0
max_var = ''
for var in newSites[newPos][oldPos]:
prob=varStat[chr][oldPos][var]['QUAL']
if prob>max_prob:
max_prob = prob
max_var =var
pos_probs.append(max_prob)
pos_vars.append(max_var)
pos_pos.append(oldPos)
idx = pos_probs.index(max(pos_probs))
okpos = pos_pos[idx]
filtered.append(pos_pos[idx])
for duppos in set(old)-set([okpos]):
for var in varStat[chr][duppos].keys():
if varStat[chr][duppos][var]['filter'] == '':
varStat[chr][duppos][var]['filter'] == tcFilter
else:
varStat[chr][duppos][var]['filter']+=';'+tcFilter
print "Number of indel sites:",len(filtered)
for pos in sorted(varStat[chr].keys()):
for var in varStat[chr][pos].keys():
indel_report_pos = pos
#refall = fa.get(chr, pos+1, 1)
qual = -int(10.0*math.log10(max(1.0-float(varStat[chr][pos][var]['QUAL']),1e-10)))
infofield = []
for tag in ['AF','NS','DP','HP','NF','NR','NFS','NRS']:
val = (varStat[chr][pos][var][tag])
infofield.append("%s=%s" % (tag,val))
vnref = Variant.Variant(varString = var)
max_del_len = 0
if vnref.type == "del":
if vnref.length>max_del_len:
max_del_len = vnref.length
seqlen = 1 + max_del_len
refseq = ''.join(fa.get(chr, indel_report_pos, seqlen))
if vnref.type == "del":
altseq = refseq[0]+refseq[(1+vnref.length):]
elif vnref.type == "ins":
altseq = refseq[0]+vnref.seq+refseq[1:]
elif vnref.type == "snp":
indel_report_pos += 1
refseq = refseq[1]
altseq = vnref.seq[0]
infostr = ';'.join(infofield)
filterstr = varStat[chr][pos][var]['filter']
if filterstr == '':
filterstr = 'PASS'
id = '.'
outstr = "%s\t%d\t%s\t%s\t%s\t%d\t%s\t%s\n" % (chr, indel_report_pos, id, refseq, altseq, qual, filterstr, infostr)
fv.write(outstr)
fv.close()
def main(argv):
    """ Command-line entry point: parse options and merge the pooled Dindel
    GLF files listed via --inputFiles into one VCF (--outputFile).

    argv: the argument list *without* the program name (e.g. sys.argv[1:]).
    Exits with status 1 when a required option is missing.
    """
    parser = OptionParser()
    parser.add_option("-i","--inputFiles", dest = "inputFiles", help = "file that contains list of Dindel '.glf.txt' files that should be merged")
    parser.add_option("-o","--outputFile", dest = "outputFile", help = "output VCF file with variant calls")
    parser.add_option("-r","--refFile", dest = "refFile", help = "reference sequence _indexed_ Fasta file")
    parser.add_option("--numSamples", dest = "numSamples", help = "number of samples")
    parser.add_option("--numBamFiles", dest = "numBAMFiles", help = "number of BAM files", default = 1)
    parser.add_option("--maxHPLen", dest = "maxHPLen", help = "maximum length of homopolymer run to call an indel", default = 10, type ="int")
    parser.add_option("--filterFR", dest = "filterFR", help = "filter on forward/reverse count of reads (stringent)", action = "store_true", default = False)
    parser.add_option("--filterQual", dest = "filterQual", help = "quality below which variants are filtered", default = 20)
    # parse the argv we were given rather than implicitly re-reading
    # sys.argv, so main() can also be driven programmatically
    (options, args) = parser.parse_args(argv)
    if options.inputFiles is None:
        sys.stderr.write("Please specify --inputFiles\n")
        sys.exit(1)
    if options.outputFile is None:
        sys.stderr.write("Please specify --outputFile\n")
        sys.exit(1)
    if options.refFile is None:
        sys.stderr.write("Please specify --refFile\n")
        sys.exit(1)
    if options.numSamples is None:
        sys.stderr.write("Please specify --numSamples option\n")
        sys.exit(1)
    processPooledGLFFiles(glfFilesFile = options.inputFiles, maxHPLen = options.maxHPLen, refFile = options.refFile, outputVCFFile = options.outputFile, doNotFilterOnFR = (not options.filterFR), filterQual = int(options.filterQual), numSamples = int(options.numSamples), numBamFiles = int(options.numBAMFiles))
if __name__ == "__main__":
    try:
        main(sys.argv[1:])
    except Exception:
        # Report the failure, then re-raise so the full traceback (and a
        # nonzero exit status) propagate to the shell.
        # FIX: the previous bare 'except:' also intercepted SystemExit raised
        # by main()'s option validation and misreported it as an error; the
        # trailing 'sys.exit(1)' after 'raise' was unreachable dead code.
        sys.stderr.write("An error occurred!\n")
        raise
|
genome/dindel-tgi
|
python/mergeOutputPooled.py
|
Python
|
gpl-3.0
| 22,740
|
[
"VisIt"
] |
9ffc512a6b2de6486d90218bfa8c3e32370643cdd98e5f9106daf2a23a5183ec
|
# -*- coding: utf-8 -*-
#
# Copyright © 2017 Mark Wolfman
#
# This file is part of Xanespy.
#
# Xanespy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Xanespy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Xanespy. If not, see <http://www.gnu.org/licenses/>.
"""A collection of callables that can be used for fitting spectra."""
from collections import namedtuple
from multiprocessing import Pool
import warnings
import functools
from typing import Tuple, Callable
import numpy as np
from scipy.optimize import leastsq, curve_fit
import tqdm
from . import exceptions
from .xanes_math import k_edge_jump, iter_indices, foreach
from .utilities import nproc, mp_map
__all__ = ('prepare_p0', 'fit_spectra', 'Curve', 'Line',
'LinearCombination', 'Gaussian', 'L3Curve', 'KCurve')
def guess_p0(func, spectra, edge=None, quiet=False, ncore=None):
    """Accept a number of spectra and try to get a good guess for initial
    fitting params.

    Parameters
    ----------
    func : callable
        The function that will be fit. It must have a ``guess_params``
        method.
    spectra : np.ndarray
        A 2D array with the second dimension representing Energy (or
        whatever the x axis is).
    edge : optional
        An XAS edge.
    quiet : bool, optional
        If truthy, the per-pixel progress bar is suppressed.
    ncore : int, optional
        How many processes to use in the pool. See
        :func:`~xanespy.utilities.nproc` for more details.

    Returns
    -------
    p0 : np.ndarray
        A 2D array, the first dimension matches ``spectra`` and the
        second dimension corresponds to the number of fitting parameters
        for *func*.
    """
    # Prepare a progress bar (wrapping the iterable leaves the data unchanged)
    if not quiet:
        spectra = tqdm.tqdm(spectra, desc="Guessing initial params", unit='px')
    # Execute the parameters guessing with multiprocessing.  named_tuple=False
    # keeps each guess a plain tuple so results stack into an array cleanly.
    guess_params = functools.partial(func.guess_params, edge=edge,
                                     named_tuple=False)
    # with Pool(nproc(ncore)) as pool:
    #     p0 = np.array(pool.map(guess_params, spectra, chunksize=2000))
    p0 = mp_map(guess_params, spectra, chunksize=2000)
    return p0
def prepare_p0(p0, frame_shape, num_timesteps=1):
    """Create an initial parameter guess for fitting.

    Broadcasts the starting guess *p0* over every timestep and pixel of a
    frameset, then moves the parameter axis into the energy slot.

    For example, if a frameset has 12 timesteps and (1024, 2048) frames,
    then a 5-tuple input for ``p0`` will result in a return value with
    shape (12, 5, 1024, 2048).
    """
    # Broadcast the guess across (time, *frame, n_params)...
    full_shape = (num_timesteps,) + tuple(frame_shape) + (len(p0),)
    broadcast = np.broadcast_to(np.asarray(p0, dtype=float), full_shape)
    # ...then put the parameter axis right after the time axis.  The copy
    # makes the result an independent, writable array.
    return np.moveaxis(broadcast, -1, 1).copy()
def is_out_of_bounds(params, bounds) -> bool:
    """Test if the parameters are within valid bounds.

    *bounds* is either ``None`` (no restriction) or a ``(lower, upper)``
    pair of array-likes.  Returns truthy when any entry of *params* falls
    strictly below its lower bound or strictly above its upper bound.
    """
    if bounds is None:
        return False
    lower, upper = bounds
    too_low = np.less(params, lower)
    too_high = np.greater(params, upper)
    return np.any(np.logical_or(too_low, too_high))
def distance_to_bounds(params, bounds) -> float:
    """Determine the L2 distance from *params* to the bounds box.

    Each parameter contributes zero if it lies inside ``[lower, upper]``,
    otherwise its distance to the nearest bound; the Euclidean norm of
    those per-parameter distances is returned.
    """
    params = np.asarray(params)
    lower, upper = bounds
    # How far below the lower bound / above the upper bound (clipped at 0).
    shortfall = np.clip(np.subtract(lower, params), 0, None)
    excess = np.clip(np.subtract(params, upper), 0, None)
    # A parameter can only violate one side, so the sum keeps the magnitude.
    return np.linalg.norm(shortfall + excess)
def error(guess: np.ndarray, obs: np.ndarray, func: Callable,
          bounds=None) -> np.ndarray:
    """Compare observed and predicted signal and return the difference.

    Parameters
    ----------
    guess
        Parameters given to *func* to calculate predicted data.
    obs
        Observed data.
    func
        A function that accepts *guess* parameters and returns the
        predicted signal. It will be used as ``predicted = func(*guess)``.
    bounds : 2-tuple of array_like, optional
        Defines upper and lower bounds for fitting. See
        :py:func:`scipy.optimize.curve_fit` for more details.

    Returns
    -------
    diff
        An array of absolute differences between the predicted values and
        the observed values.  If *guess* lies outside *bounds*, every
        entry is additionally penalized in proportion to the distance
        from the bounds box.
    """
    # FIX: removed a stale docstring entry for a nonexistent ``nonnegative``
    # parameter and a large block of dead, commented-out code.
    # Compare predicted with observed values
    predicted = func(*guess)
    diff = np.abs(obs - predicted)
    assert not np.any(np.isnan(diff))
    # Punish out-of-bounds parameter values so the optimizer is steered
    # back toward the feasible region.
    if is_out_of_bounds(guess, bounds):
        diff += distance_to_bounds(guess, bounds) * 10
    return diff
def _fit_sources(inputs, func, nonnegative=False, bounds=None):
    """Fit *func* to a single spectrum (multiprocessing worker for
    :func:`fit_spectra`).

    ``inputs`` is a ``(spectrum, p0)`` pair.  Returns ``(p_fit, res_)``
    where ``p_fit`` are the fitted parameters and ``res_`` is the RMS
    residual of the fit.  Spectra containing NaN short-circuit to all-NaN
    parameters and a NaN residual.
    """
    spectrum, p0 = inputs
    # Don't bother fitting if there's NaN values
    if np.any(np.isnan(spectrum)):
        p_fit = np.empty(p0.shape)
        p_fit[()] = np.nan
        res_ = np.nan
        return p_fit, res_
    # Calculate bounds for fitting if needed
    if bounds is None and nonnegative:
        # Set bounds to 0 and np.inf
        bounds = (
            [0] * len(p0),
            [np.inf] * len(p0)
        )
    elif bounds is None:
        # Set bounds to -np.inf and np.inf
        bounds = (
            [-np.inf] * len(p0),
            [np.inf] * len(p0)
        )
    # Warn (but still attempt the fit) if the starting guess already
    # violates the bounds; the penalty in error() will push it back.
    if is_out_of_bounds(p0, bounds):
        msg = 'Guess {} is outside of bounds {}'.format(p0, bounds)
        warnings.warn(msg, RuntimeWarning)
    # Valid data, so fit the spectrum
    results = leastsq(func=error, x0=p0, args=(spectrum, func, bounds), full_output=True)
    p_fit, cov_x, infodict, mesg, status = results
    # Status 4 is often a sign of mismatched datatypes.
    if status == 4:
        msg = "Precision errors encountered during fitting. Check dtypes."
        warnings.warn(msg, RuntimeWarning)
    # Calculate residual errors (root-mean-square of the misfit)
    res_ = (spectrum - func(*p_fit))
    res_ = np.sqrt(np.mean(np.power(res_, 2)))
    return (p_fit, res_)
def find_whiteline(params, curve):
    """Return the x-position (eg. energy) at which *curve*, evaluated with
    *params*, reaches its maximum — the whiteline position."""
    predicted = curve(*params)
    peak_idx = np.argmax(predicted)
    return curve.x[peak_idx]
def fit_spectra(observations, func, p0, nonnegative=False, bounds=None, quiet=False, ncore=None):
    """Fit a function to a series observations.

    The shapes of ``observations`` and ``p0`` parameters must match in
    the first dimension, and the callable ``func`` should take a
    series of parameters (the exact number is determined by the last
    dimension of ``p0``) and return a set of observations (the length
    of which is determined by the last dimension of ``observations``).

    Parameters
    ----------
    observations : np.ndarray
        A 1- or 2-dimensional array of observations against which to fit
        the function ``func``.
    func : callable, str
        The function that will be used for fitting. It should match
        ``func(p0, p1, ...)`` where p0, p1, etc are the fitting
        parameters. Some useful functions can be found in the
        ``xanespy.fitting`` module.
    p0 : np.ndarray
        Initial guess for parameters, with similar dimensions to a
        frameset. Example, fitting 3 sources (plus offset) for a (1,
        40, 256, 256) 40-energy frameset requires p0 to be (1, 4,
        256, 256).
    nonnegative : bool, optional
        If true (default), negative parameters will be avoided. This
        can also be a tuple to allow for fine-grained control. Eg:
        (True, False) will only punish negative values in the first
        of the two parameters.
    bounds : 2-tuple of array_like, optional
        Defines upper and lower bounds for fitting. See
        :py:func:`scipy.optimize.curve_fit` for more details.
    quiet : bool, optional
        Whether to suppress the progress bar, etc.
    ncore : int, optional
        How many processes to use in the pool. See
        :func:`~xanespy.utilities.nproc` for more details.

    Returns
    -------
    params : numpy.ndarray
        The fit parameters (as frames) for each source.
    residuals : numpy.ndarray
        Residual error after fitting, as maps.
    """
    # Massage the datas
    observations = np.array(observations)
    if observations.ndim == 1:
        # A lone spectrum: promote to a 1-row batch and remember to
        # squeeze the results again before returning.
        observations = observations.reshape((1, len(observations)))
        one_dimensional = True
    else:
        one_dimensional = False
    p0 = np.array(p0)
    if p0.ndim == 1:
        # NOTE(review): this broadcast uses observations.shape[1] (the
        # energy axis); it looks like it should be shape[0] (the number of
        # spectra, to pair one guess with each spectrum) — confirm.
        p0_shape = (observations.shape[1], p0.shape[0])
        p0 = np.broadcast_to(p0, p0_shape)
    params = np.empty_like(p0)
    # Execute fitting for each spectrum
    # NOTE(review): ``indices`` is never used below — the work items come
    # from ``zip(observations, p0)`` instead; likely leftover from an
    # earlier implementation.
    indices = iter_indices(observations, desc="Fitting spectra",
                           leftover_dims=1, quiet=quiet)
    # Execute fitting (with multiprocessing)
    payload = zip(observations, p0)
    if not quiet:
        payload = tqdm.tqdm(payload, total=len(observations),
                            desc="Fitting spectra", unit='spctrm')
    with Pool(nproc(ncore)) as pool:
        fitter = functools.partial(_fit_sources, func=func, nonnegative=nonnegative, bounds=bounds)
        params = pool.map(fitter, payload)
    # Prepare the results for returning: split the (p_fit, residual)
    # pairs into two parallel arrays.
    params, residuals = zip(*params)
    params = np.array(params)
    residuals = np.array(residuals)
    if one_dimensional:
        params = params[0]
        residuals = residuals[0]
    return (params, residuals)
class Curve():
    """Base class for a callable curve used during fitting.

    Subclasses define ``param_names`` and implement ``__call__`` plus,
    optionally, ``guess_params``.
    """
    name = "curve"
    param_names = ()  # type: Tuple[str, ...]

    def __init__(self, x):
        # The independent variable (eg. energies) the curve is evaluated over.
        self.x = x

    def guess_params(self, intensities, edge, named_tuple=True):
        """Estimate starting fit parameters; subclasses must override."""
        raise NotImplementedError()

    def NamedTuple(self, *params):
        """Return a named tuple with the given parameters, labeled
        according to this curve's ``param_names``."""
        tuple_cls = namedtuple('{}_params'.format(self.name), self.param_names)
        return tuple_cls(*params)
class Line(Curve):
    """A straight line ``y = m*x + b`` over the stored x-values."""

    def guess_params(self, intensities, edge, named_tuple=True):
        """Estimate slope and intercept from the endpoints of *intensities*."""
        # Slope from the first and last points...
        rise = intensities[-1] - intensities[0]
        run = self.x[-1] - self.x[0]
        slope = rise / run
        # ...then extrapolate back to the y-intercept.
        intercept = intensities[0] - slope * self.x[0]
        return (slope, intercept)

    def __call__(self, m, b):
        return m * self.x + b
class LinearCombination(Curve):
    """Combines other curves into one callable.

    The constructor accepts the keyword argument ``sources``, which
    should be a list of numpy arrays. The resulting object can then be
    called with one weight per source plus a trailing global offset.
    For example, with two sources, the object is called as

    .. code:: python

        # Prepare the separate sources
        x = np.linspace(0, 2*np.pi, num=361)
        sources = [np.sin(x), np.sin(2*x)]
        # Produce a combo with 0.5*sin(x) + 0.25*sin(2x) + 2
        lc = LinearCombination(sources=sources)
        out = lc(0.5, 0.25, 2)

    The final output will have the same shape as the sources, which
    should all be the same shape as each other.
    """
    name = "linear_combination"

    def __init__(self, sources):
        self.sources = sources

    def __call__(self, *params):
        # The last parameter is the global offset; the rest are weights.
        weights = params[0:-1]
        combined = sum(w * src for w, src in zip(weights, self.sources))
        # Apply the global offset on top of the weighted sum.
        return combined + params[-1]

    @property
    def param_names(self):
        weight_names = tuple('weight_%d' % idx
                             for idx in range(len(self.sources)))
        return weight_names + ('offset',)
class Gaussian(Curve):
    """A Gaussian (bell-shaped) curve.

    Mathematically:

    .. math::
        y = a e^{\\frac{-(x-b)**2}{2c^2}}

    Parameters
    ----------
    x : np.ndarray
        Array of x-values to input into the Gaussian function.
    """
    name = "gaussian"
    param_names = ('height', 'center', 'width')

    def __call__(self, height, center, width):
        # a * exp(-(x - b)^2 / (2 c^2)) with (a, b, c) = (height, center, width)
        offset_sq = np.square(self.x - center)
        return height * np.exp(-offset_sq / (2 * width**2))
class L3Curve(Curve):
    """An L_3 absorption edge.

    This function is a combination of two Gaussian peaks and a step
    function. The first 3 parameters give the height, position and
    width of one peak, and parameters 3:6 give the same for a second
    peak. Parameters 6:9 are height, position and width of an arctan
    step function. Parameter 9 is a global offset.

    Parameters
    ----------
    x : np.ndarray
        Array of x-values (eg. energies) over which the curve is
        evaluated.
    num_peaks : int, optional
        How many Gaussian peaks to fit across the edge.
    """
    name = "L3-gaussian"

    def __init__(self, x, num_peaks=2):
        self.x = x
        self.num_peaks = num_peaks
        self.dtype = x.dtype

    def __call__(self, *params):
        # Label the flat parameter list so the step/offset params can be
        # accessed by name below.
        Params = namedtuple('L3Params', self.param_names)
        p = Params(*params)
        Es = self.x
        # Add two gaussian fields
        out = np.zeros_like(Es)
        gaussian = Gaussian(x=Es)
        for idx in range(self.num_peaks):
            # Each peak consumes 3 consecutive params: height, center, sigma.
            i = 3*idx
            p_i = p[i:i+3]
            out += gaussian(*p_i)
        # Add arctan step function
        out += p.sig_height * (np.arctan((Es-p.sig_center)*p.sig_sigma) / np.pi + 0.5)
        # Add vertical offset
        out += p.offset
        return out

    @property
    def param_names(self):
        # Parameter order must match the slicing in __call__: peaks first,
        # then the sigmoid step, then the global offset.
        pnames = []
        # Add Gaussian parameters
        for idx in range(self.num_peaks):
            pnames.append('height_%d' % idx)
            pnames.append('center_%d' % idx)
            pnames.append('sigma_%d' % idx)
        # Add sigmoid parameters
        pnames.append('sig_height')
        pnames.append('sig_center')
        pnames.append('sig_sigma')
        # Add global y-offset parameter
        pnames.append('offset')
        return tuple(pnames)
class KCurve(Curve):
    """A K absorption edge.

    **Fit Parameters:**

    scale
      Overall scale factor for curve
    voffset
      Overall vertical offset for the curve
    E0
      Edge position as energy of maximum in second derivative at edge
    sigw
      Sharpenss of the edge sigmoid
    bg_slope
      Linear increase/-decrease in background optical depth
    ga
      Height parameter for Gaussian whiteline peak
    gb
      Center parameter in eV (relative to E0) for Gaussian whiteline
      peak
    gc
      Width parameter for Gaussian whiteline peak
    """
    name = "K_edge_curve"
    param_names = (
        'scale', 'voffset', 'E0',  # Global parameters
        'sigw',  # Sharpness of the edge sigmoid
        'bg_slope',  # Linear reduction in background optical_depth
        'ga', 'gb', 'gc',  # Gaussian height, center and width
    )

    def guess_params(self, intensities, edge, named_tuple=True):
        """Guess initial starting parameters for a k-edge curve. This will
        give a rough estimate, appropriate for giving to the fit_kedge
        function as the starting parameters, p0.

        Arguments
        ---------
        intensities : np.ndarray
            An array containing optical_depth data that represents a
            K-edge spectrum. Only 1-dimensional data are currently
            accepted.
        edge : xanespy.edges.KEdge
            An X-ray Edge object, will be used for estimating the actual
            edge energy itself.
        named_tuple : bool, optional
            If truthy, the result will be a named tuple, otherwise a simple tuple.

        Returns
        -------
        p0 : tuple
            An iterable with the estimated parameters (see KEdgeParams
            for definition)

        Raises
        ------
        ValueError
            If *intensities* does not have the same shape as the stored
            x-values.
        """
        Is = np.array(intensities)
        if Is.shape != self.x.shape:
            raise ValueError('Intensities and energies do not have the same shape: {} vs {}'
                             ''.format(Is.shape, self.x.shape))
        # Guess the overall scale and offset parameters
        scale = k_edge_jump(frames=Is, energies=self.x, edge=edge)
        voffset = np.min(Is)
        # Estimate the edge position
        E0 = edge.E_0
        # Estimate the whiteline Gaussian parameters
        ga = 5 * (np.max(Is) - scale - voffset)
        gb = self.x[np.argmax(Is)] - E0
        gc = 2  # Arbitrary choice, should improve this in the future
        # Construct the parameters tuple
        KParams = namedtuple('KParams', self.param_names)
        p0 = KParams(scale=scale, voffset=voffset, E0=E0,
                     sigw=0.5, bg_slope=0,
                     ga=ga, gb=gb, gc=gc)
        if not named_tuple:
            p0 = tuple(p0)
        return p0

    def __call__(self, *params):
        # Named tuple to help keep track of parameters
        Params = namedtuple('Params', self.param_names)
        p = Params(*params)
        x = self.x
        # Adjust the x's to be relative to E_0
        x = x - p.E0
        # Sigmoid (arctan step scaled to run from 0 to 1)
        sig = np.arctan(x*p.sigw) / np.pi + 1/2
        # Gaussian whiteline peak
        gaus = p.ga*np.exp(-(x-p.gb)**2/2/p.gc**2)
        # Background (linear in energy, relative to E_0)
        bg = x * p.bg_slope
        curve = sig + gaus + bg
        curve = p.scale * curve + p.voffset
        return curve
|
canismarko/xanespy
|
xanespy/fitting.py
|
Python
|
gpl-3.0
| 18,115
|
[
"Gaussian"
] |
a3b732a132b6b87ccce812fbbb9e93c7008457f9c68cc26ad9d8282dfcce0d59
|
#!/usr/bin/env python3
#pylint: disable=missing-docstring
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import chigger

# Regression test: render a mug mesh and verify that a TimeAnnotation
# updates across timesteps, writing one screenshot per step.

# Open the result
file_name = '../input/mug_blocks_out.e'
reader = chigger.exodus.ExodusReader(file_name)
mug = chigger.exodus.ExodusResult(reader, range=[0,2], variable='convected', cmap='magma')
# NOTE: 'time' here is the annotation object, not the stdlib time module
# (which is not imported in this script).
time = chigger.annotations.TimeAnnotation(layer=2, font_size=48, color=[1,0,1], prefix='',
                                          suffix='', timedelta=False,
                                          justification='center', position=[0.5,0.5],
                                          vertical_justification='middle')

# Create the window
window = chigger.RenderWindow(time, mug, size=[300,300], test=True)

reader.update()
times = reader.getTimes()
# Step through the first 10 timesteps, keeping the annotation, the reader
# and the rendered window in sync, and dump a PNG for each step.
for i in range(10):
    time.update(time=times[i])
    reader.setOptions(timestep=i)
    window.update()
    window.write('time_annotation_change_' + str(i) + '.png')
window.start()
|
nuclear-wizard/moose
|
python/chigger/tests/annotations/time_annotation_change.py
|
Python
|
lgpl-2.1
| 1,218
|
[
"MOOSE"
] |
8e6a2687d0ec3fb8321ae83d861df8e6c4fefced0b02f995a061f529aa751a5e
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Multivariate autoregressive model (vector autoregression).
Implements the following model (num_blocks = max(ar_order, ma_order + 1)):
y(t, 1) = \sum_{i=1}^{ar_order} ar_coefs[i] * y(t - 1, i)
y(t, i) = y(t - 1, i - 1) + ma_coefs[i - 1] * e(t) for 1 < i < num_blocks
y(t, num_blocks) = y(t - 1, num_blocks - 1) + e(t)
Where e(t) are Gaussian with zero mean and learned covariance.
Each element of ar_coefs and ma_coefs is a [num_features x num_features]
matrix. Each y(t, i) is a vector of length num_features. Indices in the above
equations are one-based. Initial conditions y(0, i) come from prior state (which
may either be learned or left as a constant with high prior covariance).
If ar_order > ma_order, the observation model is:
y(t, 1) + observation_noise(t)
If ma_order >= ar_order, it is (to observe the moving average component):
y(t, 1) + y(t, num_blocks) + observation_noise(t)
Where observation_noise(t) are Gaussian with zero mean and learned covariance.
This implementation uses a formulation which puts all of the autoregressive
coefficients in the transition equation for the observed component, which
enables learning using truncated backpropagation. Noise is not applied directly
to the observed component (with the exception of standard observation noise),
which further aids learning of the autoregressive coefficients when VARMA is in
an ensemble with other models (in which case having an observation noise term is
usually unavoidable).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.timeseries.python.timeseries import math_utils
from tensorflow.contrib.timeseries.python.timeseries.state_space_models import state_space_model
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
class VARMA(state_space_model.StateSpaceModel):
  """A VARMA model implementation as a special case of the state space model."""

  def __init__(self,
               autoregressive_order,
               moving_average_order,
               configuration=state_space_model.StateSpaceModelConfiguration()):
    """Construct a VARMA model.

    The size of the latent state for this model is:
      num_features * max(autoregressive_order, moving_average_order + 1)
    Square matrices of this size are constructed and multiplied.

    Args:
      autoregressive_order: The maximum autoregressive lag.
      moving_average_order: The maximum moving average lag, after which
        transient deviations are expected to return to their long-term mean.
      configuration: A StateSpaceModelConfiguration object.
    """
    self.ar_order = autoregressive_order
    self.ma_order = moving_average_order
    # One block per lag; the extra "+ 1" block catches accumulated MA errors.
    self.state_num_blocks = max(autoregressive_order, moving_average_order + 1)
    super(VARMA, self).__init__(configuration=configuration)
    self.state_dimension = self.state_num_blocks * self.num_features

  def _define_parameters(self, observation_transition_tradeoff_log=None):
    with variable_scope.variable_scope(self._variable_scope):
      # TODO(allenl): Evaluate parameter transformations for AR/MA coefficients
      # which improve interpretability/stability.
      self.ar_coefs = variable_scope.get_variable(
          name="ar_coefs",
          shape=[self.num_features, self.num_features, self.ar_order],
          dtype=self.dtype,
          initializer=init_ops.zeros_initializer())
      # MA coefficients start as identity matrices (one per MA lag).
      self.ma_coefs = variable_scope.get_variable(
          name="ma_coefs",
          initializer=array_ops.tile(
              linalg_ops.eye(self.num_features, dtype=self.dtype)[None, :, :],
              [self.ma_order, 1, 1]),
          dtype=self.dtype)
    super(VARMA, self)._define_parameters(
        observation_transition_tradeoff_log=observation_transition_tradeoff_log)

  def get_state_transition(self):
    """Construct state transition matrix from VARMA parameters.

    Returns:
      the state transition matrix. It has shape
      [self.state_dimension, self.state_dimension].
    """
    # Pad any unused AR blocks with zeros. The extra state is necessary if
    # ma_order >= ar_order.
    ar_coefs_padded = array_ops.reshape(
        array_ops.pad(self.ar_coefs,
                      [[0, 0], [0, 0],
                       [0, self.state_num_blocks - self.ar_order]]),
        [self.num_features, self.state_dimension])
    # Shift matrix moves each block down one slot (y(t, i) = y(t-1, i-1)).
    shift_matrix = array_ops.pad(
        linalg_ops.eye(
            (self.state_num_blocks - 1) * self.num_features, dtype=self.dtype),
        [[0, 0], [0, self.num_features]])
    return array_ops.concat([ar_coefs_padded, shift_matrix], axis=0)

  def get_noise_transform(self):
    """Construct state noise transform matrix from VARMA parameters.

    Returns:
      the state noise transform matrix. It has shape
      [self.state_dimension, self.num_features].
    """
    # Noise is broadcast, through the moving average coefficients, to
    # un-observed parts of the latent state.
    ma_coefs_padded = array_ops.reshape(
        array_ops.pad(self.ma_coefs,
                      [[self.state_num_blocks - 1 - self.ma_order, 0], [0, 0],
                       [0, 0]]),
        [(self.state_num_blocks - 1) * self.num_features, self.num_features],
        name="noise_transform")
    # Deterministically apply noise to the oldest component.
    return array_ops.concat(
        [ma_coefs_padded,
         linalg_ops.eye(self.num_features, dtype=self.dtype)],
        axis=0)

  def get_observation_model(self, times):
    """Construct observation model matrix from VARMA parameters.

    Args:
      times: A [batch size] vector indicating the times observation models are
          requested for. Unused.
    Returns:
      the observation model matrix. It has shape
      [self.num_features, self.state_dimension].
    """
    del times  # StateSpaceModel will broadcast along the batch dimension
    if self.ar_order > self.ma_order or self.state_num_blocks < 2:
      # Observe only the first block: y(t, 1) + observation noise.
      return array_ops.pad(
          linalg_ops.eye(self.num_features, dtype=self.dtype),
          [[0, 0], [0, self.num_features * (self.state_num_blocks - 1)]],
          name="observation_model")
    else:
      # Add a second observed component which "catches" the accumulated moving
      # average errors as they reach the end of the state. If ar_order >
      # ma_order, this is unnecessary, since accumulated errors cycle naturally.
      return array_ops.concat(
          [
              array_ops.pad(
                  linalg_ops.eye(self.num_features, dtype=self.dtype),
                  [[0, 0], [0,
                            self.num_features * (self.state_num_blocks - 2)]]),
              linalg_ops.eye(self.num_features, dtype=self.dtype)
          ],
          axis=1,
          name="observation_model")

  def get_state_transition_noise_covariance(
      self, minimum_initial_variance=1e-5):
    # Most state space models use only an explicit observation noise term to
    # model deviations from expectations, and so a low initial transition noise
    # parameter is helpful there. Since deviations from expectations are also
    # modeled as transition noise in VARMA, we set its initial value based on a
    # slight over-estimate empirical observation noise.
    if self._input_statistics is not None:
      feature_variance = self._scale_variance(
          self._input_statistics.series_start_moments.variance)
      initial_transition_noise_scale = math_ops.log(
          math_ops.maximum(
              math_ops.reduce_mean(feature_variance), minimum_initial_variance))
    else:
      initial_transition_noise_scale = 0.
    # Shape the learned covariance to the noise-transform's output dimension.
    state_noise_transform = ops.convert_to_tensor(
        self.get_noise_transform(), dtype=self.dtype)
    state_noise_dimension = state_noise_transform.get_shape()[1].value
    return math_utils.variable_covariance_matrix(
        state_noise_dimension, "state_transition_noise",
        dtype=self.dtype,
        initial_overall_scale_log=initial_transition_noise_scale)
|
eadgarchen/tensorflow
|
tensorflow/contrib/timeseries/python/timeseries/state_space_models/varma.py
|
Python
|
apache-2.0
| 8,893
|
[
"Gaussian"
] |
03dc363362f05267f2e6b977013235098dab655ab6ef57554c1208d39993d93f
|
# Test manifest consumed by the RDKit test runner: each entry is
# (interpreter, script-name, extra-options).
tests = [("python", "testPickers.py", {}), ("python", "testMaxMin.py", {})]
# Longer-running tests (none for this module).
longTests = []

if __name__ == '__main__':
    import sys
    from rdkit import TestRunner
    # NOTE(review): RunScript's second return value rebinds ``tests`` above;
    # only ``failed`` is used afterwards — exit status is the failure count.
    failed, tests = TestRunner.RunScript('test_list.py', 0, 1)
    sys.exit(len(failed))
|
rvianello/rdkit
|
Code/SimDivPickers/Wrap/test_list.py
|
Python
|
bsd-3-clause
| 249
|
[
"RDKit"
] |
a525a832b2cbbd3e654521fa59df64a88973c8d85d7b3394baddd55df50616e6
|
#!/usr/bin/env python3
"""
This is to assign functional annotation to gene models using a wide variety of evidence. Meant to
support unlimited hierarchy rules, this utility relies on a user-created configuration file.
Current limitations:
- Rules are evaluated on a per-gene basis, so no rules are currently possible that
would need to consider annotations of other genes in the set.
- IGS 'PFunc' hierarchy
http://imgur.com/1odYcT5
"""
import argparse
import cProfile
import math
import os
import re
import biocode.annotation
import biocode.gff
import biocode.utils
import biocode.things
import sqlite3
import sys
import xml.etree.ElementTree as etree
import yaml
def main():
    """Drive functional annotation: parse CLI options and the YAML config,
    apply each configured evidence tier to the polypeptides in order, then
    write the annotated results as FASTA or GFF3."""
    parser = argparse.ArgumentParser( description='Assigns functional annotation based on user-configurable evidence tiers')

    ## output file to be written
    parser.add_argument('-c', '--config_file', type=str, required=True, help='Configuration file for annotation' )
    parser.add_argument('-o', '--output_base', type=str, required=True, help='Base name/path of output files to be created' )
    parser.add_argument('-f', '--output_format', type=str, required=False, default='gff3', help='Desired output format' )
    parser.add_argument('-npp', '--no_product_processing', action="store_true", help="Pass this to turn off post-processing of gene product names")
    args = parser.parse_args()

    # Log of which evidence source decided each annotation.
    sources_log_fh = open("{0}.sources.log".format(args.output_base), 'wt')

    # SECURITY NOTE(review): yaml.load() without an explicit Loader can
    # execute arbitrary Python tags from the config file; yaml.safe_load()
    # would be safer if the configs never rely on custom tags — confirm.
    configuration = yaml.load(open(args.config_file).read())
    check_configuration(configuration, args)
    evidence = parse_evidence_config(configuration)
    default_product_name = configuration['general']['default_product_name']

    # stores any active SQLite3 db connections
    db_conn = dict()

    # this is a dict of biothings.Polypeptide objects
    polypeptides = initialize_polypeptides(sources_log_fh, configuration['input']['polypeptide_fasta'], default_product_name)

    # Apply the evidence tiers in the user-configured priority order; each
    # supported type gets its own index/evidence DB pair and applier.
    for label in configuration['order']:
        if label not in evidence:
            raise Exception("ERROR: There is a label '{0}' in the 'order' section of the conf file that isn't present in the 'evidence' section".format(label))

        if evidence[label]['type'] == 'HMMer3_htab':
            index_conn, ev_db_conn = get_or_create_db_connections(type_ev='hmm_ev', configuration=configuration,
                                                                  evidence=evidence, label=label, db_conn=db_conn, output_base=args.output_base)
            # Autocommit mode: transactions are managed explicitly downstream.
            index_conn.isolation_level = None
            apply_hmm_evidence(polypeptides=polypeptides, ev_conn=ev_db_conn, config=configuration,
                               ev_config=evidence[label], label=label, index_conn=index_conn, log_fh=sources_log_fh)
        elif evidence[label]['type'] == 'RAPSearch2_m8':
            index_conn, ev_db_conn = get_or_create_db_connections(type_ev='blast_ev', configuration=configuration,
                                                                  evidence=evidence, label=label, db_conn=db_conn, output_base=args.output_base)
            index_conn.isolation_level = None
            apply_blast_evidence(polypeptides=polypeptides, ev_conn=ev_db_conn, config=configuration,
                                 ev_config=evidence[label], label=label, index_conn=index_conn, log_fh=sources_log_fh)
        elif evidence[label]['type'] == 'TMHMM':
            index_conn, ev_db_conn = get_or_create_db_connections(type_ev='tmhmm_ev', configuration=configuration,
                                                                  evidence=evidence, label=label, db_conn=db_conn, output_base=args.output_base)
            apply_tmhmm_evidence(polypeptides=polypeptides, ev_conn=ev_db_conn, config=configuration,
                                 ev_config=evidence[label], label=label, log_fh=sources_log_fh)
        elif evidence[label]['type'] == 'lipoprotein_motif_bsml':
            index_conn, ev_db_conn = get_or_create_db_connections(type_ev='lipoprotein_motif_ev', configuration=configuration,
                                                                  evidence=evidence, label=label, db_conn=db_conn, output_base=args.output_base)
            apply_lipoprotein_motif_evidence(polypeptides=polypeptides, ev_conn=ev_db_conn, config=configuration,
                                             ev_config=evidence[label], label=label, log_fh=sources_log_fh)
        else:
            raise Exception("ERROR: Unsupported evidence type '{0}' with label '{1}' in configuration file".format(evidence[label]['type'], label))

    # close all db connections
    for label in db_conn:
        db_conn[label].close()

    polyset = biocode.things.PolypeptideSet()
    polyset.load_from_dict(polypeptides)

    # Do any post-processing
    if not args.no_product_processing:
        for polypeptide in polyset.polypeptides:
            annot = polypeptide.annotation
            annot.set_processed_product_name()
            annot.set_processed_gene_symbol()

    perform_final_checks(polypeptides=polypeptides, config=configuration, log_fh=sources_log_fh)

    # Write the output
    if args.output_format == 'fasta':
        polyset.write_fasta(path="{0}.faa".format(args.output_base))
    elif args.output_format == 'gff3':
        ## parse input GFF
        (assemblies, ref_features) = biocode.gff.get_gff3_features( configuration['input']['gff3'] )

        ## merge annotation with polypeptide collection
        biocode.gff.add_annotation(features=ref_features, polypeptide_set=polyset)

        ## print the new GFF
        biocode.gff.print_gff3_from_assemblies(assemblies=assemblies, ofh=open("{0}.gff3".format(args.output_base), 'wt'))
def already_indexed(path=None, index=None):
    """Return True when *path* is already recorded in the ``data_sources``
    table of the SQLite index connection *index*, otherwise False."""
    cursor = index.cursor()
    cursor.execute("SELECT id FROM data_sources WHERE source_path = ?", (path, ))
    # A single fetch is enough: we only care whether any row exists.
    row = cursor.fetchone()
    cursor.close()
    return row is not None
def apply_blast_evidence(polypeptides=None, ev_conn=None, config=None, ev_config=None, label=None, index_conn=None, log_fh=None):
    """
    Uses BLAST (or similar) evidence to assign functional annotation to polypeptides.

    polypeptides: a dict of biothings.Polypeptides objects, keyed on ID
    ev_conn: SQLite3 connection to the parsed BLAST(ish) search evidence db for this set of searches
    config: The yaml object for the parsed annotation config file
    ev_config: The parsed evidence section for this label within the annotation config file
    label: Label for the evidence track entry within the annotation config file
    index_conn: SQLite3 connection to the reference index for the database searched
    log_fh: open file handle to which annotation decisions are logged
    """
    print("DEBUG: Processing BLAST (or blast-like) evidence tier label: {0}".format(label))
    default_product = config['general']['default_product_name']
    ev_curs = ev_conn.cursor()
    # Doing all the cursors here prevents doing it repeatedly within calling each accession. Huge performance improvement.
    index_acc_curs = index_conn.cursor()
    index_acc_curs.execute("begin")
    index_ec_curs = index_conn.cursor()
    index_go_curs = index_conn.cursor()
    # Hits are examined best (lowest) E-value first
    ev_qry = "SELECT sbj_id, align_len, perc_identity, eval, bit_score FROM blast_hit WHERE qry_id = ? ORDER BY eval ASC"
    print("DEBUG: Applying {1} results to {0} polypeptides".format(len(polypeptides), label))
    blast_class_limit = None
    if 'class' in ev_config:
        blast_class_limit = ev_config['class']
    prepend_text = None
    if 'prepend_text' in ev_config:
        prepend_text = ev_config['prepend_text']
    append_text = None
    if 'append_text' in ev_config:
        append_text = ev_config['append_text']
    # Optional cap on how many polypeptides to process (debugging aid).
    # BUG FIX: DEBUG_LIMIT was previously assigned only when the config key
    # existed, raising UnboundLocalError below when it was absent.  A default
    # of 0 decrements to negative values and never hits the == 0 break, so it
    # behaves as "no limit".
    DEBUG_LIMIT = config['general'].get('debugging_polypeptide_limit', 0)
    # Are coverage cutoffs defined?
    query_cov_cutoff = None
    match_cov_cutoff = None
    if 'query_cov' in ev_config:
        query_cov_cutoff = int(ev_config['query_cov'].rstrip('%'))
    if 'match_cov' in ev_config:
        match_cov_cutoff = int(ev_config['match_cov'].rstrip('%'))
    percent_identity_cutoff = None
    if 'percent_identity_cutoff' in ev_config:
        percent_identity_cutoff = int(ev_config['percent_identity_cutoff'].rstrip('%'))
    for id in polypeptides:
        polypeptide = polypeptides[id]
        print("DEBUG: Parsing {0} evidence for polypeptide ID {1}, length: {2}".format(label, id, polypeptide.length))
        annot = polypeptide.annotation
        DEBUG_LIMIT = DEBUG_LIMIT - 1
        if DEBUG_LIMIT == 0:
            break
        if config['general']['allow_attributes_from_multiple_sources'] == 'Yes':
            raise Exception("ERROR: Support for the general:allow_attributes_from_multiple_sources=Yes setting not yet implemented")
        else:
            # If the product name already changed, an earlier evidence tier annotated it
            if annot.product_name != default_product: continue
        for ev_row in ev_curs.execute(ev_qry, (polypeptide.id,)):
            if query_cov_cutoff is not None:
                perc_coverage = (ev_row[1] / polypeptide.length) * 100
                if perc_coverage < query_cov_cutoff:
                    print("\tSkipping accession {0} because coverage {1}% doesn't meet cutoff {2}% requirement".format(
                        ev_row[0], perc_coverage, query_cov_cutoff))
                    continue
            if percent_identity_cutoff is not None:
                if ev_row[2] < percent_identity_cutoff:
                    print("\tSkipping accession {0} because percent identity of {1}% doesn't meet cutoff {2}% requirement".format(
                        ev_row[0], ev_row[2], percent_identity_cutoff))
                    continue
            blast_annot = get_blast_accession_info(conn=index_conn, accession=ev_row[0], config=config, acc_curs=index_acc_curs, ec_curs=index_ec_curs, go_curs=index_go_curs)
            # is there a class limitation?
            if blast_class_limit is not None:
                if blast_class_limit == 'trusted':
                    if 'is_characterized' in blast_annot.other_attributes:
                        if blast_annot.other_attributes['is_characterized'] != 1:
                            print("\tSkipping accession {0} because it is not characterized".format(ev_row[0]))
                            continue
                        else:
                            print("\tAccepting accession {0} because it is characterized".format(ev_row[0]))
                            pass
                else:
                    raise Exception("ERROR: Unrecognized value ('{0}') for class in config file".format(blast_class_limit))
            if match_cov_cutoff is not None:
                if 'ref_len' not in blast_annot.other_attributes:
                    print("\tSkipping accession {0} because length wasn't found".format(ev_row[0]))
                    continue
                match_coverage = (ev_row[1] / blast_annot.other_attributes['ref_len'])*100
                if match_coverage < match_cov_cutoff:
                    print("\tSkipping accession {0} because match coverage {1}% doesn't meet cutoff {2}% requirement".format(
                        ev_row[0], match_coverage, match_cov_cutoff))
                    continue
            # Assign the product name, with optional configured decorations
            if prepend_text is None:
                annot.product_name = blast_annot.product_name
            else:
                annot.product_name = "{0} {1}".format(prepend_text, blast_annot.product_name)
            if append_text is not None:
                annot.product_name = "{0} {1}".format(annot.product_name, append_text)
            log_fh.write("INFO: {1}: Set product name to '{0}' from {3} hit to {2}\n".format(
                annot.product_name, id, ev_row[0], label))
            annot.gene_symbol = blast_annot.gene_symbol
            log_fh.write("INFO: {1}: Set gene_symbol to '{0}' from {3} hit to {2}\n".format(
                annot.gene_symbol, id, ev_row[0], label))
            for go_annot in blast_annot.go_annotations:
                annot.add_go_annotation(go_annot)
            for ec_num in blast_annot.ec_numbers:
                annot.add_ec_number(ec_num)
            # If we get this far we've assigned annotation and don't want to look at any more
            break
    ev_curs.close()
    index_acc_curs.close()
    index_ec_curs.close()
    index_go_curs.close()
def apply_hmm_evidence(polypeptides=None, ev_conn=None, config=None, ev_config=None, label=None, index_conn=None, log_fh=None):
    """
    Uses HMM evidence to assign functional annotation to polypeptides.

    polypeptides: a dict of biothings.Polypeptides objects, keyed on ID
    ev_conn: SQLite3 connection to the parsed HMM search evidence db for this set of searches
    config: The yaml object for the parsed annotation config file
    ev_config: The parsed evidence section for this label within the annotation config file
    label: Label for the evidence track entry within the annotation config file
    index_conn: SQLite3 connection to the reference index for the database searched
    log_fh: open file handle to which annotation decisions are logged
    """
    default_product = config['general']['default_product_name']
    ev_curs = ev_conn.cursor()
    # Hits are examined best (lowest) total E-value first
    ev_qry = "SELECT hmm_accession, total_score FROM hmm_hit WHERE qry_id = ? ORDER BY total_hit_eval ASC"
    acc_main_curs = index_conn.cursor()
    acc_main_curs.execute("begin")
    hmm_class_limit = None
    go_curs = index_conn.cursor()
    go_qry = "SELECT go_id FROM hmm_go WHERE hmm_id = ?"
    ec_curs = index_conn.cursor()
    ec_qry = "SELECT ec_id FROM hmm_ec WHERE hmm_id = ?"
    if 'class' in ev_config:
        hmm_class_limit = ev_config['class']
    prepend_text = None
    if 'prepend_text' in ev_config:
        prepend_text = ev_config['prepend_text']
    append_text = None
    if 'append_text' in ev_config:
        append_text = ev_config['append_text']
    print("DEBUG: Applying HMM results to {0} polypeptides".format(len(polypeptides)))
    # Optional cap on how many polypeptides to process (debugging aid).
    # BUG FIX: DEBUG_LIMIT was previously assigned only when the config key
    # existed, raising UnboundLocalError below when it was absent.  A default
    # of 0 never reaches the == 0 break, so it behaves as "no limit".
    DEBUG_LIMIT = config['general'].get('debugging_polypeptide_limit', 0)
    for id in polypeptides:
        #print("DEBUG: Parsing {0} evidence for polypeptide ID {1}".format(label, id))
        polypeptide = polypeptides[id]
        annot = polypeptide.annotation
        DEBUG_LIMIT = DEBUG_LIMIT - 1
        if DEBUG_LIMIT == 0:
            break
        if config['general']['allow_attributes_from_multiple_sources'] == 'Yes':
            raise Exception("ERROR: Support for the general:allow_attributes_from_multiple_sources=Yes setting not yet implemented")
        else:
            # If this has changed already, it has already been annotated
            if annot.product_name != default_product: continue
        for ev_row in ev_curs.execute(ev_qry, (polypeptide.id,)):
            acc_main_qry = "SELECT version, hmm_com_name, ec_num, isotype, id FROM hmm WHERE (version = ? or accession = ?)"
            if hmm_class_limit is None:
                acc_main_qry_args = (ev_row[0], ev_row[0], )
            else:
                # restrict matches to the configured isotype class
                acc_main_qry += " AND isotype = ?"
                acc_main_qry_args = (ev_row[0], ev_row[0], hmm_class_limit)
            for acc_main_row in acc_main_curs.execute(acc_main_qry, acc_main_qry_args):
                if prepend_text is None:
                    annot.product_name = acc_main_row[1]
                else:
                    annot.product_name = "{0} {1}".format(prepend_text, acc_main_row[1])
                if append_text is not None:
                    annot.product_name = "{0} {1}".format(annot.product_name, append_text)
                # BUG FIX: the log lines previously used {3} (the isotype limit)
                # where the evidence label ({4}) was intended -- label was passed
                # in but never used, and the blast tier logs "from <label> hit".
                log_fh.write("INFO: {1}: Set product name to '{0}' from {4} hit to {2}, isotype:{3}\n".format(
                    annot.product_name, id, ev_row[0], hmm_class_limit, label))
                if acc_main_row[2] is not None:
                    annot.gene_symbol = acc_main_row[2]
                    log_fh.write("INFO: {1}: Set gene_symbol to '{0}' from {4} hit to {2}, isotype:{3}\n".format(
                        annot.gene_symbol, id, ev_row[0], hmm_class_limit, label))
                ## add any matching GO terms
                for go_row in go_curs.execute(go_qry, (acc_main_row[4],)):
                    annot.add_go_annotation(biocode.annotation.GOAnnotation(go_id=go_row[0]))
                ## add any matching EC numbers
                for ec_row in ec_curs.execute(ec_qry, (acc_main_row[4],)):
                    annot.add_ec_number(biocode.annotation.ECAnnotation(number=ec_row[0]))
            # Only the best hit is applied
            break
    acc_main_curs.close()
    ev_curs.close()
    go_curs.close()
    ec_curs.close()
def apply_lipoprotein_motif_evidence(polypeptides=None, ev_conn=None, config=None, ev_config=None, label=None, log_fh=None):
    """
    Uses lipoprotein motif search evidence to assign a configured product name
    to any polypeptide with a motif hit that hasn't been annotated yet.

    polypeptides: a dict of biothings.Polypeptides objects, keyed on ID
    ev_conn: SQLite3 connection to the parsed lipoprotein motif evidence db
    config: The yaml object for the parsed annotation config file
    ev_config: The parsed evidence section for this label within the annotation config file
    label: Label for the evidence track entry within the annotation config file
    log_fh: open file handle to which annotation decisions are logged
    """
    default_product = config['general']['default_product_name']
    lipoprotein_motif_default_product = ev_config['product_name']
    ev_curs = ev_conn.cursor()
    ev_qry = """
          SELECT hit_acc, hit_desc, start, stop
            FROM lipoprotein_motif_hit
           WHERE qry_id = ?
    """
    # Optional cap on how many polypeptides to process (debugging aid).
    # BUG FIX: DEBUG_LIMIT was previously assigned only when the config key
    # existed, raising UnboundLocalError below when it was absent.  A default
    # of 0 never reaches the == 0 break, so it behaves as "no limit".
    DEBUG_LIMIT = config['general'].get('debugging_polypeptide_limit', 0)
    print("DEBUG: Applying lipoprotein_motif results to {0} polypeptides".format(len(polypeptides)))
    for id in polypeptides:
        #print("DEBUG: Parsing {0} evidence for polypeptide ID {1}".format(label, id))
        polypeptide = polypeptides[id]
        annot = polypeptide.annotation
        DEBUG_LIMIT = DEBUG_LIMIT - 1
        if DEBUG_LIMIT == 0:
            break
        if config['general']['allow_attributes_from_multiple_sources'] == 'Yes':
            raise Exception("ERROR: Support for the general:allow_attributes_from_multiple_sources=Yes setting not yet implemented")
        else:
            # If the product name already changed, an earlier evidence tier annotated it
            if annot.product_name != default_product: continue
        for ev_row in ev_curs.execute(ev_qry, (polypeptide.id,)):
            log_fh.write("INFO: {0}: Set product name to '{1}' because it had a lipoprotein_motif match to accession:{2}, description:{3}\n".format(
                id, lipoprotein_motif_default_product, ev_row[0], ev_row[1]))
            annot.product_name = lipoprotein_motif_default_product
            # a single motif hit is sufficient
            break
    ev_curs.close()
def apply_tmhmm_evidence(polypeptides=None, ev_conn=None, config=None, ev_config=None, label=None, log_fh=None):
    """
    Uses TMHMM evidence to assign a configured product name (and membrane GO
    term) to polypeptides with at least the configured number of predicted
    transmembrane helices.

    polypeptides: a dict of biothings.Polypeptides objects, keyed on ID
    ev_conn: SQLite3 connection to the parsed TMHMM evidence db
    config: The yaml object for the parsed annotation config file
    ev_config: The parsed evidence section for this label within the annotation config file
    label: Label for the evidence track entry within the annotation config file
    log_fh: open file handle to which annotation decisions are logged
    """
    default_product = config['general']['default_product_name']
    tmhmm_default_product = ev_config['product_name']
    min_helical_spans = int(ev_config['min_helical_spans'])
    ev_curs = ev_conn.cursor()
    # Counts the predicted path segments recorded for this query's TMHMM hit
    ev_qry = """
          SELECT th.id, count(tp.hit_id)
            FROM tmhmm_hit th
                 JOIN tmhmm_path tp ON th.id=tp.hit_id
           WHERE th.qry_id = ?
    """
    # Optional cap on how many polypeptides to process (debugging aid).
    # BUG FIX: DEBUG_LIMIT was previously assigned only when the config key
    # existed, raising UnboundLocalError below when it was absent.  A default
    # of 0 never reaches the == 0 break, so it behaves as "no limit".
    DEBUG_LIMIT = config['general'].get('debugging_polypeptide_limit', 0)
    print("DEBUG: Applying TMHMM results to {0} polypeptides".format(len(polypeptides)))
    for id in polypeptides:
        #print("DEBUG: Parsing {0} evidence for polypeptide ID {1}".format(label, id))
        polypeptide = polypeptides[id]
        annot = polypeptide.annotation
        DEBUG_LIMIT = DEBUG_LIMIT - 1
        if DEBUG_LIMIT == 0:
            break
        if config['general']['allow_attributes_from_multiple_sources'] == 'Yes':
            raise Exception("ERROR: Support for the general:allow_attributes_from_multiple_sources=Yes setting not yet implemented")
        else:
            # If the product name already changed, an earlier evidence tier annotated it
            if annot.product_name != default_product: continue
        for ev_row in ev_curs.execute(ev_qry, (polypeptide.id,)):
            if ev_row[1] >= min_helical_spans:
                annot.product_name = tmhmm_default_product
                log_fh.write("INFO: {0}: Set product name to '{1}' because it had a TMHMM prediction of {2} transmembrane helices\n".format(
                    id, tmhmm_default_product, ev_row[1]))
                # GO:0016021 "integral component of membrane"
                annot.add_go_annotation( biocode.annotation.GOAnnotation(go_id='0016021') )
            # the aggregate query yields a single row per query
            break
    ev_curs.close()
def cache_blast_hit_data(version=None, ref_curs=None, ev_curs=None):
    """
    Copies the reference annotation for one accession from the large source
    index into the smaller hits-found-only evidence index: the entry row, its
    accession row, and all associated GO and EC assignments.
    """
    select_entry = """
       SELECT e.id, e.full_name, e.organism, e.symbol, ea.accession, ea.res_length, ea.is_characterized
         FROM entry e
              JOIN entry_acc ea on ea.id=e.id
        WHERE ea.accession = ?
    """
    insert_entry = "INSERT INTO entry (id, full_name, organism, symbol) VALUES (?, ?, ?, ?)"
    insert_acc = "INSERT INTO entry_acc (id, accession, res_length, is_characterized) VALUES (?, ?, ?, ?)"
    insert_go = "INSERT INTO entry_go (id, go_id) VALUES (?, ?)"
    insert_ec = "INSERT INTO entry_ec (id, ec_num) VALUES (?, ?)"

    ref_curs.execute(select_entry, (version,))
    entry = ref_curs.fetchone()
    if entry is None:
        # accession not present in the reference index; nothing to copy
        return

    entry_id = entry[0]
    ev_curs.execute(insert_entry, (entry_id, entry[1], entry[2], entry[3]))
    ev_curs.execute(insert_acc, (entry_id, entry[4], entry[5], entry[6]))

    # copy GO assignments
    ref_curs.execute("SELECT id, go_id FROM entry_go WHERE id = ?", (entry_id,))
    for _, go_id in ref_curs.fetchall():
        ev_curs.execute(insert_go, (entry_id, go_id))

    # copy EC assignments
    ref_curs.execute("SELECT id, ec_num FROM entry_ec WHERE id = ?", (entry_id,))
    for _, ec_num in ref_curs.fetchall():
        ev_curs.execute(insert_ec, (entry_id, ec_num))
def cache_ev_blast_accessions(idx):
    """
    Returns a dict (accession -> True) of the accession entries whose reference
    annotation attributes have already been stored within this portable
    evidence DB.

    idx: SQLite3 connection to the portable evidence DB
    """
    ev_curs = idx.cursor()
    ev_curs.execute("SELECT accession FROM entry_acc")
    accessions = {row[0]: True for row in ev_curs.fetchall()}
    # BUG FIX: the cursor was previously left open
    ev_curs.close()
    return accessions
def cache_ev_hmm_version_accessions(idx):
    """
    Returns a dict (HMM version -> True) of the HMM version entries whose
    reference annotation attributes have already been stored within this
    portable evidence DB.

    idx: SQLite3 connection to the portable evidence DB
    """
    ev_curs = idx.cursor()
    ev_curs.execute("SELECT version FROM hmm")
    versions = {row[0]: True for row in ev_curs.fetchall()}
    # BUG FIX: the cursor was previously left open
    ev_curs.close()
    return versions
def cache_hmm_hit_data(version=None, ev_curs=None, ref_curs=None):
    """
    Copies the reference annotation for one HMM version from the reference
    index into the portable evidence index, along with its GO assignments.
    The caller is expected to have checked that the version isn't already
    cached.
    """
    # Column list is shared by the reference SELECT and the evidence INSERT so
    # the row tuple can be passed straight through.
    hmm_columns = ("id, accession, version, name, hmm_com_name, hmm_len, hmm_comment, trusted_global_cutoff, "
                   "trusted_domain_cutoff, noise_global_cutoff, noise_domain_cutoff, gathering_global_cutoff, "
                   "gathering_domain_cutoff, ec_num, gene_symbol, isotype")

    ref_curs.execute("SELECT {0} FROM hmm WHERE version = ?".format(hmm_columns), (version,))
    hmm_row = ref_curs.fetchone()
    if hmm_row is None:
        # HMM version not present in the reference index; nothing to copy
        return

    placeholders = ", ".join(["?"] * 16)
    ev_curs.execute("INSERT INTO hmm ({0}) VALUES ({1})".format(hmm_columns, placeholders), hmm_row)

    # copy the GO assignments tied to this HMM's reference id
    ref_curs.execute("SELECT id, hmm_id, go_id FROM hmm_go WHERE hmm_id = ?", (hmm_row[0],))
    for go_row in ref_curs.fetchall():
        ev_curs.execute("INSERT INTO hmm_go (id, hmm_id, go_id) VALUES (?, ?, ?)", go_row)
def check_configuration(conf, userargs):
    """
    Performs basic checks on the annotation configuration file
    format/syntax/values.  Ideally done before most of the rest of the script
    to save wasted compute time.  Raises on the first problem found; also
    lower-cases userargs.output_format in place.
    """
    # every expected top-level section must be present
    for section in ('general', 'indexes', 'input', 'order', 'evidence'):
        if section not in conf:
            raise Exception("ERROR: Expected a section called '{0}' in the annotation config file, but didn't find one.".format(section))

    # the input section must at least define the polypeptide FASTA
    if 'polypeptide_fasta' not in conf['input']:
        raise Exception("ERROR: You must at least define 'polypeptide_fasta' data in the 'input' section of the annotation config file")

    # the output format should be one of the recognized ones
    supported_output_formats = ['fasta', 'gff3']
    userargs.output_format = userargs.output_format.lower()
    if userargs.output_format not in supported_output_formats:
        raise Exception("ERROR: The output format specified '{0}' isn't supported. Please choose from: {1}".format(userargs.output_format,
                                                                                                                   ", ".join(supported_output_formats)))

    # GFF3 output requires the GFF3 input that supplies genomic coordinates
    if userargs.output_format == 'gff3':
        gff3_input = conf['input'].get('gff3')
        if gff3_input is None or len(gff3_input) == 0:
            raise Exception("ERROR: If you requested gff3 formatted output, you must specify the corresponding gff3 input file (which provides the genomic coordinates of the features involved.)")

    # every index referenced by an evidence item must be defined in the indexes section
    known_indexes = set(conf['indexes'])
    for item in conf['evidence']:
        if 'index' in item and item['index'] not in known_indexes:
            raise Exception("ERROR: Evidence item '{0}' references an index '{1}' not found in the indexes section of the config file".format(item['label'], item['index']))

    # any indexes defined should actually exist on disk
    for idx_label in conf['indexes']:
        if not os.path.exists(conf['indexes'][idx_label]):
            raise Exception("ERROR: Index with label:{0} and path:{1} couldn't be found".format(idx_label, conf['indexes'][idx_label]))
def get_blast_accession_info(conn=None, accession=None, config=None, acc_curs=None, ec_curs=None, go_curs=None):
    """
    Looks up reference annotation (product name, gene symbol, EC numbers, GO
    terms, reference length, characterization flag) for a BLAST subject
    accession and returns it as a FunctionalAnnotation object.  If the
    accession isn't found, the returned annotation carries the configured
    default product name.

    conn: SQLite3 connection to the reference index (kept for API
          compatibility; the passed cursors are what is actually used)
    accession: subject accession; sp|...|... and UniRef100_ decorations are
          stripped before lookup
    config: parsed annotation config (supplies the default product name)
    acc_curs/ec_curs/go_curs: pre-created cursors on the reference index
    """
    annot = biocode.annotation.FunctionalAnnotation(product_name=config['general']['default_product_name'])
    # First we need to get the ID from the accession
    qry = """
       SELECT e.id, ea.accession, e.full_name, e.symbol, ea.res_length, ea.is_characterized
         FROM entry e
              JOIN entry_acc ea ON e.id=ea.id
        WHERE ea.accession = ?
    """
    # SWISS-PROT accessions are in the format: sp|A4YVG3|RRF_BRASO
    if accession.startswith('sp|'):
        abbrev, accession, sprot_id = accession.split('|')
    elif accession.startswith('UniRef100_'):
        # BUG FIX: str.lstrip() strips a character SET, not a prefix, so it
        # could also remove leading U/n/i/R/e/f/1/0/_ characters belonging to
        # the accession itself.  Slice off the literal prefix instead.
        accession = accession[len('UniRef100_'):]
    for row in acc_curs.execute(qry, (accession,)):
        entry_id = row[0]
        annot.product_name = row[2]
        annot.gene_symbol = row[3]
        annot.other_attributes['ref_len'] = row[4]
        annot.other_attributes['is_characterized'] = row[5]
        qry = "SELECT ec_num FROM entry_ec WHERE id = ?"
        for ec_row in ec_curs.execute(qry, (entry_id,)):
            annot.add_ec_number( biocode.annotation.ECAnnotation(number=ec_row[0]) )
        qry = "SELECT go_id FROM entry_go WHERE id = ?"
        for go_row in go_curs.execute(qry, (entry_id,)):
            annot.add_go_annotation( biocode.annotation.GOAnnotation(go_id=go_row[0], with_from=row[1]) )
    return annot
def get_or_create_db_connections(type_ev=None, configuration=None, evidence=None, label=None,
                                 db_conn=None, output_base=None):
    """
    Returns a (index_conn, ev_db_conn) tuple of SQLite3 connections for one
    evidence tier, creating/initializing the on-disk evidence DB and parsing
    the tier's raw evidence files into it if that hasn't been done before.

    type_ev must be either 'hmm_ev', 'blast_ev', 'tmhmm_ev' or 'lipoprotein_motif_ev'
    configuration: the parsed annotation config
    evidence: the evidence section of the config, keyed on label
    label: the evidence tier label currently being processed
    db_conn: dict cache of open connections (keyed by index label / type_ev)
    output_base: base path used to name the on-disk evidence database
    """
    # only the similarity-search tiers reference a large external index
    if type_ev in ['hmm_ev', 'blast_ev']:
        index_label = evidence[label]['index']
    elif type_ev in ['tmhmm_ev', 'lipoprotein_motif_ev']:
        index_label = None
    else:
        # BUG FIX: an unrecognized type_ev previously fell through and caused
        # an UnboundLocalError on index_label below; fail with a clear message.
        raise Exception("ERROR: Unsupported type_ev '{0}' passed to get_or_create_db_connections".format(type_ev))
    # Use any existing index connection, else attach to it.
    index_conn = None
    if index_label is not None:
        if index_label in db_conn:
            index_conn = db_conn[index_label]
        else:
            try:
                index_conn = sqlite3.connect(configuration['indexes'][index_label])
            except sqlite3.OperationalError as e:
                raise Exception("ERROR: Failed to connect to evidence database {0} because {1}".format(configuration['indexes'][index_label], e))
            db_conn[index_label] = index_conn
    # Attach to or create an evidence database.
    # REFACTOR: both branches of the previous if/else performed the identical
    # sqlite3.connect() call; only the schema initialization depends on
    # whether the file already existed.
    ev_db_path = "{0}.{1}.sqlite3".format(output_base, type_ev)
    db_existed = os.path.exists(ev_db_path)
    ev_db_conn = sqlite3.connect(ev_db_path)
    if not db_existed:
        if type_ev == 'hmm_ev':
            initialize_hmm_results_db(ev_db_conn)
        elif type_ev == 'blast_ev':
            initialize_blast_results_db(ev_db_conn)
        elif type_ev == 'tmhmm_ev':
            initialize_tmhmm_results_db(ev_db_conn)
        elif type_ev == 'lipoprotein_motif_ev':
            initialize_lipoprotein_motif_results_db(ev_db_conn)
        db_conn[type_ev] = ev_db_conn
    # only parse the evidence if the list isn't already in the database
    if not already_indexed(path=evidence[label]['path'], index=ev_db_conn):
        if type_ev == 'hmm_ev':
            index_hmmer3_htab(path=evidence[label]['path'], ev_index=ev_db_conn, ref_index=index_conn)
            ev_db_conn.commit()
            # update the database search indexes
            hmm_ev_db_curs = ev_db_conn.cursor()
            hmm_ev_db_curs.execute("DROP INDEX IF EXISTS hmm_hit__qry_id")
            hmm_ev_db_curs.execute("CREATE INDEX hmm_hit__qry_id ON hmm_hit (qry_id)")
            hmm_ev_db_curs.close()
        elif type_ev == 'blast_ev':
            index_rapsearch2_m8(path=evidence[label]['path'], ev_index=ev_db_conn, ref_index=index_conn)
            ev_db_conn.commit()
            blast_ev_db_curs = ev_db_conn.cursor()
            blast_ev_db_curs.execute("DROP INDEX IF EXISTS blast_hit__qry_id")
            blast_ev_db_curs.execute("CREATE INDEX blast_hit__qry_id ON blast_hit (qry_id)")
            blast_ev_db_curs.close()
        elif type_ev == 'tmhmm_ev':
            index_tmhmm_raw(path=evidence[label]['path'], index=ev_db_conn)
            ev_db_conn.commit()
            tmhmm_ev_db_curs = ev_db_conn.cursor()
            tmhmm_ev_db_curs.execute("DROP INDEX IF EXISTS tmhmm_hit__qry_id")
            tmhmm_ev_db_curs.execute("CREATE INDEX tmhmm_hit__qry_id ON tmhmm_hit (qry_id)")
            tmhmm_ev_db_curs.execute("DROP INDEX IF EXISTS tmhmm_path__hit_id")
            tmhmm_ev_db_curs.execute("CREATE INDEX tmhmm_path__hit_id ON tmhmm_path (hit_id)")
            tmhmm_ev_db_curs.close()
        elif type_ev == 'lipoprotein_motif_ev':
            index_lipoprotein_motif(path=evidence[label]['path'], index=ev_db_conn)
            ev_db_conn.commit()
            lipo_ev_db_curs = ev_db_conn.cursor()
            lipo_ev_db_curs.execute("DROP INDEX IF EXISTS lipoprotein_motif_hit__qry_id")
            lipo_ev_db_curs.execute("CREATE INDEX lipoprotein_motif_hit__qry_id ON lipoprotein_motif_hit (qry_id)")
            lipo_ev_db_curs.close()
        ev_db_conn.commit()
    return (index_conn, ev_db_conn)
def get_files_from_path(path):
    """
    Expands a path specification into a list of file paths.  The caller may
    pass a single file, a list file, or a comma-separated combination of
    either; list files are detected purely by the .list extension.
    """
    expanded = []
    for entry in path.split(','):
        if entry.endswith('.list'):
            # a .list file holds one path per line
            expanded.extend(biocode.utils.read_list_file(entry))
        else:
            expanded.append(entry)
    return expanded
def index_hmmer3_htab(path=None, index=None, ev_index=None, ref_index=None):
    """
    Parses HMMER3 htab-formatted search result file(s) into the portable HMM
    evidence database, caching reference HMM annotation for any HMM version
    not yet copied over.

    path: a file, .list file, or comma-separated combination of either
    index: not referenced in this function body -- presumably kept for
           signature symmetry with the other index_* parsers; TODO confirm
    ev_index: SQLite3 connection to the portable evidence DB being populated
    ref_index: SQLite3 connection to the large reference HMM index
    """
    ev_curs = ev_index.cursor()
    ref_curs = ref_index.cursor()
    # count of htab fields that failed numeric conversion (reported at the end)
    parsing_errors = 0
    qry = """
       INSERT INTO hmm_hit (qry_id, qry_start, qry_end, hmm_accession, hmm_length, hmm_start, hmm_end,
                            domain_score, total_score, total_score_tc, total_score_nc, total_score_gc,
                            domain_score_tc, domain_score_nc, domain_score_gc, total_hit_eval,
                            domain_hit_eval)
       VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
    """
    paths = get_files_from_path(path)
    # HMM versions whose reference annotation is already cached in ev_index
    hmm_versions_found = cache_ev_hmm_version_accessions(ev_index)
    for file in paths:
        print("INFO: parsing: {0}".format(file))
        for line in open(file):
            line = line.rstrip()
            cols = line.split("\t")
            ## not sure what this is, but some lines have columns 7+ as these values:
            # hmm to ali to bias
            if cols[6] == 'hmm': continue
            # the following columns in the htab files are nullable, which are filled with various widths of '-' characters
            for i in (6,7,8,9,11,19,20):
                if '--' in cols[i]:
                    cols[i] = None
                elif len(cols[i]) > 0:
                    try:
                        cols[i] = float(cols[i])
                    except ValueError:
                        # unparseable numeric field: store NULL and keep going
                        parsing_errors += 1
                        cols[i] = None
            ev_curs.execute(qry, (cols[5], cols[8], cols[9], cols[0], int(cols[2]), cols[6], cols[7],
                                  cols[11], float(cols[12]), float(cols[17]), float(cols[18]), float(cols[23]),
                                  float(cols[21]), float(cols[22]), float(cols[24]), cols[19], cols[20]))
            if cols[0] not in hmm_versions_found:
                # NOTE(review): ev_index (a Connection) is passed as ev_curs here;
                # sqlite3 Connections support .execute() as a shortcut, so this
                # works, but confirm whether a cursor was intended.
                cache_hmm_hit_data(version=cols[0], ev_curs=ev_index, ref_curs=ref_curs)
                hmm_versions_found[cols[0]] = True
    # record the source so this path isn't re-parsed on the next run
    ev_curs.execute("INSERT INTO data_sources (source_path) VALUES (?)", (path,))
    ev_curs.close()
    ref_curs.close()
    if parsing_errors > 0:
        print("WARN: There were {0} parsing errors (columns converted to None) when processing {1}\n".format(parsing_errors, path))
def index_lipoprotein_motif(path=None, index=None):
    """
    Parses lipoprotein motif search BSML/XML output file(s) into the evidence
    database.

    path: a file, .list file, or comma-separated combination of either
    index: SQLite3 connection to the lipoprotein_motif evidence DB
    """
    curs = index.cursor()
    parsing_errors = 0
    qry = """
        INSERT INTO lipoprotein_motif_hit (qry_id, hit_acc, hit_desc, start, stop)
        VALUES (?, ?, ?, ?, ?)
    """
    paths = get_files_from_path(path)
    # http://www.diveintopython3.net/xml.html
    for file in paths:
        print("INFO: parsing: {0}".format(file))
        tree = etree.parse(file)
        for elem in tree.iterfind('Definitions/Sequences/Sequence'):
            qry_id = elem.attrib['id']
            for feature in elem.iterfind('Feature-tables/Feature-table/Feature'):
                title = feature.attrib['title']
                # feature titles look like: "<accession> :: <description>"
                m = re.match(r"(\S+) \:\: (.+)", title)
                if m:
                    hit_acc = m.group(1)
                    hit_desc = m.group(2)
                    interval = feature.find('Interval-loc')
                    curs.execute(qry, (qry_id, hit_acc, hit_desc, interval.attrib['startpos'],
                                       interval.attrib['endpos']))
                else:
                    parsing_errors += 1
                    print("WARN: Unable to parse accession and description from title in file: {0}".format(file))
    # record the source so this path isn't re-parsed on the next run
    curs.execute("INSERT INTO data_sources (source_path) VALUES (?)", (path,))
    curs.close()
    # CONSISTENCY FIX: the sibling index_* parsers report a final parse-error
    # summary; this one previously counted errors but never summarized them.
    if parsing_errors > 0:
        print("WARN: There were {0} parsing errors (match rows skipped) when processing {1}\n".format(parsing_errors, path))
def index_rapsearch2_m8(path=None, ev_index=None, ref_index=None):
    """
    Parses RAPSearch2 m8 (tabular BLAST-like) output file(s) into the portable
    evidence database, caching reference annotation for each new accession.

    path: a file, .list file, or comma-separated combination of either
    ev_index: SQLite3 connection to the portable evidence DB being populated
    ref_index: SQLite3 connection to the large reference BLAST index
    """
    ref_curs = ref_index.cursor()
    ev_curs = ev_index.cursor()
    parsing_errors = 0
    # The E-value column can be either the E-value directly or log(E-value), depending on
    # the version and options used. Luckily, the header line tells us which it is.
    logged_eval = False
    qry = """
       INSERT INTO blast_hit (qry_id, sbj_id, align_len, qry_start, qry_end, sbj_start,
                              sbj_end, perc_identity, eval, bit_score)
       VALUES (?,?,?,?,?,?,?,?,?,?)
    """
    paths = get_files_from_path(path)
    # accessions whose reference annotation is already cached in ev_index
    accessions_found = cache_ev_blast_accessions(ev_index)
    for file in paths:
        print("INFO: parsing: {0}".format(file))
        for line in open(file):
            if line[0] == '#':
                m = re.search(r'log\(e\-value\)', line)
                if m:
                    logged_eval = True
                continue
            line = line.rstrip()
            cols = line.split("\t")
            # skip malformed rows
            if len(cols) != 12:
                continue
            if logged_eval == True:
                try:
                    cols[10] = math.pow(10, float(cols[10]))
                except OverflowError:
                    # RapSearch2 sometimes reports miniscule e-values, such a log(eval) of > 1000
                    # These are outside of the range of Python's double. In my checking of these
                    # though, their alignments don't warrant a low E-value at all. Skipping them.
                    print("WARN: Skipping a RAPSearch2 row: overflow error converting E-value ({0}) on line: {1}".format(cols[10], line))
                    parsing_errors += 1
                    continue
            accession = cols[1]
            # SWISS-PROT accessions are in the format: sp|A4YVG3|RRF_BRASO
            if accession.startswith('sp|'):
                abbrev, accession, sprot_id = accession.split('|')
            elif accession.startswith('UniRef100_'):
                # BUG FIX: str.lstrip() strips a character SET, not a prefix, so
                # it could also remove leading U/n/i/R/e/f/1/0/_ characters of
                # the real accession.  Slice off the literal prefix instead.
                accession = accession[len('UniRef100_'):]
            ev_curs.execute(qry, (cols[0], accession, int(cols[3]), int(cols[6]), int(cols[7]), int(cols[8]),
                                  int(cols[9]), float(cols[2]), cols[10], float(cols[11])))
            if cols[1] not in accessions_found:
                cache_blast_hit_data(version=accession, ev_curs=ev_curs, ref_curs=ref_curs)
                accessions_found[cols[1]] = True
    # record the source so this path isn't re-parsed on the next run
    ev_curs.execute("INSERT INTO data_sources (source_path) VALUES (?)", (path,))
    ev_curs.close()
    # BUG FIX: ref_curs was previously left open (ev_curs was closed but not this one)
    ref_curs.close()
    if parsing_errors > 0:
        print("WARN: There were {0} parsing errors (match rows skipped) when processing {1}\n".format(parsing_errors, path))
def index_tmhmm_raw(path=None, index=None):
    """
    Parses raw TMHMM output file(s) into the TMHMM evidence database (one
    tmhmm_hit row per query plus its tmhmm_path segment rows), then records
    the source path and commits.

    path: a file, .list file, or comma-separated combination of either
    index: SQLite3 connection to the TMHMM evidence DB

    Notes from the esteemed M Giglio:
    The GO term to use would be GO:0016021 "integral component of membrane"
    Or if you want to be more conservative you could go with GO:0016020 "membrane"
    Depends on the evidence. For the prok pipe we are pretty conservative, we require five TMHMM
    domains and then we call it putative integral membrane protein.
    On ECO - in fact Marcus and I are the developers of ECO. It is an ontology of evidence types.
    An annotation to an ECO term is used in conjunction with another annotation, like a GO term
    (but many other types of annotation can, and are, used with ECO). It provides additional
    information about the annotation. In fact for GO, the assignment of an evidence term along
    with a GO term is a required part of a GO annotation. (ECO terms are the "evidence codes" in GO.)
    INPUT: Expected TMHMM input (all HTML lines are skipped)
    # CHARM010_V2.mRNA.887 Length: 904
    # CHARM010_V2.mRNA.887 Number of predicted TMHs: 6
    # CHARM010_V2.mRNA.887 Exp number of AAs in TMHs: 133.07638
    # CHARM010_V2.mRNA.887 Exp number, first 60 AAs: 21.83212
    # CHARM010_V2.mRNA.887 Total prob of N-in: 0.99994
    # CHARM010_V2.mRNA.887 POSSIBLE N-term signal sequence
    CHARM010_V2.mRNA.887 TMHMM2.0 inside 1 11
    CHARM010_V2.mRNA.887 TMHMM2.0 TMhelix 12 34
    CHARM010_V2.mRNA.887 TMHMM2.0 outside 35 712
    CHARM010_V2.mRNA.887 TMHMM2.0 TMhelix 713 735
    CHARM010_V2.mRNA.887 TMHMM2.0 inside 736 755
    CHARM010_V2.mRNA.887 TMHMM2.0 TMhelix 756 773
    CHARM010_V2.mRNA.887 TMHMM2.0 outside 774 782
    CHARM010_V2.mRNA.887 TMHMM2.0 TMhelix 783 805
    CHARM010_V2.mRNA.887 TMHMM2.0 inside 806 809
    CHARM010_V2.mRNA.887 TMHMM2.0 TMhelix 810 832
    CHARM010_V2.mRNA.887 TMHMM2.0 outside 833 871
    CHARM010_V2.mRNA.887 TMHMM2.0 TMhelix 872 894
    CHARM010_V2.mRNA.887 TMHMM2.0 inside 895 904
    """
    curs = index.cursor()
    # declared for symmetry with the other parsers; nothing below increments it
    parsing_errors = 0
    hit_qry = """
        INSERT INTO tmhmm_hit (qry_id, tmh_count, num_aa_in_tmhs, num_aa_in_f60, total_prob_n_in)
        VALUES (?, ?, ?, ?, ?)
    """
    path_qry = """
        INSERT INTO tmhmm_path (hit_id, locus, start, stop)
        VALUES (?, ?, ?, ?)
    """
    paths = get_files_from_path(path)
    for file in paths:
        print("INFO: parsing: {0}".format(file))
        # per-file parser state: results are flushed when the next record begins
        last_qry_id = None
        current_hit_id = None
        current_path = list()
        tmh_count = num_aa_in_tmhs = num_aa_in_f60 = total_prob_n_in = 0
        for line in open(file):
            # skip the HTML lines
            if line.startswith('<'): continue
            # Match is a small regex-wrapper helper defined elsewhere in this file
            m = Match()
            if m.match("# (.+?)\s+Length: \d+", line): # this line marks a new result
                current_id = m.m.group(1)
                # purge the previous result
                if last_qry_id is not None:
                    curs.execute(hit_qry, (last_qry_id, tmh_count, num_aa_in_tmhs, num_aa_in_f60, total_prob_n_in))
                    current_hit_id = curs.lastrowid
                    for span in current_path:
                        curs.execute(path_qry, (current_hit_id, span[2], int(span[3]), int(span[4])))
                # reset
                last_qry_id = current_id
                # NOTE(review): current_helix_count is assigned here but never
                # read anywhere in this function -- appears vestigial.
                current_helix_count = tmh_count = num_aa_in_tmhs = num_aa_in_f60 = total_prob_n_in = 0
                current_path = list()
            elif m.match(".+Number of predicted TMHs:\s+(\d+)", line):
                tmh_count = int(m.m.group(1))
            elif m.match(".+Exp number of AAs in TMHs:\s+([0-9\.]+)", line):
                num_aa_in_tmhs = float(m.m.group(1))
            elif m.match(".+Exp number, first 60 AAs:\s+([0-9\.]+)", line):
                num_aa_in_f60 = float(m.m.group(1))
            elif m.match(".+Total prob of N-in:\s+([0-9\.]+)", line):
                total_prob_n_in = float(m.m.group(1))
            else:
                # any other comment line is ignored
                if line[0] == '#': continue
                # 5-column data lines are topology path segments for the current query
                cols = line.split()
                if len(cols) == 5:
                    current_path.append(cols)
        # don't forget to do the last entry
        # NOTE(review): if a file contains no records, last_qry_id is still None
        # here and a row with a NULL qry_id is inserted -- confirm input files
        # always contain at least one record.
        curs.execute(hit_qry, (last_qry_id, tmh_count, num_aa_in_tmhs, num_aa_in_f60, total_prob_n_in))
        current_hit_id = curs.lastrowid
        for span in current_path:
            curs.execute(path_qry, (current_hit_id, span[2], int(span[3]), int(span[4])))
    # record the source so this path isn't re-parsed on the next run
    curs.execute("INSERT INTO data_sources (source_path) VALUES (?)", (path,))
    curs.close()
    index.commit()
    if parsing_errors > 0:
        print("WARN: There were {0} parsing errors (match rows skipped) when processing {1}\n".format(parsing_errors, path))
def initialize_blast_results_db(conn):
    """
    Creates the schema for a portable BLAST evidence database on the given
    SQLite3 connection: hit rows plus cached reference entry/accession/GO/EC
    tables and a data_sources bookkeeping table.
    """
    ddl_statements = (
        """
        CREATE TABLE blast_hit (
            id integer primary key,
            qry_id text,
            sbj_id text,
            align_len integer,
            qry_start integer,
            qry_end integer,
            sbj_start integer,
            sbj_end integer,
            perc_identity real,
            eval real,
            bit_score real
        )
        """,
        """
        CREATE TABLE entry (
            id text primary key,
            full_name text,
            organism text,
            symbol text
        )
        """,
        """
        CREATE TABLE entry_acc (
            id text not NULL,
            accession text not NULL,
            res_length integer,
            is_characterized integer DEFAULT 0
        )
        """,
        """
        CREATE TABLE entry_go (
            id text not NULL,
            go_id text not NULL
        )
        """,
        """
        CREATE TABLE entry_ec (
            id text not NULL,
            ec_num text not NULL
        )
        """,
        """
        CREATE TABLE data_sources (
            id integer primary key,
            source_path text
        )
        """,
    )
    curs = conn.cursor()
    for ddl in ddl_statements:
        curs.execute(ddl)
    curs.close()
    conn.commit()
def initialize_hmm_results_db(conn):
    """
    Create the (empty) schema used to index parsed HMM search results.

    Tables created on the passed sqlite3 connection:
      hmm_hit      - one row per domain hit: query/HMM coordinates, scores,
                     e-values and the trusted/noise/gathering cutoffs observed
      hmm          - one row per HMM model (name, length, cutoffs, EC, symbol)
      hmm_go       - GO term assignments for an HMM
      hmm_ec       - EC number assignments for an HMM
      data_sources - paths of the raw result files loaded into this index

    The new schema is committed before returning.
    """
    ddl_statements = (
        """CREATE TABLE hmm_hit (
               id integer primary key,
               qry_id text,
               qry_start integer,
               qry_end integer,
               hmm_accession text,
               hmm_length integer,
               hmm_start integer,
               hmm_end integer,
               domain_score real,
               total_score real,
               total_score_tc real,
               total_score_nc real,
               total_score_gc real,
               total_hit_eval real,
               domain_score_tc real,
               domain_score_nc real,
               domain_score_gc real,
               domain_hit_eval real
           )""",
        """CREATE TABLE hmm (
               id integer primary key autoincrement,
               accession text,
               version text,
               name text,
               hmm_com_name text,
               hmm_len int,
               hmm_comment text,
               trusted_global_cutoff float,
               trusted_domain_cutoff float,
               noise_global_cutoff float,
               noise_domain_cutoff float,
               gathering_global_cutoff float,
               gathering_domain_cutoff float,
               ec_num text,
               gene_symbol text,
               isotype text
           )""",
        """CREATE TABLE hmm_go (
               id integer primary key autoincrement,
               hmm_id int not NULL,
               go_id text
           )""",
        """CREATE TABLE hmm_ec (
               id integer primary key autoincrement,
               hmm_id int not NULL,
               ec_id text
           )""",
        """CREATE TABLE data_sources (
               id integer primary key,
               source_path text
           )""",
    )

    cursor = conn.cursor()
    for statement in ddl_statements:
        cursor.execute(statement)
    cursor.close()
    conn.commit()
def initialize_lipoprotein_motif_results_db(conn):
    """
    Create the (empty) schema used to index lipoprotein motif search results.

    Tables created on the passed sqlite3 connection:
      lipoprotein_motif_hit - one row per motif hit on a query sequence
      data_sources          - paths of the raw result files loaded here

    The new schema is committed before returning.
    """
    ddl_statements = (
        """CREATE TABLE lipoprotein_motif_hit (
               id integer primary key,
               qry_id text,
               hit_acc text,
               hit_desc text,
               start integer,
               stop integer
           )""",
        """CREATE TABLE data_sources (
               id integer primary key,
               source_path text
           )""",
    )

    cursor = conn.cursor()
    for statement in ddl_statements:
        cursor.execute(statement)
    cursor.close()
    conn.commit()
def initialize_tmhmm_results_db(conn):
    """
    Create the (empty) schema used to index TMHMM results.

    Column notes for tmhmm_hit:
      tmh_count:      Number of predicted Trans-Membrane Helices (TMHs)
      num_aa_in_tmhs: Exp number of AAs in TMHs
      num_aa_in_f60:  Exp number of AAs in TMHs within first 60 AAs

    Tables created on the passed sqlite3 connection:
      tmhmm_hit    - per-query summary statistics reported by TMHMM
      tmhmm_path   - individual topology spans belonging to a hit
      data_sources - paths of the raw result files loaded into this index

    The new schema is committed before returning.
    """
    # NOTE: in the previous version the column notes above were a bare string
    # expression after the first statement (a no-op), not the docstring.
    curs = conn.cursor()
    curs.execute("""
       CREATE TABLE tmhmm_hit (
            id integer primary key,
            qry_id text,
            tmh_count float,
            num_aa_in_tmhs float,
            num_aa_in_f60 float,
            total_prob_n_in float
       )
    """)
    curs.execute("""
       CREATE TABLE tmhmm_path (
            hit_id integer,
            locus text,
            start integer,
            stop integer
       )
    """)
    curs.execute("""
       CREATE TABLE data_sources (
            id integer primary key,
            source_path text
       )
    """)
    curs.close()
    conn.commit()
def initialize_polypeptides( log_fh, fasta_file, default_name ):
    '''
    Reads a FASTA file of (presumably) polypeptide sequences and creates a dict of Polypeptide
    objects, keyed by ID, with bioannotation.FunctionalAnnotation objects attached.

    Each polypeptide starts out annotated with the passed default product name;
    one INFO line per sequence is written to log_fh recording that assignment.
    '''
    fasta_entries = biocode.utils.fasta_dict_from_file( fasta_file )
    polypeptides = dict()

    for seq_id, entry in fasta_entries.items():
        residues = entry['s']
        poly = biocode.things.Polypeptide( id=seq_id, length=len(residues), residues=residues )
        poly.annotation = biocode.annotation.FunctionalAnnotation(product_name=default_name)
        log_fh.write("INFO: {0}: Set initial product name to '{1}'\n".format(seq_id, default_name))
        polypeptides[seq_id] = poly

    return polypeptides
def parse_evidence_config(conf):
    """
    Parses the 'evidence' section of the annotation config file, and returns a dict where each key
    is the label of that evidence and the value is a dict of the other key/value pairs.

    Raises:
        Exception: if two evidence entries share the same label, since later
        processing keys evidence by label.
    """
    ev = dict()

    for entry in conf['evidence']:
        # make sure there aren't duplicates
        label = entry['label']
        if label in ev:
            raise Exception("ERROR: duplicate label found in evidence track: {0}".format(label))

        # Copy every attribute of the entry except the label itself.
        # Bug fix: the original used "key is not 'label'", an identity test on a
        # string literal that only happens to work when CPython interns both
        # strings (and is a SyntaxWarning on 3.8+); equality is the correct test.
        ev[label] = {key: value for key, value in entry.items() if key != 'label'}

    return ev
def perform_final_checks(polypeptides=None, config=None, log_fh=None):
    """
    Does a round of checks we want to perform on an annotated set of polypeptides
    before exporting them.  Currently:

    - Make sure a gene product name is assigned.  This might accidentally become "None" if
      a match existed to a subject which didn't have a name properly entered in the index.
      Such entries are reset to config['general']['default_product_name'] and a
      WARNING line is written to log_fh.
    """
    for seq_id, polypeptide in polypeptides.items():
        # Only repair entries that lost their product name entirely.
        if polypeptide.annotation.product_name is not None:
            continue

        log_fh.write("WARNING: {0}: Somehow made it through annotation with no product name. Setting to default\n".format(seq_id))
        polypeptide.annotation.product_name = config['general']['default_product_name']
class Match(object):
    """
    Convenience wrapper around re.match() that remembers the most recent match
    object, so a chain of if/elif pattern tests can both test a pattern and
    immediately use its captured groups.

    Example use:

       matcher = Match()

       if matcher.match(pattern1, string1):
           do_something( print(matcher.m.group(1)) )
       elif matcher.match(pattern2, string2):
           do_something_else( print(matcher.m.group(1)) )
    """
    def __init__(self):
        # Most recent re.match() result; None when no attempt has succeeded yet
        # or the last attempt failed.
        self.m = None

    def match(self, *args, **kwds):
        """Run re.match() with the given arguments, stash the result, and return whether it matched."""
        result = re.match(*args, **kwds)
        self.m = result
        return result is not None
if __name__ == '__main__':
    # Run the whole pipeline under cProfile; stats are written to a
    # per-process file (FALCON.profile.<pid>.dat) for later inspection
    # with pstats.  main() is defined elsewhere in this file.
    cProfile.run('main()', "FALCON.profile.{0}.dat".format(os.getpid()))
|
jorvis/Attributor
|
assign_functional_annotation.py
|
Python
|
apache-2.0
| 52,345
|
[
"BLAST"
] |
53c50376c16b00f87ee6c064de46867831f1dce7e3fa9d86e7fb64a611d4b738
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.