id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7 values |
|---|---|---|
/AyiinXd-0.0.8-cp311-cp311-macosx_10_9_universal2.whl/fipper/node_modules/@types/node/tls.d.ts | declare module 'tls' {
import { X509Certificate } from 'node:crypto';
import * as net from 'node:net';
import * as stream from 'stream';
// Maximum number of client-initiated TLS renegotiations allowed before the
// connection is treated as abusive. NOTE(review): Node documents a default of
// 3 — confirm against the runtime docs; the value is not visible in this file.
const CLIENT_RENEG_LIMIT: number;
// Time window (NOTE(review): presumably in seconds — confirm) over which
// CLIENT_RENEG_LIMIT is applied.
const CLIENT_RENEG_WINDOW: number;
/**
 * Distinguished-name components of a certificate subject or issuer,
 * as exposed on `PeerCertificate.subject` / `PeerCertificate.issuer`.
 */
interface Certificate {
    /**
     * Country code.
     */
    C: string;
    /**
     * Street.
     * NOTE(review): in X.509 the `ST` attribute normally denotes the
     * state/province (`stateOrProvinceName`) — confirm against Node docs
     * before relying on this gloss.
     */
    ST: string;
    /**
     * Locality.
     */
    L: string;
    /**
     * Organization.
     */
    O: string;
    /**
     * Organizational unit.
     */
    OU: string;
    /**
     * Common name.
     */
    CN: string;
}
/**
 * A parsed X.509 certificate as returned by e.g.
 * `TLSSocket.getPeerCertificate()`. Key-type-specific fields (`bits`,
 * `exponent`, `modulus`, `pubkey`, `asn1Curve`, `nistCurve`) are optional
 * because they only apply to RSA or EC keys respectively.
 */
interface PeerCertificate {
    /**
     * `true` if a Certificate Authority (CA), `false` otherwise.
     * @since v18.13.0
     */
    ca: boolean;
    /**
     * The DER encoded X.509 certificate data.
     */
    raw: Buffer;
    /**
     * The certificate subject.
     */
    subject: Certificate;
    /**
     * The certificate issuer, described in the same terms as the `subject`.
     */
    issuer: Certificate;
    /**
     * The date-time the certificate is valid from.
     */
    valid_from: string;
    /**
     * The date-time the certificate is valid to.
     */
    valid_to: string;
    /**
     * The certificate serial number, as a hex string.
     */
    serialNumber: string;
    /**
     * The SHA-1 digest of the DER encoded certificate.
     * It is returned as a `:` separated hexadecimal string.
     */
    fingerprint: string;
    /**
     * The SHA-256 digest of the DER encoded certificate.
     * It is returned as a `:` separated hexadecimal string.
     */
    fingerprint256: string;
    /**
     * The SHA-512 digest of the DER encoded certificate.
     * It is returned as a `:` separated hexadecimal string.
     */
    fingerprint512: string;
    /**
     * The extended key usage, a set of OIDs.
     */
    ext_key_usage?: string[];
    /**
     * A string containing concatenated names for the subject,
     * an alternative to the `subject` names.
     */
    subjectaltname?: string;
    /**
     * An array describing the AuthorityInfoAccess, used with OCSP.
     */
    infoAccess?: NodeJS.Dict<string[]>;
    /**
     * For RSA keys: The RSA bit size.
     *
     * For EC keys: The key size in bits.
     */
    bits?: number;
    /**
     * The RSA exponent, as a string in hexadecimal number notation.
     */
    exponent?: string;
    /**
     * The RSA modulus, as a hexadecimal string.
     */
    modulus?: string;
    /**
     * The public key.
     */
    pubkey?: Buffer;
    /**
     * The ASN.1 name of the OID of the elliptic curve.
     * Well-known curves are identified by an OID.
     * While it is unusual, it is possible that the curve
     * is identified by its mathematical properties,
     * in which case it will not have an OID.
     */
    asn1Curve?: string;
    /**
     * The NIST name for the elliptic curve, if it has one
     * (not all well-known curves have been assigned names by NIST).
     */
    nistCurve?: string;
}
/**
 * A `PeerCertificate` augmented with its issuer chain, as returned by
 * `TLSSocket.getPeerCertificate(true)`.
 */
interface DetailedPeerCertificate extends PeerCertificate {
    /**
     * The issuer certificate object.
     * For self-signed certificates, this may be a circular reference
     * (the certificate is its own issuer).
     */
    issuerCertificate: DetailedPeerCertificate;
}
/**
 * Negotiated cipher-suite information, as returned by
 * `TLSSocket.getCipher()`.
 */
interface CipherNameAndProtocol {
    /**
     * The cipher name (OpenSSL-style, e.g. `'AES128-SHA256'`).
     */
    name: string;
    /**
     * SSL/TLS protocol version.
     */
    version: string;
    /**
     * IETF name for the cipher suite (e.g. `'TLS_RSA_WITH_AES_128_CBC_SHA256'`).
     */
    standardName: string;
}
/**
 * Describes the ephemeral key exchange of a client connection, as returned
 * by `TLSSocket.getEphemeralKeyInfo()`.
 */
interface EphemeralKeyInfo {
    /**
     * The supported types are 'DH' and 'ECDH'.
     */
    type: string;
    /**
     * The name property is available only when type is 'ECDH'
     * (e.g. the curve name, such as `'prime256v1'`).
     */
    name?: string | undefined;
    /**
     * The size of parameter of an ephemeral key exchange, in bits.
     */
    size: number;
}
/**
 * A private key together with its (optional) decryption passphrase, for use
 * in `SecureContextOptions`-style `key` arrays.
 */
interface KeyObject {
    /**
     * Private keys in PEM format.
     */
    pem: string | Buffer;
    /**
     * Optional passphrase used to decrypt the `pem` key.
     */
    passphrase?: string | undefined;
}
/**
 * A PFX/PKCS#12 bundle together with its (optional) passphrase.
 * NOTE(review): the name is a historical misspelling of "PfxObject"; it is
 * kept as-is because renaming would break consumers of this public type.
 */
interface PxfObject {
    /**
     * PFX or PKCS12 encoded private key and certificate chain.
     */
    buf: string | Buffer;
    /**
     * Optional passphrase used to decrypt `buf`.
     */
    passphrase?: string | undefined;
}
/**
 * Options accepted by the `TLSSocket` constructor, combining secure-context
 * options with per-connection options.
 */
interface TLSSocketOptions extends SecureContextOptions, CommonConnectionOptions {
    /**
     * If true the TLS socket will be instantiated in server-mode.
     * Defaults to false.
     */
    isServer?: boolean | undefined;
    /**
     * An optional net.Server instance.
     */
    server?: net.Server | undefined;
    /**
     * An optional Buffer instance containing a TLS session.
     */
    session?: Buffer | undefined;
    /**
     * If true, specifies that the OCSP status request extension will be
     * added to the client hello and an 'OCSPResponse' event will be
     * emitted on the socket before establishing a secure communication.
     */
    requestOCSP?: boolean | undefined;
}
/**
 * Performs transparent encryption of written data and all required TLS
 * negotiation.
 *
 * Instances of `tls.TLSSocket` implement the duplex `Stream` interface.
 *
 * Methods that return TLS connection metadata (e.g. {@link TLSSocket.getPeerCertificate}) will only return data while the
 * connection is open.
 * @since v0.11.4
 */
class TLSSocket extends net.Socket {
    /**
     * Construct a new tls.TLSSocket object from an existing TCP socket.
     */
    constructor(socket: net.Socket, options?: TLSSocketOptions);
    /**
     * This property is `true` if the peer certificate was signed by one of the CAs
     * specified when creating the `tls.TLSSocket` instance, otherwise `false`.
     * @since v0.11.4
     */
    authorized: boolean;
    /**
     * Returns the reason why the peer's certificate has not been verified. This
     * property is set only when `tlsSocket.authorized === false`.
     * @since v0.11.4
     */
    authorizationError: Error;
    /**
     * Always returns `true`. This may be used to distinguish TLS sockets from regular`net.Socket` instances.
     * @since v0.11.4
     */
    encrypted: true;
    /**
     * String containing the selected ALPN protocol.
     * Before a handshake has completed, this value is always null.
     * When a handshake is completed but no ALPN protocol was selected, tlsSocket.alpnProtocol equals false.
     */
    alpnProtocol: string | false | null;
    /**
     * Returns an object representing the local certificate. The returned object has
     * some properties corresponding to the fields of the certificate.
     *
     * See {@link TLSSocket.getPeerCertificate} for an example of the certificate
     * structure.
     *
     * If there is no local certificate, an empty object will be returned. If the
     * socket has been destroyed, `null` will be returned.
     * @since v11.2.0
     */
    getCertificate(): PeerCertificate | object | null;
    /**
     * Returns an object containing information on the negotiated cipher suite.
     *
     * For example:
     *
     * ```json
     * {
     *     "name": "AES128-SHA256",
     *     "standardName": "TLS_RSA_WITH_AES_128_CBC_SHA256",
     *     "version": "TLSv1.2"
     * }
     * ```
     *
     * See [SSL\_CIPHER\_get\_name](https://www.openssl.org/docs/man1.1.1/man3/SSL_CIPHER_get_name.html) for more information.
     * @since v0.11.4
     */
    getCipher(): CipherNameAndProtocol;
    /**
     * Returns an object representing the type, name, and size of parameter of
     * an ephemeral key exchange in `perfect forward secrecy` on a client
     * connection. It returns an empty object when the key exchange is not
     * ephemeral. As this is only supported on a client socket; `null` is returned
     * if called on a server socket. The supported types are `'DH'` and `'ECDH'`. The`name` property is available only when type is `'ECDH'`.
     *
     * For example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }`.
     * @since v5.0.0
     */
    getEphemeralKeyInfo(): EphemeralKeyInfo | object | null;
    /**
     * As the `Finished` messages are message digests of the complete handshake
     * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can
     * be used for external authentication procedures when the authentication
     * provided by SSL/TLS is not desired or is not enough.
     *
     * Corresponds to the `SSL_get_finished` routine in OpenSSL and may be used
     * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929).
     * @since v9.9.0
     * @return The latest `Finished` message that has been sent to the socket as part of a SSL/TLS handshake, or `undefined` if no `Finished` message has been sent yet.
     */
    getFinished(): Buffer | undefined;
    /**
     * Returns an object representing the peer's certificate. If the peer does not
     * provide a certificate, an empty object will be returned. If the socket has been
     * destroyed, `null` will be returned.
     *
     * If the full certificate chain was requested, each certificate will include an`issuerCertificate` property containing an object representing its issuer's
     * certificate.
     * @since v0.11.4
     * @param detailed Include the full certificate chain if `true`, otherwise include just the peer's certificate.
     * @return A certificate object.
     */
    getPeerCertificate(detailed: true): DetailedPeerCertificate;
    getPeerCertificate(detailed?: false): PeerCertificate;
    getPeerCertificate(detailed?: boolean): PeerCertificate | DetailedPeerCertificate;
    /**
     * As the `Finished` messages are message digests of the complete handshake
     * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can
     * be used for external authentication procedures when the authentication
     * provided by SSL/TLS is not desired or is not enough.
     *
     * Corresponds to the `SSL_get_peer_finished` routine in OpenSSL and may be used
     * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929).
     * @since v9.9.0
     * @return The latest `Finished` message that is expected or has actually been received from the socket as part of a SSL/TLS handshake, or `undefined` if there is no `Finished` message so
     * far.
     */
    getPeerFinished(): Buffer | undefined;
    /**
     * Returns a string containing the negotiated SSL/TLS protocol version of the
     * current connection. The value `'unknown'` will be returned for connected
     * sockets that have not completed the handshaking process. The value `null` will
     * be returned for server sockets or disconnected client sockets.
     *
     * Protocol versions are:
     *
     * * `'SSLv3'`
     * * `'TLSv1'`
     * * `'TLSv1.1'`
     * * `'TLSv1.2'`
     * * `'TLSv1.3'`
     *
     * See the OpenSSL [`SSL_get_version`](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html) documentation for more information.
     * @since v5.7.0
     */
    getProtocol(): string | null;
    /**
     * Returns the TLS session data or `undefined` if no session was
     * negotiated. On the client, the data can be provided to the `session` option of {@link connect} to resume the connection. On the server, it may be useful
     * for debugging.
     *
     * See `Session Resumption` for more information.
     *
     * Note: `getSession()` works only for TLSv1.2 and below. For TLSv1.3, applications
     * must use the `'session'` event (it also works for TLSv1.2 and below).
     * @since v0.11.4
     */
    getSession(): Buffer | undefined;
    /**
     * See [SSL\_get\_shared\_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) for more information.
     * @since v12.11.0
     * @return List of signature algorithms shared between the server and the client in the order of decreasing preference.
     */
    getSharedSigalgs(): string[];
    /**
     * For a client, returns the TLS session ticket if one is available, or`undefined`. For a server, always returns `undefined`.
     *
     * It may be useful for debugging.
     *
     * See `Session Resumption` for more information.
     * @since v0.11.4
     */
    getTLSTicket(): Buffer | undefined;
    /**
     * See `Session Resumption` for more information.
     * @since v0.5.6
     * @return `true` if the session was reused, `false` otherwise.
     */
    isSessionReused(): boolean;
    /**
     * The `tlsSocket.renegotiate()` method initiates a TLS renegotiation process.
     * Upon completion, the `callback` function will be passed a single argument
     * that is either an `Error` (if the request failed) or `null`.
     *
     * This method can be used to request a peer's certificate after the secure
     * connection has been established.
     *
     * When running as the server, the socket will be destroyed with an error after`handshakeTimeout` timeout.
     *
     * For TLSv1.3, renegotiation cannot be initiated, it is not supported by the
     * protocol.
     * @since v0.11.8
     * @param callback If `renegotiate()` returned `true`, callback is attached once to the `'secure'` event. If `renegotiate()` returned `false`, `callback` will be called in the next tick with
     * an error, unless the `tlsSocket` has been destroyed, in which case `callback` will not be called at all.
     * @return `true` if renegotiation was initiated, `false` otherwise.
     */
    renegotiate(
        options: {
            rejectUnauthorized?: boolean | undefined;
            requestCert?: boolean | undefined;
        },
        callback: (err: Error | null) => void
    ): undefined | boolean;
    /**
     * The `tlsSocket.setMaxSendFragment()` method sets the maximum TLS fragment size.
     * Returns `true` if setting the limit succeeded; `false` otherwise.
     *
     * Smaller fragment sizes decrease the buffering latency on the client: larger
     * fragments are buffered by the TLS layer until the entire fragment is received
     * and its integrity is verified; large fragments can span multiple roundtrips
     * and their processing can be delayed due to packet loss or reordering. However,
     * smaller fragments add extra TLS framing bytes and CPU overhead, which may
     * decrease overall server throughput.
     * @since v0.11.11
     * @param [size=16384] The maximum TLS fragment size. The maximum value is `16384`.
     */
    setMaxSendFragment(size: number): boolean;
    /**
     * Disables TLS renegotiation for this `TLSSocket` instance. Once called, attempts
     * to renegotiate will trigger an `'error'` event on the `TLSSocket`.
     * @since v8.4.0
     */
    disableRenegotiation(): void;
    /**
     * When enabled, TLS packet trace information is written to `stderr`. This can be
     * used to debug TLS connection problems.
     *
     * The format of the output is identical to the output of`openssl s_client -trace` or `openssl s_server -trace`. While it is produced by
     * OpenSSL's `SSL_trace()` function, the format is undocumented, can change
     * without notice, and should not be relied on.
     * @since v12.2.0
     */
    enableTrace(): void;
    /**
     * Returns the peer certificate as an `X509Certificate` object.
     *
     * If there is no peer certificate, or the socket has been destroyed,`undefined` will be returned.
     * @since v15.9.0
     */
    getPeerX509Certificate(): X509Certificate | undefined;
    /**
     * Returns the local certificate as an `X509Certificate` object.
     *
     * If there is no local certificate, or the socket has been destroyed,`undefined` will be returned.
     * @since v15.9.0
     */
    getX509Certificate(): X509Certificate | undefined;
    /**
     * Keying material is used for validations to prevent different kind of attacks in
     * network protocols, for example in the specifications of IEEE 802.1X.
     *
     * Example
     *
     * ```js
     * const keyingMaterial = tlsSocket.exportKeyingMaterial(
     *     128,
     *     'client finished');
     *
     * /*
     *  Example return value of keyingMaterial:
     *  <Buffer 76 26 af 99 c5 56 8e 42 09 91 ef 9f 93 cb ad 6c 7b 65 f8 53 f1 d8 d9
     *     12 5a 33 b8 b5 25 df 7b 37 9f e0 e2 4f b8 67 83 a3 2f cd 5d 41 42 4c 91
     *     74 ef 2c ... 78 more bytes>
     *
     * ```
     *
     * See the OpenSSL [`SSL_export_keying_material`](https://www.openssl.org/docs/man1.1.1/man3/SSL_export_keying_material.html) documentation for more
     * information.
     * @since v13.10.0, v12.17.0
     * @param length number of bytes to retrieve from keying material
     * @param label an application specific label, typically this will be a value from the [IANA Exporter Label
     * Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels).
     * @param context Optionally provide a context.
     * @return requested bytes of the keying material
     */
    exportKeyingMaterial(length: number, label: string, context: Buffer): Buffer;
    addListener(event: string, listener: (...args: any[]) => void): this;
    addListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this;
    addListener(event: 'secureConnect', listener: () => void): this;
    addListener(event: 'session', listener: (session: Buffer) => void): this;
    addListener(event: 'keylog', listener: (line: Buffer) => void): this;
    emit(event: string | symbol, ...args: any[]): boolean;
    emit(event: 'OCSPResponse', response: Buffer): boolean;
    emit(event: 'secureConnect'): boolean;
    emit(event: 'session', session: Buffer): boolean;
    emit(event: 'keylog', line: Buffer): boolean;
    on(event: string, listener: (...args: any[]) => void): this;
    on(event: 'OCSPResponse', listener: (response: Buffer) => void): this;
    on(event: 'secureConnect', listener: () => void): this;
    on(event: 'session', listener: (session: Buffer) => void): this;
    on(event: 'keylog', listener: (line: Buffer) => void): this;
    once(event: string, listener: (...args: any[]) => void): this;
    once(event: 'OCSPResponse', listener: (response: Buffer) => void): this;
    once(event: 'secureConnect', listener: () => void): this;
    once(event: 'session', listener: (session: Buffer) => void): this;
    once(event: 'keylog', listener: (line: Buffer) => void): this;
    prependListener(event: string, listener: (...args: any[]) => void): this;
    prependListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this;
    prependListener(event: 'secureConnect', listener: () => void): this;
    prependListener(event: 'session', listener: (session: Buffer) => void): this;
    prependListener(event: 'keylog', listener: (line: Buffer) => void): this;
    prependOnceListener(event: string, listener: (...args: any[]) => void): this;
    prependOnceListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this;
    prependOnceListener(event: 'secureConnect', listener: () => void): this;
    prependOnceListener(event: 'session', listener: (session: Buffer) => void): this;
    prependOnceListener(event: 'keylog', listener: (line: Buffer) => void): this;
}
/**
 * Options shared by both client (`ConnectionOptions`) and server
 * (`TlsOptions`, `TLSSocketOptions`) connection setup.
 */
interface CommonConnectionOptions {
    /**
     * An optional TLS context object from tls.createSecureContext()
     */
    secureContext?: SecureContext | undefined;
    /**
     * When enabled, TLS packet trace information is written to `stderr`. This can be
     * used to debug TLS connection problems.
     * @default false
     */
    enableTrace?: boolean | undefined;
    /**
     * If true the server will request a certificate from clients that
     * connect and attempt to verify that certificate. Defaults to
     * false.
     */
    requestCert?: boolean | undefined;
    /**
     * An array of strings or a Buffer naming possible ALPN protocols.
     * (Protocols should be ordered by their priority.)
     */
    ALPNProtocols?: string[] | Uint8Array[] | Uint8Array | undefined;
    /**
     * SNICallback(servername, cb) <Function> A function that will be
     * called if the client supports SNI TLS extension. Two arguments
     * will be passed when called: servername and cb. SNICallback should
     * invoke cb(null, ctx), where ctx is a SecureContext instance.
     * (tls.createSecureContext(...) can be used to get a proper
     * SecureContext.) If SNICallback wasn't provided the default callback
     * with high-level API will be used (see below).
     */
    SNICallback?: ((servername: string, cb: (err: Error | null, ctx?: SecureContext) => void) => void) | undefined;
    /**
     * If true the server will reject any connection which is not
     * authorized with the list of supplied CAs. This option only has an
     * effect if requestCert is true.
     * @default true
     */
    rejectUnauthorized?: boolean | undefined;
}
/**
 * Options accepted by TLS server creation, combining secure-context,
 * common-connection, and `net.Server` options.
 */
interface TlsOptions extends SecureContextOptions, CommonConnectionOptions, net.ServerOpts {
    /**
     * Abort the connection if the SSL/TLS handshake does not finish in the
     * specified number of milliseconds. A 'tlsClientError' is emitted on
     * the tls.Server object whenever a handshake times out. Default:
     * 120000 (120 seconds).
     */
    handshakeTimeout?: number | undefined;
    /**
     * The number of seconds after which a TLS session created by the
     * server will no longer be resumable. See Session Resumption for more
     * information. Default: 300.
     */
    sessionTimeout?: number | undefined;
    /**
     * 48-bytes of cryptographically strong pseudo-random data.
     */
    ticketKeys?: Buffer | undefined;
    /**
     *
     * @param socket the server-side socket negotiating the connection.
     * @param identity identity parameter sent from the client.
     * @return pre-shared key that must either be
     * a buffer or `null` to stop the negotiation process. Returned PSK must be
     * compatible with the selected cipher's digest.
     *
     * When negotiating TLS-PSK (pre-shared keys), this function is called
     * with the identity provided by the client.
     * If the return value is `null` the negotiation process will stop and an
     * "unknown_psk_identity" alert message will be sent to the other party.
     * If the server wishes to hide the fact that the PSK identity was not known,
     * the callback must provide some random data as `psk` to make the connection
     * fail with "decrypt_error" before negotiation is finished.
     * PSK ciphers are disabled by default, and using TLS-PSK thus
     * requires explicitly specifying a cipher suite with the `ciphers` option.
     * More information can be found in the RFC 4279.
     */
    pskCallback?(socket: TLSSocket, identity: string): DataView | NodeJS.TypedArray | null;
    /**
     * hint to send to a client to help
     * with selecting the identity during TLS-PSK negotiation. Will be ignored
     * in TLS 1.3. Upon failing to set pskIdentityHint `tlsClientError` will be
     * emitted with `ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED` code.
     * (NOTE(review): the misspelling "IDENTIY" matches the actual Node.js
     * error-code string — do not "correct" it here.)
     */
    pskIdentityHint?: string | undefined;
}
/**
 * Result object returned by the client-side TLS-PSK `pskCallback`
 * (see `ConnectionOptions.pskCallback`).
 * NOTE(review): the name is a historical misspelling of "Negotiation";
 * it is kept as-is because renaming would break consumers of this public type.
 */
interface PSKCallbackNegotation {
    // Pre-shared key; must be compatible with the selected cipher's digest.
    psk: DataView | NodeJS.TypedArray;
    // PSK identity; must use UTF-8 encoding.
    identity: string;
}
/**
 * Options accepted by client-side TLS connection establishment
 * (e.g. `tls.connect()`).
 */
interface ConnectionOptions extends SecureContextOptions, CommonConnectionOptions {
    host?: string | undefined; // Host the client should connect to.
    port?: number | undefined; // Port the client should connect to.
    path?: string | undefined; // Creates unix socket connection to path. If this option is specified, `host` and `port` are ignored.
    socket?: stream.Duplex | undefined; // Establish secure connection on a given socket rather than creating a new socket
    checkServerIdentity?: typeof checkServerIdentity | undefined;
    servername?: string | undefined; // SNI TLS Extension
    session?: Buffer | undefined; // TLS session data for session resumption.
    minDHSize?: number | undefined; // Minimum size (in bits) of the DH parameter accepted — presumably; confirm against the Node docs.
    lookup?: net.LookupFunction | undefined; // Custom DNS lookup function.
    timeout?: number | undefined;
    /**
     * When negotiating TLS-PSK (pre-shared keys), this function is called
     * with optional identity `hint` provided by the server or `null`
     * in case of TLS 1.3 where `hint` was removed.
     * It will be necessary to provide a custom `tls.checkServerIdentity()`
     * for the connection as the default one will try to check hostname/IP
     * of the server against the certificate but that's not applicable for PSK
     * because there won't be a certificate present.
     * More information can be found in the RFC 4279.
     *
     * @param hint message sent from the server to help client
     * decide which identity to use during negotiation.
     * Always `null` if TLS 1.3 is used.
     * @returns Return `null` to stop the negotiation process. `psk` must be
     * compatible with the selected cipher's digest.
     * `identity` must use UTF-8 encoding.
     */
    pskCallback?(hint: string | null): PSKCallbackNegotation | null;
}
/**
 * Accepts encrypted connections using TLS or SSL.
 * @since v0.3.2
 */
class Server extends net.Server {
    constructor(secureConnectionListener?: (socket: TLSSocket) => void);
    constructor(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void);
    /**
     * The `server.addContext()` method adds a secure context that will be used if
     * the client request's SNI name matches the supplied `hostname` (or wildcard).
     *
     * When there are multiple matching contexts, the most recently added one is
     * used.
     * @since v0.5.3
     * @param hostname A SNI host name or wildcard (e.g. `'*'`)
     * @param context An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc).
     */
    addContext(hostname: string, context: SecureContextOptions): void;
    /**
     * Returns the session ticket keys.
     *
     * See `Session Resumption` for more information.
     * @since v3.0.0
     * @return A 48-byte buffer containing the session ticket keys.
     */
    getTicketKeys(): Buffer;
    /**
     * The `server.setSecureContext()` method replaces the secure context of an
     * existing server. Existing connections to the server are not interrupted.
     * @since v11.0.0
     * @param options An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc).
     */
    setSecureContext(options: SecureContextOptions): void;
    /**
     * Sets the session ticket keys.
     *
     * Changes to the ticket keys are effective only for future server connections.
     * Existing or currently pending server connections will use the previous keys.
     *
     * See `Session Resumption` for more information.
     * @since v3.0.0
     * @param keys A 48-byte buffer containing the session ticket keys.
     */
    setTicketKeys(keys: Buffer): void;
    /**
     * Typed overloads for the events this server emits, in addition to
     * those inherited from `net.Server` (events.EventEmitter):
     * 1. tlsClientError
     * 2. newSession
     * 3. OCSPRequest
     * 4. resumeSession
     * 5. secureConnection
     * 6. keylog
     */
    addListener(event: string, listener: (...args: any[]) => void): this;
    addListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this;
    addListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this;
    addListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this;
    addListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this;
    addListener(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this;
    addListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this;
    emit(event: string | symbol, ...args: any[]): boolean;
    emit(event: 'tlsClientError', err: Error, tlsSocket: TLSSocket): boolean;
    emit(event: 'newSession', sessionId: Buffer, sessionData: Buffer, callback: () => void): boolean;
    emit(event: 'OCSPRequest', certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void): boolean;
    emit(event: 'resumeSession', sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void): boolean;
    emit(event: 'secureConnection', tlsSocket: TLSSocket): boolean;
    emit(event: 'keylog', line: Buffer, tlsSocket: TLSSocket): boolean;
    on(event: string, listener: (...args: any[]) => void): this;
    on(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this;
    on(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this;
    on(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this;
    on(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this;
    on(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this;
    on(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this;
    once(event: string, listener: (...args: any[]) => void): this;
    once(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this;
    once(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this;
    once(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this;
    once(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this;
    once(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this;
    once(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this;
    prependListener(event: string, listener: (...args: any[]) => void): this;
    prependListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this;
    prependListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this;
    prependListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this;
    prependListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this;
    prependListener(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this;
    prependListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this;
    prependOnceListener(event: string, listener: (...args: any[]) => void): this;
    prependOnceListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this;
    prependOnceListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this;
    prependOnceListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this;
    prependOnceListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this;
    prependOnceListener(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this;
    prependOnceListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this;
}
/**
 * A pair of sockets where one (`encrypted`) carries the ciphertext and the
 * other (`cleartext`) carries the plaintext.
 * @deprecated since v0.11.3 Use `tls.TLSSocket` instead.
 */
interface SecurePair {
    // Socket carrying the encrypted (wire-side) data.
    encrypted: TLSSocket;
    // Socket carrying the decrypted (application-side) data.
    cleartext: TLSSocket;
}
/**
 * TLS protocol version names accepted by secure-context configuration
 * (presumably the `minVersion`/`maxVersion` options — those members fall
 * outside this chunk; confirm against `SecureContextOptions`).
 */
type SecureVersion = 'TLSv1.3' | 'TLSv1.2' | 'TLSv1.1' | 'TLSv1';
interface SecureContextOptions {
    /**
     * Optionally override the trusted CA certificates. Default is to trust
     * the well-known CAs curated by Mozilla. Mozilla's CAs are completely
     * replaced when CAs are explicitly specified using this option.
     */
    ca?: string | Buffer | Array<string | Buffer> | undefined;
    /**
     * Cert chains in PEM format. One cert chain should be provided per
     * private key. Each cert chain should consist of the PEM formatted
     * certificate for a provided private key, followed by the PEM
     * formatted intermediate certificates (if any), in order, and not
     * including the root CA (the root CA must be pre-known to the peer,
     * see ca). When providing multiple cert chains, they do not have to
     * be in the same order as their private keys in key. If the
     * intermediate certificates are not provided, the peer will not be
     * able to validate the certificate, and the handshake will fail.
     */
    cert?: string | Buffer | Array<string | Buffer> | undefined;
    /**
     * Colon-separated list of supported signature algorithms. The list
     * can contain digest algorithms (SHA256, MD5 etc.), public key
     * algorithms (RSA-PSS, ECDSA etc.), combination of both (e.g
     * 'RSA+SHA384') or TLS v1.3 scheme names (e.g. rsa_pss_pss_sha512).
     */
    sigalgs?: string | undefined;
    /**
     * Cipher suite specification, replacing the default. For more
     * information, see modifying the default cipher suite. Permitted
     * ciphers can be obtained via tls.getCiphers(). Cipher names must be
     * uppercased in order for OpenSSL to accept them.
     */
    ciphers?: string | undefined;
    /**
     * Name of an OpenSSL engine which can provide the client certificate.
     */
    clientCertEngine?: string | undefined;
    /**
     * PEM formatted CRLs (Certificate Revocation Lists).
     */
    crl?: string | Buffer | Array<string | Buffer> | undefined;
    /**
     * `'auto'` or custom Diffie-Hellman parameters, required for non-ECDHE perfect forward secrecy.
     * If omitted or invalid, the parameters are silently discarded and DHE ciphers will not be available.
     * ECDHE-based perfect forward secrecy will still be available.
     */
    dhparam?: string | Buffer | undefined;
    /**
     * A string describing a named curve or a colon separated list of curve
     * NIDs or names, for example P-521:P-384:P-256, to use for ECDH key
     * agreement. Set to auto to select the curve automatically. Use
     * crypto.getCurves() to obtain a list of available curve names. On
     * recent releases, openssl ecparam -list_curves will also display the
     * name and description of each available elliptic curve. Default:
     * tls.DEFAULT_ECDH_CURVE.
     */
    ecdhCurve?: string | undefined;
    /**
     * Attempt to use the server's cipher suite preferences instead of the
     * client's. When true, causes SSL_OP_CIPHER_SERVER_PREFERENCE to be
     * set in secureOptions
     */
    honorCipherOrder?: boolean | undefined;
    /**
     * Private keys in PEM format. PEM allows the option of private keys
     * being encrypted. Encrypted keys will be decrypted with
     * options.passphrase. Multiple keys using different algorithms can be
     * provided either as an array of unencrypted key strings or buffers,
     * or an array of objects in the form {pem: <string|buffer>[,
     * passphrase: <string>]}. The object form can only occur in an array.
     * object.passphrase is optional. Encrypted keys will be decrypted with
     * object.passphrase if provided, or options.passphrase if it is not.
     */
    key?: string | Buffer | Array<string | Buffer | KeyObject> | undefined;
    /**
     * Name of an OpenSSL engine to get private key from. Should be used
     * together with privateKeyIdentifier.
     */
    privateKeyEngine?: string | undefined;
    /**
     * Identifier of a private key managed by an OpenSSL engine. Should be
     * used together with privateKeyEngine. Should not be set together with
     * key, because both options define a private key in different ways.
     */
    privateKeyIdentifier?: string | undefined;
    /**
     * Optionally set the maximum TLS version to allow. One
     * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the
     * `secureProtocol` option, use one or the other.
     * **Default:** `'TLSv1.3'`, unless changed using CLI options. Using
     * `--tls-max-v1.2` sets the default to `'TLSv1.2'`. Using `--tls-max-v1.3` sets the default to
     * `'TLSv1.3'`. If multiple of the options are provided, the highest maximum is used.
     */
    maxVersion?: SecureVersion | undefined;
    /**
     * Optionally set the minimum TLS version to allow. One
     * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the
     * `secureProtocol` option, use one or the other. It is not recommended to use
     * less than TLSv1.2, but it may be required for interoperability.
     * **Default:** `'TLSv1.2'`, unless changed using CLI options. Using
     * `--tls-min-v1.0` sets the default to `'TLSv1'`. Using `--tls-min-v1.1` sets the default to
     * `'TLSv1.1'`. Using `--tls-min-v1.3` sets the default to
     * `'TLSv1.3'`. If multiple of the options are provided, the lowest minimum is used.
     */
    minVersion?: SecureVersion | undefined;
    /**
     * Shared passphrase used for a single private key and/or a PFX.
     */
    passphrase?: string | undefined;
    /**
     * PFX or PKCS12 encoded private key and certificate chain. pfx is an
     * alternative to providing key and cert individually. PFX is usually
     * encrypted, if it is, passphrase will be used to decrypt it. Multiple
     * PFX can be provided either as an array of unencrypted PFX buffers,
     * or an array of objects in the form {buf: <string|buffer>[,
     * passphrase: <string>]}. The object form can only occur in an array.
     * object.passphrase is optional. Encrypted PFX will be decrypted with
     * object.passphrase if provided, or options.passphrase if it is not.
     */
    pfx?: string | Buffer | Array<string | Buffer | PxfObject> | undefined;
    /**
     * Optionally affect the OpenSSL protocol behavior, which is not
     * usually necessary. This should be used carefully if at all! Value is
     * a numeric bitmask of the SSL_OP_* options from OpenSSL Options
     */
    secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options
    /**
     * Legacy mechanism to select the TLS protocol version to use, it does
     * not support independent control of the minimum and maximum version,
     * and does not support limiting the protocol to TLSv1.3. Use
     * minVersion and maxVersion instead. The possible values are listed as
     * SSL_METHODS, use the function names as strings. For example, use
     * 'TLSv1_1_method' to force TLS version 1.1, or 'TLS_method' to allow
     * any TLS protocol version up to TLSv1.3. It is not recommended to use
     * TLS versions less than 1.2, but it may be required for
     * interoperability. Default: none, see minVersion.
     */
    secureProtocol?: string | undefined;
    /**
     * Opaque identifier used by servers to ensure session state is not
     * shared between applications. Unused by clients.
     */
    sessionIdContext?: string | undefined;
    /**
     * 48-bytes of cryptographically strong pseudo-random data.
     * See Session Resumption for more information.
     */
    ticketKeys?: Buffer | undefined;
    /**
     * The number of seconds after which a TLS session created by the
     * server will no longer be resumable. See Session Resumption for more
     * information. Default: 300.
     */
    sessionTimeout?: number | undefined;
}
/**
 * An opaque credentials object as returned by `tls.createSecureContext()`.
 */
interface SecureContext {
    /** Opaque native handle; not intended for direct use by applications. */
    context: any;
}
/**
 * Verifies the certificate `cert` is issued to `hostname`.
 *
 * Returns [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object, populating it with `reason`, `host`, and `cert` on
 * failure. On success, returns [undefined](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Undefined_type).
 *
 * This function is intended to be used in combination with the `checkServerIdentity` option that can be passed to {@link connect} and as
 * such operates on a `certificate object`. For other purposes, consider using `x509.checkHost()` instead.
 *
 * This function can be overwritten by providing an alternative function as the `options.checkServerIdentity` option that is passed to `tls.connect()`. The
 * overwriting function can call `tls.checkServerIdentity()` of course, to augment
 * the checks done with additional verification.
 *
 * This function is only called if the certificate passed all other checks, such as
 * being issued by trusted CA (`options.ca`).
 *
 * Earlier versions of Node.js incorrectly accepted certificates for a given `hostname` if a matching `uniformResourceIdentifier` subject alternative name
 * was present (see [CVE-2021-44531](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44531)). Applications that wish to accept `uniformResourceIdentifier` subject alternative names can use
 * a custom `options.checkServerIdentity` function that implements the desired behavior.
 * @since v0.8.4
 * @param hostname The host name or IP address to verify the certificate against.
 * @param cert A `certificate object` representing the peer's certificate.
 */
function checkServerIdentity(hostname: string, cert: PeerCertificate): Error | undefined;
/**
 * Creates a new {@link Server}. The `secureConnectionListener`, if provided, is
 * automatically set as a listener for the `'secureConnection'` event.
 *
 * The `ticketKeys` option is automatically shared between `cluster` module
 * workers.
 *
 * The following illustrates a simple echo server:
 *
 * ```js
 * const tls = require('tls');
 * const fs = require('fs');
 *
 * const options = {
 *   key: fs.readFileSync('server-key.pem'),
 *   cert: fs.readFileSync('server-cert.pem'),
 *
 *   // This is necessary only if using client certificate authentication.
 *   requestCert: true,
 *
 *   // This is necessary only if the client uses a self-signed certificate.
 *   ca: [ fs.readFileSync('client-cert.pem') ]
 * };
 *
 * const server = tls.createServer(options, (socket) => {
 *   console.log('server connected',
 *               socket.authorized ? 'authorized' : 'unauthorized');
 *   socket.write('welcome!\n');
 *   socket.setEncoding('utf8');
 *   socket.pipe(socket);
 * });
 * server.listen(8000, () => {
 *   console.log('server bound');
 * });
 * ```
 *
 * The server can be tested by connecting to it using the example client from {@link connect}.
 * @since v0.3.2
 * @param secureConnectionListener Added as a listener for the `'secureConnection'` event.
 */
function createServer(secureConnectionListener?: (socket: TLSSocket) => void): Server;
function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server;
/**
 * The `callback` function, if specified, will be added as a listener for the `'secureConnect'` event.
 *
 * `tls.connect()` returns a {@link TLSSocket} object.
 *
 * Unlike the `https` API, `tls.connect()` does not enable the
 * SNI (Server Name Indication) extension by default, which may cause some
 * servers to return an incorrect certificate or reject the connection
 * altogether. To enable SNI, set the `servername` option in addition
 * to `host`.
 *
 * The following illustrates a client for the echo server example from {@link createServer}:
 *
 * ```js
 * // Assumes an echo server that is listening on port 8000.
 * const tls = require('tls');
 * const fs = require('fs');
 *
 * const options = {
 *   // Necessary only if the server requires client certificate authentication.
 *   key: fs.readFileSync('client-key.pem'),
 *   cert: fs.readFileSync('client-cert.pem'),
 *
 *   // Necessary only if the server uses a self-signed certificate.
 *   ca: [ fs.readFileSync('server-cert.pem') ],
 *
 *   // Necessary only if the server's cert isn't for "localhost".
 *   checkServerIdentity: () => { return null; },
 * };
 *
 * const socket = tls.connect(8000, options, () => {
 *   console.log('client connected',
 *               socket.authorized ? 'authorized' : 'unauthorized');
 *   process.stdin.pipe(socket);
 *   process.stdin.resume();
 * });
 * socket.setEncoding('utf8');
 * socket.on('data', (data) => {
 *   console.log(data);
 * });
 * socket.on('end', () => {
 *   console.log('server ends connection');
 * });
 * ```
 * @since v0.11.3
 * @param options Connection configuration; see `ConnectionOptions`.
 * @param secureConnectListener Added as a listener for the `'secureConnect'` event.
 */
function connect(options: ConnectionOptions, secureConnectListener?: () => void): TLSSocket;
function connect(port: number, host?: string, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket;
function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket;
/**
 * Creates a new secure pair object with two streams, one of which reads and writes
 * the encrypted data and the other of which reads and writes the cleartext data.
 * Generally, the encrypted stream is piped to/from an incoming encrypted data
 * stream and the cleartext one is used as a replacement for the initial encrypted
 * stream.
 *
 * `tls.createSecurePair()` returns a `tls.SecurePair` object with `cleartext` and `encrypted` stream properties.
 *
 * Using `cleartext` has the same API as {@link TLSSocket}.
 *
 * The `tls.createSecurePair()` method is now deprecated in favor of `tls.TLSSocket()`. For example, the code:
 *
 * ```js
 * pair = tls.createSecurePair(/* ... *\/);
 * pair.encrypted.pipe(socket);
 * socket.pipe(pair.encrypted);
 * ```
 *
 * can be replaced by:
 *
 * ```js
 * secureSocket = tls.TLSSocket(socket, options);
 * ```
 *
 * where `secureSocket` has the same API as `pair.cleartext`.
 * @since v0.3.2
 * @deprecated Since v0.11.3 - Use {@link TLSSocket} instead.
 * @param context A secure context object as returned by `tls.createSecureContext()`
 * @param isServer `true` to specify that this TLS connection should be opened as a server.
 * @param requestCert `true` to specify whether a server should request a certificate from a connecting client. Only applies when `isServer` is `true`.
 * @param rejectUnauthorized If not `false` a server automatically reject clients with invalid certificates. Only applies when `isServer` is `true`.
 */
function createSecurePair(context?: SecureContext, isServer?: boolean, requestCert?: boolean, rejectUnauthorized?: boolean): SecurePair;
/**
 * {@link createServer} sets the default value of the `honorCipherOrder` option
 * to `true`, other APIs that create secure contexts leave it unset.
 *
 * {@link createServer} uses a 128 bit truncated SHA1 hash value generated
 * from `process.argv` as the default value of the `sessionIdContext` option, other
 * APIs that create secure contexts have no default value.
 *
 * The `tls.createSecureContext()` method creates a `SecureContext` object. It is
 * usable as an argument to several `tls` APIs, such as {@link createServer} and `server.addContext()`, but has no public methods.
 *
 * A key is _required_ for ciphers that use certificates. Either `key` or `pfx` can be used to provide it.
 *
 * If the `ca` option is not given, then Node.js will default to using [Mozilla's publicly trusted list of
 * CAs](https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt).
 * @since v0.11.13
 * @param options The context configuration; see `SecureContextOptions`.
 */
function createSecureContext(options?: SecureContextOptions): SecureContext;
/**
 * Returns an array with the names of the supported TLS ciphers. The names are
 * lower-case for historical reasons, but must be uppercased to be used in
 * the `ciphers` option of {@link createSecureContext}.
 *
 * Not all supported ciphers are enabled by default. See `Modifying the default TLS cipher suite`.
 *
 * Cipher names that start with `'tls_'` are for TLSv1.3, all the others are for
 * TLSv1.2 and below.
 *
 * ```js
 * console.log(tls.getCiphers()); // ['aes128-gcm-sha256', 'aes128-sha', ...]
 * ```
 * @since v0.10.2
 * @returns An array of supported cipher names, in lower case.
 */
function getCiphers(): string[];
/**
 * The default curve name to use for ECDH key agreement in a tls server.
 * The default value is `'auto'`. See {@link createSecureContext} for further
 * information.
 */
let DEFAULT_ECDH_CURVE: string;
/**
 * The default value of the `maxVersion` option of
 * {@link createSecureContext}. It can be assigned any of the supported TLS
 * protocol versions, `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. **Default:**
 * `'TLSv1.3'`, unless changed using CLI options. Using `--tls-max-v1.2` sets
 * the default to `'TLSv1.2'`. Using `--tls-max-v1.3` sets the default to
 * `'TLSv1.3'`. If multiple of the options are provided, the highest maximum
 * is used.
 */
let DEFAULT_MAX_VERSION: SecureVersion;
/**
 * The default value of the `minVersion` option of {@link createSecureContext}.
 * It can be assigned any of the supported TLS protocol versions,
 * `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. **Default:** `'TLSv1.2'`, unless
 * changed using CLI options. Using `--tls-min-v1.0` sets the default to
 * `'TLSv1'`. Using `--tls-min-v1.1` sets the default to `'TLSv1.1'`. Using
 * `--tls-min-v1.3` sets the default to `'TLSv1.3'`. If multiple of the options
 * are provided, the lowest minimum is used.
 */
let DEFAULT_MIN_VERSION: SecureVersion;
/**
 * The default value of the `ciphers` option of {@link createSecureContext}.
 * It can be assigned any of the supported OpenSSL ciphers.
 * Defaults to the content of `crypto.constants.defaultCoreCipherList`, unless
 * changed using CLI options using `--tls-default-ciphers`.
 */
let DEFAULT_CIPHERS: string;
/**
 * An immutable array of strings representing the root certificates (in PEM
 * format) used for verifying peer certificates. This is the default value
 * of the `ca` option to {@link createSecureContext}.
 */
const rootCertificates: ReadonlyArray<string>;
}
/** Alias of the `'tls'` module exposed under the `node:` scheme. */
declare module 'node:tls' {
    export * from 'tls';
}
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojox/date/php.js | if(!dojo._hasResource["dojox.date.php"]){
dojo._hasResource["dojox.date.php"]=true;
dojo.provide("dojox.date.php");
dojo.require("dojo.date");
dojo.require("dojox.string.tokenize");
// Format `date` according to the PHP date()-style pattern `format`.
dojox.date.php.format = function(date, format){
    // Delegate to a throw-away DateFormat instance.
    return new dojox.date.php.DateFormat(format).format(date);
};
// Compile a PHP date()-style pattern into a token list that format() fills in.
dojox.date.php.DateFormat=function(_3){
    // Lazily build (and cache on the prototype) a regex matching every
    // recognized token: each single-character function on the prototype is a
    // format token; "\\." additionally matches a backslash-escaped literal.
    if(!this.regex){
        var _4=[];
        for(var _5 in this.constructor.prototype){
            if(dojo.isString(_5)&&_5.length==1&&dojo.isFunction(this[_5])){
                _4.push(_5);
            }
        }
        this.constructor.prototype.regex=new RegExp("(?:(\\\\.)|(["+_4.join("")+"]))","g");
    }
    var _6=[];
    // Tokenize the pattern. Recognized tokens are recorded as
    // [position, token] pairs in _6 so format() can overwrite them later;
    // escaped characters are unwrapped; plain text passes through untouched.
    this.tokens=dojox.string.tokenize(_3,this.regex,function(_7,_8,i){
        if(_8){
            _6.push([i,_8]);
            return _8;
        }
        if(_7){
            // Escaped character: drop the backslash, keep the literal.
            return _7.charAt(1);
        }
    });
    this.replacements=_6;
};
// Implementations of the PHP date() format tokens. Each single-character
// method renders one token for `this.date` (set by format()).
dojo.extend(dojox.date.php.DateFormat, {
    // Lookup tables shared by all token methods.
    weekdays: ["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],
    weekdays_3: ["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],
    months: ["January","February","March","April","May","June","July","August","September","October","November","December"],
    months_3: ["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],
    monthdays: [31,28,31,30,31,30,31,31,30,31,30,31],
    // Render `date`: evaluate every recorded token and re-join the template.
    format: function(date){
        this.date = date;
        for(var i = 0, spec; spec = this.replacements[i]; i++){
            this.tokens[spec[0]] = this[spec[1]]();
        }
        return this.tokens.join("");
    },
    // d: day of the month, two digits with leading zeros.
    d: function(){
        var j = this.j();
        return (j.length == 1) ? "0" + j : j;
    },
    // D: three-letter day name.
    D: function(){
        return this.weekdays_3[this.date.getDay()];
    },
    // j: day of the month without leading zeros.
    j: function(){
        return this.date.getDate() + "";
    },
    // l: full day name.
    l: function(){
        return this.weekdays[this.date.getDay()];
    },
    // N: ISO-8601 day of the week, 1 (Monday) through 7 (Sunday).
    N: function(){
        // FIX: w() returns a string, and "0" is truthy, so Sunday was never
        // mapped to 7. Compare numerically instead.
        var w = parseInt(this.w(), 10);
        return (!w) ? 7 : w;
    },
    // S: English ordinal suffix for the day of the month.
    S: function(){
        switch(this.date.getDate()){
            case 11:
            case 12:
            case 13:
                return "th";
            case 1:
            case 21:
            case 31:
                return "st";
            case 2:
            case 22:
                return "nd";
            case 3:
            case 23:
                return "rd";
            default:
                return "th";
        }
    },
    // w: numeric day of the week, 0 (Sunday) through 6 (Saturday).
    w: function(){
        return this.date.getDay() + "";
    },
    // z: day of the year, starting from 0.
    z: function(){
        // NOTE(review): millisecond arithmetic; a DST transition shifts the
        // difference by an hour, which can make the floor one day short.
        var diff = this.date.getTime() - new Date(this.date.getFullYear(), 0, 1).getTime();
        return Math.floor(diff / 86400000) + "";
    },
    // W: ISO-8601 week number of the year.
    W: function(){
        var week;
        var jan1Day = new Date(this.date.getFullYear(), 0, 1).getDay() + 1;
        var day = this.date.getDay() + 1;
        var doy = parseInt(this.z());
        if(doy <= (8 - jan1Day) && jan1Day > 4){
            // The date falls in the last ISO week of the previous year.
            var prevYear = new Date(this.date.getFullYear() - 1, this.date.getMonth(), this.date.getDate());
            if(jan1Day == 5 || (jan1Day == 6 && dojo.date.isLeapYear(prevYear))){
                week = 53;
            }else{
                week = 52;
            }
        }else{
            var yearLength;
            // FIX: L() returns "0"/"1" and Boolean("0") is true, so every
            // year was treated as a leap year. Compare the string instead.
            if(this.L() == "1"){
                yearLength = 366;
            }else{
                yearLength = 365;
            }
            if((yearLength - doy) < (4 - day)){
                // The date falls in week 1 of the next year.
                week = 1;
            }else{
                var j = doy + (7 - day) + (jan1Day - 1);
                week = Math.ceil(j / 7);
                if(jan1Day > 4){
                    --week;
                }
            }
        }
        return week;
    },
    // F: full month name.
    F: function(){
        return this.months[this.date.getMonth()];
    },
    // m: numeric month, two digits with leading zeros.
    m: function(){
        var n = this.n();
        return (n.length == 1) ? "0" + n : n;
    },
    // M: three-letter month name.
    M: function(){
        return this.months_3[this.date.getMonth()];
    },
    // n: numeric month without leading zeros.
    n: function(){
        return this.date.getMonth() + 1 + "";
    },
    // t: number of days in the given month.
    t: function(){
        // FIX: Boolean("0") is always true, and this.getMonth is not a
        // function (the month lives on this.date) -- the original threw for
        // every non-February date and returned 29 for February of any year.
        return (this.L() == "1" && this.date.getMonth() == 1) ? 29 : this.monthdays[this.date.getMonth()];
    },
    // L: "1" if this is a leap year, "0" otherwise.
    L: function(){
        return (dojo.date.isLeapYear(this.date)) ? "1" : "0";
    },
    // o: ISO-8601 week-numbering year -- not implemented (returns undefined).
    o: function(){
    },
    // Y: full four-digit year.
    Y: function(){
        return this.date.getFullYear() + "";
    },
    // y: two-digit year.
    y: function(){
        return this.Y().slice(-2);
    },
    // a: lowercase ante/post meridiem.
    a: function(){
        return this.date.getHours() >= 12 ? "pm" : "am";
    },
    // b: uppercase variant of a().
    b: function(){
        return this.a().toUpperCase();
    },
    // B: Swatch Internet time (.beats), "000" through "999".
    B: function(){
        // Beats are measured from UTC+1 (Biel Mean Time).
        var bmtShift = this.date.getTimezoneOffset() + 60;
        // FIX: was this.getSeconds(), which is not a function on DateFormat.
        var secs = (this.date.getHours() * 3600) + (this.date.getMinutes() * 60) + this.date.getSeconds() + (bmtShift * 60);
        var beats = Math.abs(Math.floor(secs / 86.4) % 1000) + "";
        // FIX: PHP pads beats to three digits; the original padded to two.
        while(beats.length < 3){
            beats = "0" + beats;
        }
        return beats;
    },
    // g: 12-hour clock without leading zeros, 1 through 12.
    g: function(){
        // FIX: midnight produced "0"; PHP maps hour 0 (and 12) to 12.
        var h = this.date.getHours() % 12;
        return ((h == 0) ? 12 : h) + "";
    },
    // G: 24-hour clock without leading zeros.
    G: function(){
        return this.date.getHours() + "";
    },
    // h: 12-hour clock, two digits with leading zeros.
    h: function(){
        var g = this.g();
        return (g.length == 1) ? "0" + g : g;
    },
    // H: 24-hour clock, two digits with leading zeros.
    H: function(){
        var G = this.G();
        return (G.length == 1) ? "0" + G : G;
    },
    // i: minutes, two digits with leading zeros.
    i: function(){
        var mins = this.date.getMinutes() + "";
        return (mins.length == 1) ? "0" + mins : mins;
    },
    // s: seconds, two digits with leading zeros.
    s: function(){
        var secs = this.date.getSeconds() + "";
        return (secs.length == 1) ? "0" + secs : secs;
    },
    // e: timezone identifier (delegates to dojo.date).
    e: function(){
        return dojo.date.getTimezoneName(this.date);
    },
    // I: daylight saving time flag -- not implemented (returns undefined).
    I: function(){
    },
    // O: difference to Greenwich time, e.g. "+0200".
    O: function(){
        var off = Math.abs(this.date.getTimezoneOffset());
        var hours = Math.floor(off / 60) + "";
        var mins = (off % 60) + "";
        if(hours.length == 1){
            hours = "0" + hours;
        }
        if(mins.length == 1){
            // FIX: the zero-pad was assigned to the hours variable,
            // corrupting the whole offset string.
            mins = "0" + mins;
        }
        return ((this.date.getTimezoneOffset() < 0) ? "+" : "-") + hours + mins;
    },
    // P: difference to Greenwich time with a colon, e.g. "+02:00".
    P: function(){
        var O = this.O();
        // FIX: O() yields a sign plus four digits ("+0200"); the original
        // split after one digit and produced strings like "+0:20".
        return O.substring(0, 3) + ":" + O.substring(3, 5);
    },
    // T: abbreviated timezone name.
    T: function(){
        return this.e().substring(0, 3);
    },
    // Z: timezone offset in seconds, east of UTC positive.
    Z: function(){
        return this.date.getTimezoneOffset() * -60;
    },
    // c: ISO-8601 date, e.g. "2004-02-12T15:19:21+00:00".
    c: function(){
        // FIX: used the 12-hour h(); ISO-8601 requires the 24-hour H().
        return this.Y() + "-" + this.m() + "-" + this.d() + "T" + this.H() + ":" + this.i() + ":" + this.s() + this.P();
    },
    // r: RFC 2822 date, e.g. "Thu, 21 Dec 2000 16:01:07 +0200".
    r: function(){
        return this.D() + ", " + this.d() + " " + this.M() + " " + this.Y() + " " + this.H() + ":" + this.i() + ":" + this.s() + " " + this.O();
    },
    // U: seconds since the Unix epoch.
    U: function(){
        return Math.floor(this.date.getTime() / 1000);
    }
});
} | PypiClean |
/LigBinder-0.1.4-py3-none-any.whl/ligbinder/report.py | import os
import logging
from typing import List
import pytraj
import yaml
from ligbinder.settings import SETTINGS
from ligbinder.tree import Tree
import math
logger = logging.getLogger(__name__)
class Reporter:
    """Collect the results of a finished LigBinder run into a report directory.

    The reporter walks the node :class:`Tree`, optionally concatenates the
    trajectories along the solution path, and writes node-id/RMSD listings
    plus a YAML statistics file under ``<tree.path>/<results.report_dir>``.
    """

    def __init__(self, tree: Tree) -> None:
        # Keep a handle to the explored tree and derive the report location
        # from the global SETTINGS.
        self.tree = tree
        self.path = tree.path
        self.report_dir = os.path.join(self.path, SETTINGS["results"]["report_dir"])

    def _create_report_dir(self):
        """Create the report directory (no-op if it already exists)."""
        os.makedirs(self.report_dir, exist_ok=True)

    def _concat_trajectory(self, indices: List[int]):
        """Join the per-node trajectories along ``indices`` into a single file.

        Node 0 is skipped when collecting trajectory files.
        """
        # get filenames
        traj_files = [
            os.path.join(self.path, f"node_{index}", SETTINGS["md"]["trj_file"])
            for index in indices if index != 0
        ]
        top_file = os.path.join(self.path, SETTINGS["data_files"]["top_file"])
        ref_file = os.path.join(self.path, SETTINGS["data_files"]["ref_file"])
        ref_top_file = os.path.join(self.path, SETTINGS["data_files"]["ref_top_file"])
        full_traj_file = os.path.join(self.report_dir, SETTINGS["results"]["trj_file"])

        # load, align write
        load_mask = SETTINGS["system"]["load_mask"]
        traj = pytraj.iterload(traj_files, top=top_file, mask=load_mask)
        ref = pytraj.load(ref_file, top=ref_top_file, mask=load_mask)
        mask = SETTINGS["system"]["protein_mask"]
        # NOTE(review): assumes pytraj.rmsd superposes `traj` onto `ref` in
        # place using the protein mask before writing -- confirm with pytraj docs.
        pytraj.rmsd(traj, mask=mask, ref=ref)
        pytraj.write_traj(full_traj_file, traj)

    def _write_node_list_file(self, indices: List[int]):
        """Write the visited node ids to the report directory, one per line."""
        node_list_file = os.path.join(self.report_dir, SETTINGS["results"]["idx_file"])
        with open(node_list_file, "w") as idx_file:
            idx_file.write("\n".join(map(str, indices))+"\n")

    def _write_rmsd_file(self, indices: List[int]):
        """Write the RMSD of each node in ``indices``, one value per line."""
        rmsd_file = os.path.join(self.report_dir, SETTINGS["results"]["rms_file"])
        rmsds = [self.tree.nodes[index].rmsd for index in indices]
        with open(rmsd_file, "w") as rms_file:
            rms_file.write("\n".join(map(str, rmsds))+"\n")

    def _write_stats(self):
        """Dump run statistics (convergence, tree size, best node) as YAML."""
        stats_filename = os.path.join(
            self.report_dir, SETTINGS["results"]["stats_file"]
        )
        best_node = self.tree.get_best_node()
        report = {
            "converged": self.tree.has_converged(),
            "total_nodes": len(self.tree.nodes),
            "max_depth": max([node.depth for node in self.tree.nodes.values()]),
            "best_node": {
                "node_id": best_node.node_id,
                "rmsd": best_node.rmsd,
                "nrmsd": best_node.nrmsd,
                # pBP: negative log10 of the best node's biasing power.
                "pBP": -math.log10(self.tree.get_biasing_power(best_node)),
                "path": self.tree.get_path_to_node(best_node),
            }
        }
        with open(stats_filename, "w") as stats_file:
            yaml.dump(report, stats_file)

    def compile_results(self):
        """Assemble the final report; main entry point once the run ends.

        Trajectory/index/RMSD files are only written for converged runs;
        the YAML stats file is always written.
        """
        node_ids = self.tree.get_solution_path()
        self._create_report_dir()
        if self.tree.has_converged():
            # warning level so the outcome is visible even with terse logging
            logger.warning("SUCCESS: LIGAND BOUND!!!")
            if SETTINGS["results"]["join_trajectories"]:
                self._concat_trajectory(node_ids)
            self._write_node_list_file(node_ids)
            self._write_rmsd_file(node_ids)
        else:
            logger.warning("FAILURE: UNABLE TO BIND")
        logger.info("writing report")
        self._write_stats()
/Data-CAT-0.7.2.tar.gz/Data-CAT-0.7.2/dataCAT/property_dset.py | from __future__ import annotations
from typing import Union, Sequence, Any, Optional, Dict, TYPE_CHECKING
import h5py
import numpy as np
import pandas as pd
from assertionlib import assertion
if TYPE_CHECKING:
from numpy.typing import DTypeLike
else:
DTypeLike = 'numpy.typing.DTypeLike'
__all__ = ['create_prop_group', 'create_prop_dset', 'update_prop_dset',
'validate_prop_group', 'prop_to_dataframe', 'index_to_pandas']
PROPERTY_DOC = r"""A h5py Group containing an arbitrary number of quantum-mechanical properties.
Attributes
----------
\*args : dataset
An arbitrary user-specified property-containing dataset.
index : attribute
A reference to the dataset used as dimensional scale for all property
datasets embedded within this group.
"""
def create_prop_group(file: h5py.Group, scale: h5py.Dataset) -> h5py.Group:
    r"""Create and return the ``"properties"`` group for user-specified datasets.

    The group is created with ``track_order=True`` so its datasets keep their
    insertion order, and two attributes are attached:

    * ``"index"`` -- an object reference to **scale**, the dimensional scale
      that :func:`create_prop_dset` attaches to every property dataset.
    * ``"__doc__"`` -- a fixed bytes-string describing the group's layout.

    Parameters
    ----------
    file : :class:`h5py.File` or :class:`h5py.Group`
        The File or Group where the new ``"properties"`` group should be created.
    scale : :class:`h5py.Dataset`
        The dimensional scale which will be attached to all property datasets
        created by :func:`dataCAT.create_prop_dset`.

    Returns
    -------
    :class:`h5py.Group`
        The newly created group.

    """
    grp = file.create_group('properties', track_order=True)
    grp.attrs['index'] = scale.ref
    # ``np.bytes_`` is the modern spelling of the ``np.string_`` alias that was
    # removed in NumPy 2.0; both construct the same fixed-width bytes scalar.
    grp.attrs['__doc__'] = np.bytes_(PROPERTY_DOC)
    return grp
def create_prop_dset(group: h5py.Group, name: str, dtype: DTypeLike = None,
                     prop_names: Optional[Sequence[str]] = None,
                     **kwargs: Any) -> h5py.Dataset:
    r"""Construct a new dataset for holding a user-defined molecular property.

    Examples
    --------
    In the example below a new dataset is created for storing
    solvation energies in water, methanol and ethanol.

    .. testsetup:: python

        >>> import os
        >>> from shutil import copyfile
        >>> from dataCAT.testing_utils import HDF5_READ, HDF5_TMP as hdf5_file

        >>> if os.path.isfile(hdf5_file):
        ...     os.remove(hdf5_file)
        >>> _ = copyfile(HDF5_READ, hdf5_file)

        >>> with h5py.File(hdf5_file, 'r+') as f:
        ...     scale = f.create_dataset('index', data=np.arange(10))
        ...     scale.make_scale('index')
        ...     _ = create_prop_group(f, scale=scale)

    .. code:: python

        >>> import h5py
        >>> from dataCAT import create_prop_dset

        >>> hdf5_file = str(...)  # doctest: +SKIP

        >>> with h5py.File(hdf5_file, 'r+') as f:
        ...     group = f['properties']
        ...     prop_names = ['water', 'methanol', 'ethanol']
        ...
        ...     dset = create_prop_dset(group, 'E_solv', prop_names=prop_names)
        ...     dset_names = group['E_solv_names']
        ...
        ...     print('group', '=', group)
        ...     print('group["E_solv"]', '=', dset)
        ...     print('group["E_solv_names"]', '=', dset_names)
        group = <HDF5 group "/properties" (2 members)>
        group["E_solv"] = <HDF5 dataset "E_solv": shape (10, 3), type "<f4">
        group["E_solv_names"] = <HDF5 dataset "E_solv_names": shape (3,), type "|S8">

    .. testcleanup:: python

        >>> import os

        >>> if os.path.isfile(hdf5_file):
        ...     os.remove(hdf5_file)

    Parameters
    ----------
    group : :class:`h5py.Group`
        The ``"properties"`` group where the new dataset will be created.
    name : :class:`str`
        The name of the new dataset.
    prop_names : :class:`Sequence[str]<typing.Sequence>`, optional
        The names of each row in the to-be created dataset.
        Used for defining the length of the second axis and
        will be used as a dimensional scale for aforementioned axis.
        If :data:`None`, create a 1D dataset (with no columns) instead.
    dtype : dtype-like
        The data type of the to-be created dataset.
    \**kwargs : :data:`~Any`
        Further keyword arguments for the h5py :meth:`~h5py.Group.create_dataset` method.

    Returns
    -------
    :class:`h5py.Dataset`
        The newly created dataset.

    """
    scale_name = f'{name}_names'
    # Resolve the group-wide "index" scale; its length fixes axis 0.
    index_ref = group.attrs['index']
    index = group.file[index_ref]
    index_name = index.name.rsplit('/', 1)[-1]
    n = len(index)

    # If no prop_names are specified
    if prop_names is None:
        # 1D dataset: only the "index" scale is attached.
        dset = group.create_dataset(name, shape=(n,), maxshape=(None,), dtype=dtype, **kwargs)
        dset.dims[0].label = index_name
        dset.dims[0].attach_scale(index)
        return dset

    # Parse the names
    name_array = np.asarray(prop_names, dtype=np.string_)
    if name_array.ndim != 1:
        raise ValueError("'prop_names' expected None or a 1D array-like object; "
                         f"observed dimensionality: {name_array.ndim!r}")

    # Construct the new datasets
    m = len(name_array)
    dset = group.create_dataset(
        name,
        shape=(n, m),
        maxshape=(None, m),  # axis 0 grows with the index; axis 1 is fixed
        dtype=dtype,
        # Object dtypes cannot take a scalar fill value in h5py.
        fillvalue=(_null_value(dtype) if dtype != object else None),
        **kwargs
    )
    scale = group.create_dataset(scale_name, data=name_array, shape=(m,), dtype=name_array.dtype)
    scale.make_scale(scale_name)

    # Set the dimensional scale
    dset.dims[0].label = index_name
    dset.dims[0].attach_scale(index)
    dset.dims[1].label = scale_name
    dset.dims[1].attach_scale(scale)
    return dset
def _null_value(dtype_like: DTypeLike) -> np.generic:
dtype = np.dtype(dtype_like)
generic: type[np.generic] = dtype.type
if issubclass(generic, (np.number, np.bool_)): # Numerical scalars
return generic(False)
elif not issubclass(generic, np.void): # Strings, bytes & datetime64
return generic('')
# Structured dtypes
values = (v[0] for v in dtype.fields.values()) # type: ignore[union-attr]
data = tuple(_null_value(field_dtype) for field_dtype in values)
return np.array(data, dtype=dtype).take(0) # type: ignore[no-any-return]
def _resize_prop_dset(dset: h5py.Dataset) -> None:
    """Grow **dset** along axis 0 until it matches its dimensional scale."""
    index = dset.dims[0][0]
    target = len(index)
    if len(dset) < target:
        dset.resize(target, axis=0)
def update_prop_dset(dset: h5py.Dataset, data: np.ndarray,
                     index: Union[None, slice, np.ndarray] = None) -> None:
    """Write **data** into **dset** at the positions given by **index**.

    The dataset is first grown to match its dimensional scale
    (see :func:`_resize_prop_dset`).

    Parameters
    ----------
    dset : :class:`h5py.Dataset`
        The to-be updated h5py dataset.
    data : :class:`numpy.ndarray`
        An array containing the to-be added data.
    index : :class:`slice` or :class:`numpy.ndarray`, optional
        The indices of all to-be updated elements in **dset**;
        should match **data** in length. :data:`None` updates everything.

    :rtype: :data:`None`

    """
    where = index if index is not None else slice(None)
    try:
        _resize_prop_dset(dset)
        dset[where] = data
    except Exception:
        # Diagnose a malformed group before re-raising the original error.
        validate_prop_group(dset.parent)
        raise
def validate_prop_group(group: h5py.Group) -> None:
    """Check that **group** matches the layout produced by :func:`create_prop_group` and :func:`create_prop_dset`.

    This method is called automatically when an exception is raised by
    :func:`update_prop_dset`.

    Parameters
    ----------
    group : :class:`h5py.Group`
        The to-be validated hdf5 Group.

    Raises
    ------
    :exc:`AssertionError`
        Raised if the validation process fails.

    """  # noqa: E501
    assertion.isinstance(group, h5py.Group)

    index = group.file[group.attrs['index']]
    for name, dset in group.items():
        if name.endswith('_names'):
            continue  # skip the dimensional-scale datasets
        assertion.le(len(dset), len(index), message=f'{name!r} invalid dataset length')
        assertion.contains(dset.dims[0].keys(), 'index', message=f'{name!r} missing dataset scale')
        assertion.eq(dset.dims[0]['index'], index, message=f'{name!r} invalid dataset scale')
def prop_to_dataframe(dset: h5py.Dataset, dtype: DTypeLike = None) -> pd.DataFrame:
    """Convert the passed property Dataset into a :class:`pandas.DataFrame`.

    The row index is built from the dataset's first dimensional scale via
    :func:`index_to_pandas`; the columns come from the second dimension's
    scale, or from the dataset name for 1D datasets.

    Parameters
    ----------
    dset : :class:`h5py.Dataset`
        The property-containing Dataset of interest.
    dtype : dtype-like, optional
        The data type of the to-be returned DataFrame.
        Use :data:`None` to default to the data type of **dset**.

    Returns
    -------
    :class:`pandas.DataFrame`
        A DataFrame constructed from the passed **dset**.
    """
    # Rows: the dataset's first dimensional scale.
    index = index_to_pandas(dset.dims[0][0])

    # Columns: second-dimension scale for 2D data, the dataset name for 1D.
    if dset.ndim == 1:
        columns = pd.Index([dset.name.rsplit('/', 1)[-1]])
    else:
        dim1 = dset.dims[1]
        columns = pd.Index(dim1[0][:].astype(str), name=dim1.label)

    if dtype is None:
        return pd.DataFrame(dset[:], index=index, columns=columns)

    # Prefer letting h5py handle the dtype conversion; this often fails for
    # variable-length bytes-strings, in which case numpy converts instead.
    try:
        data = dset.astype(dtype)[:]
    except (ValueError, TypeError):
        data = dset[:].astype(dtype)
    return pd.DataFrame(data, index=index, columns=columns)
def index_to_pandas(dset: h5py.Dataset, fields: None | Sequence[str] = None) -> pd.MultiIndex:
    """Construct a :class:`pandas.MultiIndex` from the passed ``index`` dataset.

    Examples
    --------
    .. code:: python

        >>> with h5py.File(filename, "r") as f:
        ...     index_to_pandas(f["ligand"]["index"])
        MultiIndex([('O=C=O', 'O1'),
                    ('O=C=O', 'O3'),
                    ( 'CCCO', 'O4')],
                   names=['ligand', 'ligand anchor'])

    Parameters
    ----------
    dset : :class:`h5py.Dataset`
        The relevant ``index`` dataset.
    fields : :class:`Sequence[str]<collections.abc.Sequence>`, optional
        The names of the ``index`` fields to include in the returned
        MultiIndex.  If :data:`None`, include all fields.

    Returns
    -------
    :class:`pandas.MultiIndex`
        A multi-index constructed from the passed dataset.
    """
    dtype = dset.dtype

    # Fast path: a plain (non-structured) dtype yields a single level.
    if dtype.fields is None:
        if h5py.check_string_dtype(dtype):
            level = dset[:].astype(str)        # decode bytes-strings
        elif h5py.check_vlen_dtype(dtype):
            level = _vlen_to_tuples(dset[:])   # ragged arrays -> tuples
        else:
            level = dset[:]
        return pd.MultiIndex.from_arrays([level])

    # Structured dtype: one MultiIndex level per (selected) field.
    names = list(dtype.fields.keys()) if fields is None else list(fields)
    if len(names) == 0:
        raise ValueError("At least one field is required")

    records = dset[:]
    levels = []
    for name in names:
        field_dtype = dtype.fields[name][0]
        if h5py.check_string_dtype(field_dtype):
            # It's a bytes-string; decode it.
            levels.append(records[name].astype(str))
        elif h5py.check_vlen_dtype(field_dtype):
            # It's a h5py `vlen` dtype; convert into tuples.
            levels.append(_vlen_to_tuples(records[name]))
        else:
            levels.append(records[name])
    return pd.MultiIndex.from_arrays(levels, names=names)
def _vlen_to_tuples(array: np.ndarray) -> np.ndarray:
"""Convert an (object) array consisting of arrays into an (object) array of tuples."""
cache: Dict[bytes, tuple] = {}
ret = np.empty_like(array, dtype=object)
for i, ar in enumerate(array):
byte = ar.tobytes()
try:
tup = cache[byte]
except KeyError:
cache[byte] = tup = tuple(ar)
ret[i] = tup
return ret | PypiClean |
/KFlask-AppBuilder-1.0.1.tar.gz/KFlask-AppBuilder-1.0.1/flask_appbuilder/models/sqla/filters.py | import logging
from flask_babel import lazy_gettext
from ..filters import BaseFilter, FilterRelation, BaseFilterConverter
log = logging.getLogger(__name__)
__all__ = ['SQLAFilterConverter', 'FilterEqual', 'FilterNotStartsWith', 'FilterStartsWith', 'FilterContains',
'FilterNotEqual', 'FilterEndsWith', 'FilterEqualFunction', 'FilterGreater', 'FilterNotEndsWith',
'FilterRelationManyToManyEqual', 'FilterRelationOneToManyEqual', 'FilterRelationOneToManyNotEqual',
'FilterSmaller']
def get_field_setup_query(query, model, column_name):
    """
    Helper function for SQLA filters; checks for dot notation on column names.

    If dot notation is present (e.g. ``created_by.name`` where ``created_by``
    is a relationship on ``model``), the query is joined with the related
    model from the first part of the field name.

    Returns:
        tuple: ``(query, field)`` where *field* is the column attribute to
        filter on (on *model* or on the joined related model).
    """
    if hasattr(model, column_name):
        return query, getattr(model, column_name)
    # Dot notation "<relation>.<attribute>": split once instead of twice.
    rel_name, leaf_name = column_name.split('.')[:2]
    rel_model = getattr(model, rel_name).mapper.class_
    query = query.join(rel_model)
    return query, getattr(rel_model, leaf_name)
def set_value_to_type(datamodel, column_name, value):
    """Coerce a raw filter value to the python type of *column_name*.

    Integer and float columns return the converted value, or ``None`` when
    conversion fails (so the caller filters on NULL instead of crashing).
    Boolean columns map the form value ``'y'`` to ``True``.  Any other
    column type returns *value* unchanged.
    """
    if datamodel.is_integer(column_name):
        try:
            return int(value)
        except (TypeError, ValueError):
            # Not an integer; filter on NULL rather than raising.
            return None
    if datamodel.is_float(column_name):
        try:
            return float(value)
        except (TypeError, ValueError):
            return None
    if datamodel.is_boolean(column_name):
        if value == 'y':
            return True
    return value
class FilterStartsWith(BaseFilter):
    """Keep rows whose column value begins with the search string."""
    name = lazy_gettext('Starts with')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        pattern = value + '%'
        return query.filter(field.like(pattern))
class FilterNotStartsWith(BaseFilter):
    """Keep rows whose column value does NOT begin with the search string."""
    name = lazy_gettext('Not Starts with')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        pattern = value + '%'
        return query.filter(~field.like(pattern))
class FilterEndsWith(BaseFilter):
    """Keep rows whose column value ends with the search string."""
    name = lazy_gettext('Ends with')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        pattern = '%' + value
        return query.filter(field.like(pattern))
class FilterNotEndsWith(BaseFilter):
    """Keep rows whose column value does NOT end with the search string."""
    name = lazy_gettext('Not Ends with')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        pattern = '%' + value
        return query.filter(~field.like(pattern))
class FilterContains(BaseFilter):
    """Keep rows whose column value contains the search string."""
    name = lazy_gettext('Contains')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        pattern = '%' + value + '%'
        return query.filter(field.like(pattern))
class FilterNotContains(BaseFilter):
    """Keep rows whose column value does NOT contain the search string."""
    name = lazy_gettext('Not Contains')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        pattern = '%' + value + '%'
        return query.filter(~field.like(pattern))
class FilterEqual(BaseFilter):
    """Keep rows whose column value equals the (type-coerced) search value."""
    name = lazy_gettext('Equal to')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        typed_value = set_value_to_type(self.datamodel, self.column_name, value)
        return query.filter(field == typed_value)
class FilterNotEqual(BaseFilter):
    """Keep rows whose column value differs from the (type-coerced) search value."""
    name = lazy_gettext('Not Equal to')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        typed_value = set_value_to_type(self.datamodel, self.column_name, value)
        return query.filter(field != typed_value)
class FilterGreater(BaseFilter):
    """Keep rows whose column value is greater than the (type-coerced) search value."""
    name = lazy_gettext('Greater than')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        typed_value = set_value_to_type(self.datamodel, self.column_name, value)
        return query.filter(field > typed_value)
class FilterSmaller(BaseFilter):
    """Keep rows whose column value is smaller than the (type-coerced) search value."""
    name = lazy_gettext('Smaller than')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        typed_value = set_value_to_type(self.datamodel, self.column_name, value)
        return query.filter(field < typed_value)
class FilterRelationOneToManyEqual(FilterRelation):
    """Keep rows related to the selected object (one-to-many side)."""
    name = lazy_gettext('Relation')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        related = self.datamodel.get_related_obj(self.column_name, value)
        return query.filter(field == related)
class FilterRelationOneToManyNotEqual(FilterRelation):
    """Keep rows NOT related to the selected object (one-to-many side)."""
    name = lazy_gettext('No Relation')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        related = self.datamodel.get_related_obj(self.column_name, value)
        return query.filter(field != related)
class FilterRelationManyToManyEqual(FilterRelation):
    """Keep rows whose relation collection contains the selected object."""
    name = lazy_gettext('Relation as Many')

    def apply(self, query, value):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        related = self.datamodel.get_related_obj(self.column_name, value)
        return query.filter(field.contains(related))
class FilterEqualFunction(BaseFilter):
    """Keep rows whose column value equals the result of calling ``func``."""
    name = "Filter view with a function"

    def apply(self, query, func):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        target = func()
        return query.filter(field == target)
class FilterInFunction(BaseFilter):
    """Keep rows whose column value is in the list returned by ``func``."""
    name = "Filter view where field is in a list returned by a function"

    def apply(self, query, func):
        query, field = get_field_setup_query(
            query, self.model, self.column_name)
        allowed = func()
        return query.filter(field.in_(allowed))
class SQLAFilterConverter(BaseFilterConverter):
    """
    Class for converting columns into a supported list of filters
    specific for SQLAlchemy.
    """
    # Each entry maps a datamodel predicate method name (e.g. `is_text`)
    # to the list of filter classes offered for matching columns.
    # NOTE(review): entries appear to be checked in order with the first
    # match winning — confirm against BaseFilterConverter.
    conversion_table = (('is_relation_many_to_one', [FilterRelationOneToManyEqual,
                                                     FilterRelationOneToManyNotEqual]),
                        ('is_relation_one_to_one', [FilterRelationOneToManyEqual,
                                                    FilterRelationOneToManyNotEqual]),
                        ('is_relation_many_to_many', [FilterRelationManyToManyEqual]),
                        ('is_relation_one_to_many', [FilterRelationManyToManyEqual]),
                        ('is_enum', [FilterEqual,
                                     FilterNotEqual]),
                        # Text-like columns all share the same string filters.
                        ('is_text', [FilterStartsWith,
                                     FilterEndsWith,
                                     FilterContains,
                                     FilterEqual,
                                     FilterNotStartsWith,
                                     FilterNotEndsWith,
                                     FilterNotContains,
                                     FilterNotEqual]),
                        ('is_binary', [FilterStartsWith,
                                       FilterEndsWith,
                                       FilterContains,
                                       FilterEqual,
                                       FilterNotStartsWith,
                                       FilterNotEndsWith,
                                       FilterNotContains,
                                       FilterNotEqual]),
                        ('is_string', [FilterStartsWith,
                                       FilterEndsWith,
                                       FilterContains,
                                       FilterEqual,
                                       FilterNotStartsWith,
                                       FilterNotEndsWith,
                                       FilterNotContains,
                                       FilterNotEqual]),
                        # Numeric and temporal columns get ordering filters.
                        ('is_integer', [FilterEqual,
                                        FilterGreater,
                                        FilterSmaller,
                                        FilterNotEqual]),
                        ('is_float', [FilterEqual,
                                      FilterGreater,
                                      FilterSmaller,
                                      FilterNotEqual]),
                        ('is_numeric', [FilterEqual,
                                        FilterGreater,
                                        FilterSmaller,
                                        FilterNotEqual]),
                        ('is_date', [FilterEqual,
                                     FilterGreater,
                                     FilterSmaller,
                                     FilterNotEqual]),
                        ('is_boolean', [FilterEqual,
                                        FilterNotEqual]),
                        ('is_datetime', [FilterEqual,
                                         FilterGreater,
                                         FilterSmaller,
                                         FilterNotEqual]),
                        )
/Flask-LwAdmin-0.6.3.tar.gz/Flask-LwAdmin-0.6.3/README.rst | =============
Flask-LwAdmin
=============
Flask Lightningwolf Admin
-------------------------
LwAdmin is a set of macros, classes, and methods to assist in the creation of administrative panels based on the
bootstrap framework. At the moment we are working on the main section of the navigation and basic helpers for CRUD model.
The main assumption of LwAdmin is not to restrict the coder. LwAdmin is not a complete utility that can be run
as-is, like `Flask-Admin <https://github.com/mrjoes/flask-admin/>`_
If you are just thinking about the basic CRUD scheme should rather think of using `Flask-Admin <https://github.com/mrjoes/flask-admin/>`_.
This plugin is useful only when you need a custom administration panel - in that case, creating new pages requires more work to achieve the same effect.
At the moment we use a bootstrap version 2.3.x, but in the near future will be updated to version 3.0.x.
We are waiting for jQuery UI Bootstrap (v1.0) to have both Flask-Bootsrap and Flask-JqueryUiBootstrap in this version.
Versioning
++++++++++
The project uses `Semantic Versioning 2.0.0 <http://semver.org/>`_. Work is currently underway,
but because Flask-LwAdmin is already used in production environments, API changes occur only when the parameter **y** in (**0.y.z**) changes.
Examples
++++++++
Check SAMPLE_PROJECT for basic use example. More advanced use in this moment can be check in:
`Lightningwolf Smp Project <https://git.thunderwolf.net/lightningwolf/lightningwolf-smp>`_
Documentation
+++++++++++++
The documentation will be created in the near future. We know how important good documentation is for the proper development of the project.
How you can help
----------------
Comments
++++++++
We like comments.
Bug reports
+++++++++++
If you want to help us out, please file a bug report on our tracker at:
`Github Flask-LwAdmin Issues <https://github.com/lightningwolf/Flask-LwAdmin/issues>`_
Please be as descriptive as possible and tell us exactly what you were doing
at the time something went wrong. If possible, send us an exception backtrace
and, if possible, example code.
Wishes
++++++
We can't possibly know what everyone wants, so we appreciate all feature
requests. These can be submitted to the issue tracker as well (see above).
**Any opinion and ideas are welcome.**
Programming
+++++++++++
Patches are welcome.
| PypiClean |
/Firefly%20III%20API%20Python%20Client-1.5.6.post2.tar.gz/Firefly III API Python Client-1.5.6.post2/firefly_iii_client/model/category.py | import re # noqa: F401
import sys # noqa: F401
from firefly_iii_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from firefly_iii_client.exceptions import ApiAttributeError
def lazy_import():
    """Import related model classes on first use, avoiding circular imports."""
    from firefly_iii_client.model.category_earned import CategoryEarned
    from firefly_iii_client.model.category_spent import CategorySpent
    globals().update(CategoryEarned=CategoryEarned, CategorySpent=CategorySpent)
class Category(ModelNormal):
    """Firefly III ``Category`` API model.

    NOTE: This class is auto generated by OpenAPI Generator
    (ref: https://openapi-generator.tech).  Do not edit the class manually.

    Attributes:
        allowed_values (dict): maps tuple attribute paths to dicts of
            allowed enum values (empty: this model has no enums).
        validations (dict): maps tuple attribute paths to validation rules
            such as max_length or regex (empty: no validations here).
        attribute_map (dict): maps python attribute names to the JSON keys
            used in the API definition.
        additional_properties_type (tuple): types accepted for properties
            not declared in ``openapi_types``.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """Types accepted for undeclared (additional) properties.

        This must be a method because a model may have properties of its
        own type; it must run after the class is loaded.
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # A Category itself is never serialized as JSON ``null``.
    _nullable = False

    @cached_property
    def openapi_types():
        """Map each attribute name to its tuple of accepted types.

        This must be a method because a model may have properties of its
        own type; it must run after the class is loaded.
        """
        lazy_import()
        return {
            'name': (str,),  # noqa: E501
            'created_at': (datetime,),  # noqa: E501
            'earned': ([CategoryEarned],),  # noqa: E501
            'notes': (str, none_type,),  # noqa: E501
            'spent': ([CategorySpent],),  # noqa: E501
            'updated_at': (datetime,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No oneOf/anyOf discriminator for this model.
        return None

    # python attribute name -> JSON key in the API document.
    attribute_map = {
        'name': 'name',  # noqa: E501
        'created_at': 'created_at',  # noqa: E501
        'earned': 'earned',  # noqa: E501
        'notes': 'notes',  # noqa: E501
        'spent': 'spent',  # noqa: E501
        'updated_at': 'updated_at',  # noqa: E501
    }

    # Server-computed attributes; ``__init__`` rejects them (see below),
    # only ``_from_openapi_data`` may set them.
    read_only_vars = {
        'created_at',  # noqa: E501
        'earned',  # noqa: E501
        'spent',  # noqa: E501
        'updated_at',  # noqa: E501
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, name, *args, **kwargs):  # noqa: E501
        """Build a Category from deserialized API response data.

        Unlike ``__init__``, this constructor accepts the read-only
        attributes (created_at, earned, spent, updated_at).

        Args:
            name (str): the category name (required).

        Keyword Args:
            created_at (datetime): [optional]
            earned ([CategoryEarned]): [optional]
            notes (str, none_type): [optional]
            spent ([CategorySpent]): [optional]
            updated_at (datetime): [optional]
            _check_type (bool): type-check values against ``openapi_types``
                and raise ApiTypeError on mismatch.  Defaults to True.
            _path_to_item (tuple/list): key path to this model within the
                received data, used in error messages.
            _spec_property_naming (bool): True if the input uses serialized
                (spec) names rather than pythonic names.  Defaults to False.
            _configuration (Configuration): instance used when
                deserializing; enables type conversion when given.
            _visited_composed_classes (tuple): classes already traversed
                while resolving composed-schema discriminators, to avoid
                reusing a discriminator (cycle protection).
        """
        # Pull the framework control kwargs out before treating the rest
        # as model attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Allocate directly, skipping OpenApiModel.__new__ bookkeeping.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.name = name
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # Configuration says to discard unknown keys silently.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes; presumably consumed by the base
    # model's attribute handling rather than sent to the API — see
    # ModelNormal/OpenApiModel.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, name, *args, **kwargs):  # noqa: E501
        """Category - a model defined in OpenAPI.

        Args:
            name (str): the category name (required).

        Keyword Args:
            notes (str, none_type): [optional]
            _check_type (bool): see ``_from_openapi_data``; same control
                kwargs apply here (``_path_to_item``,
                ``_spec_property_naming``, ``_configuration``,
                ``_visited_composed_classes``).

        Raises:
            ApiTypeError: if positional arguments are passed.
            ApiAttributeError: if a read-only attribute (created_at,
                earned, spent, updated_at) is passed; use
                ``_from_openapi_data`` for those.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.name = name
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # Configuration says to discard unknown keys silently.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes are rejected (after being set — this
            # ordering is how the generator emits it).
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
/NehorayRapid-0.0.1-py3-none-any.whl/mmedit/models/common/mask_conv_module.py | from mmcv.cnn import ConvModule
class MaskConvModule(ConvModule):
    """Mask convolution module.

    This is a simple wrapper for mask convolution like: 'partial conv'.
    Convolutions in this module always need a mask as extra input.

    Args:
        in_channels (int): Same as nn.Conv2d.
        out_channels (int): Same as nn.Conv2d.
        kernel_size (int or tuple[int]): Same as nn.Conv2d.
        stride (int or tuple[int]): Same as nn.Conv2d.
        padding (int or tuple[int]): Same as nn.Conv2d.
        dilation (int or tuple[int]): Same as nn.Conv2d.
        groups (int): Same as nn.Conv2d.
        bias (bool or str): If specified as `auto`, it will be decided by the
            norm_cfg. Bias will be set as True if norm_cfg is None, otherwise
            False.
        conv_cfg (dict): Config dict for convolution layer.
        norm_cfg (dict): Config dict for normalization layer.
        act_cfg (dict): Config dict for activation layer, "relu" by default.
        inplace (bool): Whether to use inplace mode for activation.
        with_spectral_norm (bool): Whether use spectral norm in conv module.
        padding_mode (str): If the `padding_mode` has not been supported by
            current `Conv2d` in Pytorch, we will use our own padding layer
            instead. Currently, we support ['zeros', 'circular'] with official
            implementation and ['reflect'] with our own implementation.
            Default: 'zeros'.
        order (tuple[str]): The order of conv/norm/activation layers. It is a
            sequence of "conv", "norm" and "act". Examples are
            ("conv", "norm", "act") and ("act", "conv", "norm").
    """
    # Conv types (from conv_cfg['type']) accepted by this wrapper.
    supported_conv_list = ['PConv']

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Only mask-aware conv layers make sense here.
        assert self.conv_cfg['type'] in self.supported_conv_list
        self.init_weights()

    def forward(self,
                x,
                mask=None,
                activate=True,
                norm=True,
                return_mask=True):
        """Forward function for partial conv2d.

        Args:
            x (torch.Tensor): Tensor with shape of (n, c, h, w).
            mask (torch.Tensor): Tensor with shape of (n, c, h, w) or
                (n, 1, h, w). If mask is not given, the function will
                work as standard conv2d. Default: None.
            activate (bool): Whether use activation layer.
            norm (bool): Whether use norm layer.
            return_mask (bool): If True and mask is not None, the updated
                mask will be returned. Default: True.

        Returns:
            Tensor or tuple: Result Tensor or 2-tuple of

                ``Tensor``: Results after partial conv.

                ``Tensor``: Updated mask will be returned if mask is given \
                    and `return_mask` is True.
        """
        # Apply the layers in the order configured on ConvModule,
        # e.g. ('conv', 'norm', 'act').
        for layer in self.order:
            if layer == 'conv':
                if self.with_explicit_padding:
                    # Pad input and mask identically so they stay aligned.
                    x = self.padding_layer(x)
                    mask = self.padding_layer(mask)
                if return_mask:
                    x, updated_mask = self.conv(
                        x, mask, return_mask=return_mask)
                else:
                    x = self.conv(x, mask, return_mask=False)
            elif layer == 'norm' and norm and self.with_norm:
                x = self.norm(x)
            elif layer == 'act' and activate and self.with_activation:
                x = self.activate(x)
        if return_mask:
            # NOTE(review): assumes 'conv' is present in self.order;
            # otherwise `updated_mask` would be unbound here — confirm
            # upstream contract of ConvModule.order.
            return x, updated_mask
        return x
/FoundryDataBrowser-190903.1.tar.gz/FoundryDataBrowser-190903.1/viewers/hyperspec_3d_h5.py | from ScopeFoundry.data_browser import HyperSpectralBaseView
import numpy as np
import h5py
import pyqtgraph as pg
from .scalebars import ConfocalScaleBar
from matplotlib.cm import ScalarMappable
from pyqtgraph.opengl import GLViewWidget, GLAxisItem, GLGridItem, GLVolumeItem
from scipy.interpolate import interp1d
from qtpy.QtWidgets import QPushButton
import time
class HyperSpec3DH5View(HyperSpectralBaseView):
name = 'hyperspec_3d_h5'
supported_measurements = ['oo_asi_hyperspec_3d_scan',
'andor_asi_hyperspec_3d_scan',]
    def scan_specific_setup(self):
        # Hook from HyperSpectralBaseView; this viewer needs no extra setup here.
        pass
    def setup(self):
        """Create the viewer's settings, the 3D volume view, and the dock layout."""
        self.settings.New('sample', dtype=str, initial='')
        # z position of the displayed 2D slice; choices are filled in load_data.
        self.settings.New('z_slice', dtype=float, choices=[0.0], initial=0.0)
        self.settings.New('show_3d', dtype=bool, initial=False)
        # Opacity and matplotlib colormap used when rendering the 3D volume.
        self.settings.New('vol_alpha', dtype=float, vmin=0.0, vmax=1.0,
                          initial=0.5)
        self.settings.New(
            'vol_colormap', dtype=str, initial='viridis',
            choices=['viridis', 'plasma', 'inferno', 'magma', 'cividis',
                     'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds',
                     'YlOrBr', 'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu',
                     'GnBu', 'PuBu', 'YlGnBu', 'PuBuGn', 'BuGn', 'YlGn'])
        # self.settings.New('vol_percentile', dtype=int, vmin=0, vmax=49,
        #                   initial=5)
        # Percentile window used to scale volume brightness/transparency.
        self.settings.New('vol_percentile_min', dtype=int, vmin=0, vmax=100,
                          initial=5)
        self.settings.New('vol_percentile_max', dtype=int, vmin=0, vmax=100,
                          initial=95)
        self.settings.New('vol_transparent_percentile', dtype=int, vmin=0,
                          vmax=100, initial=5)
        self.settings.New('vol_transparent_min', dtype=bool, initial=False)
        # Refresh the displayed slab when the user picks another z plane.
        self.settings.z_slice.updated_choice_index_value.connect(
            self.on_update_zslice_choice)
        # self.settings.vol_colormap.updated_value.connect(self.calculate_volume)
        # self.settings.vol_alpha.updated_value.connect(self.calculate_volume)
        HyperSpectralBaseView.setup(self)
        # Placeholder volume: a single fully transparent voxel.
        voldata = np.empty((1, 1, 1, 4), dtype=np.ubyte)
        voldata[0, 0, 0, :] = [255, 255, 255, 0]
        self.volitem = GLVolumeItem(data=voldata)
        self.glview = GLViewWidget()
        self.glaxis = GLAxisItem()
        self.glgrid = GLGridItem()
        self.glview.addItem(self.glgrid)
        self.glview.addItem(self.glaxis)
        self.glview.addItem(self.volitem)
        self.gldock = self.dockarea.addDock(name='3D', widget=self.glview,
                                            position='below',
                                            relativeTo=self.image_dock)
        # The 3D volume is computed on demand via this button
        # (it is wired to calculate_volume below).
        self.calculate_3d_pushButton = QPushButton(text='calculate_3d')
        self.settings_ui.layout().addWidget(self.calculate_3d_pushButton)
        self.calculate_3d_pushButton.clicked.connect(self.calculate_volume)
        self.image_dock.raiseDock()
def is_file_supported(self, fname):
return np.any([(meas_name in fname)
for meas_name in self.supported_measurements])
    def reset(self):
        """Release the previous file's resources before a new file is loaded."""
        if hasattr(self, 'dat'):
            # Close the h5 file handle and drop cached data.
            self.dat.close()
            del self.dat
        if hasattr(self, 'spec_map'):
            del self.spec_map
        if hasattr(self, 'scalebar'):
            self.imview.getView().removeItem(self.scalebar)
            del self.scalebar
        if hasattr(self, 'volume'):
            # Replace the rendered volume with a single transparent voxel
            # and disable the 3D display until recalculated.
            spoof_data = np.zeros((1, 1, 1, 4), dtype=np.ubyte)
            self.volitem.setData(spoof_data)
            del self.volume
            self.settings.show_3d.update_value(False)
        self.image_dock.raiseDock()
def load_data(self, fname):
self.dat = h5py.File(fname)
for meas_name in self.supported_measurements:
if meas_name in self.dat['measurement']:
self.M = self.dat['measurement'][meas_name]
for map_name in ['hyperspectral_map', 'spec_map']:
if map_name in self.M:
self.spec_map = np.array(self.M[map_name])
self.h_span = self.M['settings'].attrs['h_span']
self.x_array = np.array(self.M['h_array'])
self.z_array = np.array(self.M['z_array'])
units = self.M['settings/units'].attrs['h_span']
if units == 'mm':
self.h_span = self.h_span*1e-3
self.z_span = self.z_array*1e-3
self.settings.z_slice.change_unit('mm')
if 'dark_indices' in list(self.M.keys()):
print('dark indices found')
dark_indices = self.M['dark_indices']
if dark_indices.len() == 0:
self.spec_map = np.delete(
self.spec_map, list(dark_indices.shape), -1)
else:
self.spec_map = np.delete(
self.spec_map, np.array(dark_indices), -1)
else:
print('no dark indices')
self.hyperspec_data = self.spec_map[0, :, :, :]
self.display_image = self.hyperspec_data.sum(axis=-1)
self.settings.z_slice.change_choice_list(self.z_array.tolist())
self.settings.z_slice.update_value(self.z_array[0])
self.spec_x_array = np.arange(self.hyperspec_data.shape[-1])
for x_axis_name in ['wavelength', 'wls', 'wave_numbers',
'raman_shifts']:
if x_axis_name in self.M:
x_array = np.array(self.M[x_axis_name])
if 'dark_indices' in list(self.M.keys()):
dark_indices = self.M['dark_indices']
# The following is to read a dataset I initialized
# incorrectly for dark pixels. This can be replaced with
# the else statement entirely now that the measurement is
# fixed, but I still have a long measurement that will
# benefit from this.
if dark_indices.len() == 0:
x_array = np.delete(
x_array, list(dark_indices.shape), 0)
else:
x_array = np.delete(x_array, np.array(dark_indices), 0)
self.add_spec_x_array(x_axis_name, x_array)
self.x_axis.update_value(x_axis_name)
sample = self.dat['app/settings'].attrs['sample']
self.settings.sample.update_value(sample)
self.calculate_volume()
def on_update_zslice_choice(self, index):
if hasattr(self, 'spec_map'):
self.hyperspec_data = self.spec_map[index, :, :, :]
self.display_images['default'] = self.hyperspec_data
self.display_images['sum'] = self.hyperspec_data.sum(axis=-1)
self.spec_x_arrays['default'] = self.spec_x_array
self.spec_x_arrays['index'] = np.arange(
self.hyperspec_data.shape[-1])
self.recalc_bandpass_map()
self.recalc_median_map()
self.update_display()
def calculate_volume(self):
if not self.settings['show_3d']:
print('calculate_volume called without show_3d')
return
print('calculating 3d volume')
t0 = time.time()
if hasattr(self, 'volume'):
del self.volume
if hasattr(self, 'mappable'):
self.mappable.set_cmap(self.settings['vol_colormap'])
else:
self.mappable = ScalarMappable(cmap=self.settings['vol_colormap'])
z_span = self.z_array[-1] - self.z_array[0]
dx = self.x_array[1] - self.x_array[0]
z_interp_array = np.linspace(np.amin(self.z_array),
np.amax(self.z_array),
num=z_span/dx)
z_interp = None
self.volume = None
nz = len(z_interp_array)
if self.settings['display_image'] == 'bandpass_map':
print('bandpass_map selected')
x, slice = self.get_xhyperspec_data(apply_use_x_slice=True)
ind_min = np.nonzero(self.spec_x_array == x[0])[0][0]
ind_max = np.nonzero(self.spec_x_array == x[-1])[0][0]
data = np.zeros((len(self.z_array),) + slice.shape)
data = self.spec_map[:, :, :, ind_min:ind_max]
# for kk in range(len(self.z_array)):
# print(
# 'grabbing bandpass layer %d of %d' % (kk, len(self.z_array)))
# self.settings.z_slice.update_value(self.z_array[kk])
# x, data[kk, :, :, :] = self.get_xhyperspec_data(
# apply_use_x_slice=True)
z_interp = interp1d(self.z_array, data, axis=0)
else:
z_interp = interp1d(self.z_array, self.spec_map, axis=0)
data = z_interp(z_interp_array)
self.volume = np.zeros(data.shape[:-1] + (4,), dtype=np.ubyte)
pmin = self.settings['vol_percentile_min']
pmax = self.settings['vol_percentile_max']
self.mappable.set_array(data.sum(axis=-1))
vmin = np.percentile(data.sum(axis=-1), pmin)
vmax = np.percentile(data.sum(axis=-1), pmax)
tmin = np.percentile(
data.sum(axis=-1),
self.settings['vol_transparent_percentile'])
self.mappable.set_clim(vmin=vmin, vmax=vmax)
# self.mappable.autoscale()
for kk in range(nz):
print('calculating rgba vals for %d of %d layers' % (kk, nz))
sum_data = data[kk, :, :, :].sum(axis=-1)
# print(sum_data.shape, self.volume.shape)
self.volume[kk, :, :, :] = self.mappable.to_rgba(
sum_data,
alpha=self.settings['vol_alpha'],
bytes=True
)
if self.settings['vol_transparent_min']:
self.volume[kk, :, :, 3][np.nonzero(sum_data <= tmin)] = 0
print('3d volume calculation complete')
t1 = time.time()
print('time elapsed: %0.3f s' % (t1-t0))
kwargs = {'x': len(self.x_array), 'y': len(self.x_array), 'z': nz}
self.glaxis.setSize(**kwargs)
self.glgrid.setSize(**kwargs)
self.glgrid.setSpacing(x=1/dx*5, y=1/dx*5, z=1/dx*5)
# print(self.mappable.get_cmap().name)
# print(data.shape, self.volume.shape)
    def update_display(self):
        # Redraw the 2D image view: refresh the image item, rebuild the
        # scale bar, push the 3D volume (when enabled) and refresh the ROIs.
        if hasattr(self, 'scalebar'):
            # Remove the stale scale bar before drawing a new one.
            self.imview.getView().removeItem(self.scalebar)
        if self.display_image is not None:
            # pyqtgraph axes are x,y, but data is stored in (frame, y,x, time),
            # so we need to transpose
            self.imview.getImageItem().setImage(self.display_image.T)
            nn = self.display_image.shape
            # Use the measured horizontal span when known; span=-1 presumably
            # tells ConfocalScaleBar to fall back to a default -- TODO confirm.
            if hasattr(self, 'h_span'):
                span = self.h_span
            else:
                span = -1
            self.scalebar = ConfocalScaleBar(span=span, num_px=nn[0])
            self.scalebar.setParentItem(self.imview.getView())
            # Pin the scale bar to the lower-right corner of the view.
            self.scalebar.anchor((1, 1), (1, 1), offset=(-20, -20))
        if hasattr(self, 'volume') and self.settings['show_3d']:
            # GL volume items expect (x, y, z) ordering; swap the z axis in.
            self.volitem.setData(np.swapaxes(self.volume, 0, 2))
        self.on_change_rect_roi()
        self.on_update_circ_roi()
def matplotlib_colormap_to_pg_colormap(colormap_name, n_ticks=16):
    '''
    Convert a named matplotlib colormap into a pyqtgraph ColorMap.

    ============= =========================================================
    **Arguments**
    colormap_name (string) name of a matplotlib colormap i.e. 'viridis'
    n_ticks       (int) Number of ticks to create when dict of functions
                  is used. Otherwise unused.
    ============= =========================================================
    returns: (pgColormap) pyqtgraph colormap
    primary Usage: <pg.ImageView>.setColorMap(pgColormap)

    requires: cmapToColormap by Sebastian Hoefer
        https://github.com/pyqtgraph/pyqtgraph/issues/561
    '''
    from matplotlib import cm
    # BUG FIX: n_ticks was previously passed as a second argument to zip()
    # (a TypeError, since zip expects iterables); it belongs to
    # cmapToColormap, which uses it for function-based colormaps.
    pos, rgba_colors = zip(*cmapToColormap(getattr(cm, colormap_name), n_ticks))
    pgColormap = pg.ColorMap(pos, rgba_colors)
    return pgColormap
def cmapToColormap(cmap, nTicks=16):
    """
    Converts a Matplotlib cmap to pyqtgraphs colormaps. No dependency on
    matplotlib.

    Parameters:
    *cmap*: Cmap object. Imported from matplotlib.cm.*
    *nTicks*: Number of ticks to create when dict of functions is used.
    Otherwise unused.

    Returns a list of (position, (r, g, b, 255)) tuples with integer colors.

    author: Sebastian Hoefer
    """
    # BUG FIX: the ABC aliases were removed from the bare 'collections'
    # namespace in Python 3.10; use collections.abc instead.
    import collections.abc
    # Case #1: a dictionary with 'red'/'green'/'blue' values as list of ranges (e.g. 'jet')
    # The parameter 'cmap' is a 'matplotlib.colors.LinearSegmentedColormap' instance ...
    if hasattr(cmap, '_segmentdata'):
        colordata = getattr(cmap, '_segmentdata')
        if ('red' in colordata) and isinstance(colordata['red'], collections.abc.Sequence):
            # collect the color ranges from all channels into one dict to get unique indices
            posDict = {}
            for idx, channel in enumerate(('red', 'green', 'blue')):
                for colorRange in colordata[channel]:
                    posDict.setdefault(colorRange[0], [-1, -1, -1])[idx] = colorRange[2]
            indexList = list(posDict.keys())
            indexList.sort()
            # interpolate missing values (== -1)
            for channel in range(3):  # R,G,B
                startIdx = indexList[0]
                emptyIdx = []
                for curIdx in indexList:
                    if posDict[curIdx][channel] == -1:
                        emptyIdx.append(curIdx)
                    elif curIdx != indexList[0]:
                        for eIdx in emptyIdx:
                            rPos = (eIdx - startIdx) / (curIdx - startIdx)
                            vStart = posDict[startIdx][channel]
                            vRange = (posDict[curIdx][channel] - posDict[startIdx][channel])
                            posDict[eIdx][channel] = rPos * vRange + vStart
                        startIdx = curIdx
                        del emptyIdx[:]
            for channel in range(3):  # R,G,B
                for curIdx in indexList:
                    posDict[curIdx][channel] *= 255
            rgb_list = [[i, posDict[i]] for i in indexList]
        # Case #2: a dictionary with 'red'/'green'/'blue' values as functions (e.g. 'gnuplot')
        elif ('red' in colordata) and isinstance(colordata['red'], collections.abc.Callable):
            indices = np.linspace(0., 1., nTicks)
            # BUG FIX: np.float was removed in numpy 1.24; builtin float is
            # what the deprecated alias pointed to.
            luts = [np.clip(np.array(colordata[rgb](indices), dtype=float), 0, 1) * 255 \
                    for rgb in ('red', 'green', 'blue')]
            rgb_list = zip(indices, list(zip(*luts)))
        else:
            # Previously this fell through with rgb_list unbound (NameError).
            raise ValueError("[cmapToColormap] Unknown cmap format or not a cmap!")
    # If the parameter 'cmap' is a 'matplotlib.colors.ListedColormap' instance, with the attributes 'colors' and 'N'
    elif hasattr(cmap, 'colors') and hasattr(cmap, 'N'):
        colordata = getattr(cmap, 'colors')
        # Case #3: a list with RGB values (e.g. 'seismic')
        if len(colordata[0]) == 3:
            indices = np.linspace(0., 1., len(colordata))
            scaledRgbTuples = [(rgbTuple[0] * 255, rgbTuple[1] * 255, rgbTuple[2] * 255) for rgbTuple in colordata]
            rgb_list = zip(indices, scaledRgbTuples)
        # Case #4: a list of tuples with positions and RGB-values (e.g. 'terrain')
        # -> this section is probably not needed anymore!?
        elif len(colordata[0]) == 2:
            rgb_list = [(idx, (vals[0] * 255, vals[1] * 255, vals[2] * 255)) for idx, vals in colordata]
        else:
            # Previously this fell through with rgb_list unbound (NameError).
            raise ValueError("[cmapToColormap] Unknown cmap format or not a cmap!")
    # Case #X: unknown format or datatype was the wrong object type
    else:
        raise ValueError("[cmapToColormap] Unknown cmap format or not a cmap!")
    # Convert the RGB float values to RGBA integer values
    return list([(pos, (int(r), int(g), int(b), 255)) for pos, (r, g, b) in rgb_list])
#
# class HyperSpecSpecMedianH5View(HyperSpectralBaseView):
#
# name = 'hyperspec_spec_median_npz'
#
# def is_file_supported(self, fname):
# return "_spec_scan.npz" in fname
#
#
# def load_data(self, fname):
# self.dat = np.load(fname)
#
# self.spec_map = self.dat['spec_map']
# self.wls = self.dat['wls']
# self.integrated_count_map = self.dat['integrated_count_map']
# self.spec_median_map = np.apply_along_axis(spectral_median, 2,
# self.spec_map[:,:,:],
# self.wls, 0)
# self.hyperspec_data = self.spec_map
# self.display_image = self.spec_median_map
# self.spec_x_array = self.wls
#
# def scan_specific_setup(self):
# self.spec_plot.setLabel('left', 'Intensity', units='counts')
# self.spec_plot.setLabel('bottom', 'Wavelength', units='nm')
#
# if __name__ == '__main__':
# import sys
#
# app = DataBrowser(sys.argv)
# app.load_view(HyperSpecH5View(app))
#
# sys.exit(app.exec_())
/NNBuilder-0.3.7.tar.gz/NNBuilder-0.3.7/nnbuilder/layers/misc.py | from simple import *
class beamsearch(LayerBase):
    """Layer mixin implementing batched beam-search decoding.

    Subclasses override the ``apply_predict_*`` hooks to declare their
    encoder contexts, recurrent initial states and one-step probability
    graph; ``get_beamsearch_predict_function`` then assembles a pure-Python
    sampler that decodes ``batchsize`` sequences, each tracking
    ``beamsize`` live hypotheses.
    """

    def __init__(self, **kwargs):
        LayerBase.__init__(self, **kwargs)

    def apply_predict_contexts(self, batchsize):
        '''
        Return two ordereddicts.
        First one's keys are names of contexts and values are the
        corresponding update graphs of those contexts.
        Second one's keys are names of contexts and values are the
        corresponding shapes of those contexts.
        Base implementation declares no contexts; subclasses override.
        :param batchsize: scalar
        :return: (contexts, context_shapes)
        '''
        predict_contexts, predict_contexts_shapes = OrderedDict(), OrderedDict()
        return predict_contexts, predict_contexts_shapes

    def apply_predict_initstates(self, batchsize, beamsize):
        '''
        Return two ordereddicts.
        First one's keys are names of initstates and values are the
        corresponding attrs of those initstates.
        Second one's keys are names of initstates and values are the
        corresponding shapes of those initstates.
        Base implementation declares no initial states; subclasses override.
        :param batchsize: scalar
        :return: (initstates_attr, initstates_shapes)
        '''
        predict_initstates_attr = OrderedDict()
        predict_initstates_shapes = OrderedDict()
        return predict_initstates_attr, predict_initstates_shapes

    def apply_init_predict_step(self, contexts, initstates, batchsize, beamsize):
        '''
        Use shared init_states as input and return
        1. A graph of probability
           which has size of
           (batchsize,beamsize,catglorysize)
           or
           (batchsize,catglorysize)
        2. Updates of initstates
        Base implementation is a placeholder returning zeros and identity
        updates; subclasses override with the real one-step decoder graph.
        :param initstates:
        :return: (probability, updates)
        '''
        probability = T.zeros([batchsize, beamsize, 100], [None, None, None], kernel.config.floatX)
        updates = []
        for k, v in initstates.items():
            updates.append((v, v))
        return probability, updates

    def init_beamsearch(self, batchsize, beamsize):
        # Build the two compiled functions needed for decoding:
        #   fn_contexts -- runs the encoder once, storing contexts in shareds
        #   fn_step     -- one decode step; takes the beam back-pointers,
        #                  reshuffles the recurrent states accordingly and
        #                  returns next-token probabilities
        beamchoice = kernel.placeholder('BeamChoice', ['batch', 'search'], kernel.config.catX)
        contexts = OrderedDict()
        initstates = OrderedDict()
        predict_contexts, predict_contexts_shapes = self.apply_predict_contexts(batchsize)
        predict_initstates_attr, predict_initstates_shapes = self.apply_predict_initstates(batchsize, beamsize)
        contexts_updates = []
        # Allocate a shared buffer per context; fn_contexts fills them.
        for k, v in predict_contexts.items():
            contexts[k] = kernel.shared(np.ones(predict_contexts_shapes[k], kernel.config.floatX), k, v.attr)
            contexts_updates.append((contexts[k], v))
        # Allocate a shared buffer per recurrent initial state.
        for k, v in predict_initstates_shapes.items():
            initstates[k] = kernel.shared(np.ones(predict_initstates_shapes[k], kernel.config.floatX), k,
                                          predict_initstates_attr[k])
        fn_contexts = kernel.compile([self._model_inputs['X'], self._model_inputs['X_Mask']], updates=contexts_updates, strict=False)
        chosen_initstates = OrderedDict()
        # Reorder each state's (batch, beam) leading axes according to the
        # beam back-pointers before feeding the next step.
        for k, v in initstates.items():
            raw_shape = v.shape[2:]
            raw_attr = v.attr[2:]
            raw_flatten = v.reshape([batchsize * beamsize] + raw_shape, [None] + raw_attr)
            chosen_initstate = raw_flatten[beamchoice.flatten()].reshape([batchsize, beamsize] + raw_shape,
                                                                         ['batch', 'search'] + raw_attr)
            chosen_initstates[k] = chosen_initstate
        probability, initstates_updates = self.apply_init_predict_step(contexts, chosen_initstates, batchsize, beamsize)
        fn_step = kernel.compile([beamchoice], [probability], updates=initstates_updates, strict=False)
        return contexts, initstates, fn_contexts, fn_step

    def get_beamsearch_predict_function(self, catglorysize, batchsize, maxlen=50, beamsize=12):
        """Return a Python sampler ``gen_sample(X, X_Mask, ...)`` that
        beam-decodes one minibatch and returns, per batch row, the sample
        with the best (lowest) length-normalized negative log score.

        :param catglorysize: vocabulary size (token 0 is treated as <eos>)
        :param batchsize: number of sequences decoded in parallel
        :param maxlen: maximum decode length
        :param beamsize: number of live hypotheses per sequence
        """
        contexts, initstates, init_predict, feed_step = self.init_beamsearch(batchsize, beamsize)

        def gen_sample(*inputs):
            # extract inputs
            X, X_Mask = inputs[:2]
            # initcontexts
            # no returns since all the changes are about shared values
            # we just update them
            init_predict(X, X_Mask)
            # init beamsearch vars
            samples_all = []
            trans_all = []
            mod_all = []
            scores_all = []
            beamchoices = np.zeros([batchsize, beamsize], kernel.config.catX)
            for _ in range(batchsize):
                samples_all.append([])
                trans_all.append([])
                mod_all.append([])
                scores_all.append([])
            # masks[b, k] == 1 while beam channel k of batch b is still alive
            masks = np.ones([batchsize, beamsize], 'int8')
            # accumulated negative log probability per live channel
            probsums = np.zeros([batchsize, beamsize], kernel.config.floatX)
            # first search all the same for every search channel
            prob_b_k_v = feed_step(beamchoices)[0]
            prob_b_k_v = - np.log(prob_b_k_v)
            prob_b_kv = prob_b_k_v[:, 0]
            # beamsearch step by step
            for n_step in range(1, maxlen + 1):
                for batch in range(batchsize):
                    bmask = masks[batch]
                    btrans = trans_all[batch]
                    bmods = mod_all[batch]
                    bsample = samples_all[batch]
                    bscore = scores_all[batch]
                    prob = prob_b_kv[batch].flatten()
                    # how many live channel to search in this minibatch
                    # set mask for this minibatch
                    bnlive = bmask.sum()
                    bndead = beamsize - bmask.sum()
                    # shift live channel to left and dead ones to right
                    for i in range(beamsize):
                        if i < bnlive:
                            bmask[i] = 1
                        else:
                            bmask[i] = 0
                    # find top (k-dead_channel)
                    b_step_out = prob.argpartition(bnlive)[:bnlive]
                    # append trans and mod
                    # trans = originating channel (back-pointer), mod = token id
                    b_step_trans = b_step_out // catglorysize
                    b_step_mod = b_step_out % catglorysize
                    btrans.append(b_step_trans)
                    bmods.append(b_step_mod)
                    # update beamchoices and probsums in this batch and pad inf score for dead channel to keep dim = beamsize
                    beamchoices[batch] = np.pad(b_step_trans, (0, bndead), 'constant',
                                                constant_values=(0, 0))
                    probsums[batch] = np.pad(prob[b_step_out], (0, bndead), 'constant',
                                             constant_values=(0, np.inf))
                    # build sample at final loop or <eos> predicted
                    for i, (t, m, o) in enumerate(zip(b_step_trans, b_step_mod, b_step_out)):
                        if m == 0 or n_step == maxlen:
                            bmask[i] = 0  # set corresponding mask to 0
                            sample = []  # declare a new sample list
                            # trace back to step 0 find every predicted word in this channel
                            # k from n_step-1 to 0
                            ii = i  # the ith word will be token corresponding to kth step
                            for k in range(n_step - 1, -1, -1):
                                word = bmods[k][ii]
                                sample.append(word)
                                ii = btrans[k][ii]
                            bsample.append(sample[::-1])
                            # length-normalized negative log probability
                            bscore.append(probsums[batch][i] / n_step)
                # if all channels of all batches dead
                # break loop
                if (np.equal(masks, 0)).all():
                    break
                # get probality and update states
                prob_b_k_v = feed_step(beamchoices)[0]
                prob_b_k_v = probsums[:, :, None] - np.log(prob_b_k_v * masks[:, :, None])
                prob_b_kv = prob_b_k_v.reshape([batchsize, beamsize * catglorysize])
            # return samples and score
            return [sample[np.array(score).argmin()] for sample, score in
                    zip(samples_all, scores_all)]
        return gen_sample
/Loglan-Core-0.0.4.tar.gz/Loglan-Core-0.0.4/loglan_core/addons/word_getter.py | from typing import Union
from sqlalchemy import or_
from sqlalchemy.orm import Session
from loglan_core.connect_tables import t_connect_keys
from loglan_core.definition import BaseDefinition
from loglan_core.event import BaseEvent
from loglan_core.key import BaseKey
class AddonWordGetter:
    """AddonWordGetter model

    Query-helper mixin for a Word-like mapped class: filter words by
    event, by name pattern, or by translation key.  All helpers return
    SQLAlchemy query objects ordered by word name.
    """

    @classmethod
    def by_event(
            cls, session: Session,
            event_id: Union[BaseEvent, int] = None,
            add_to=None):
        """Query filtered by specified Event (the latest by default)

        Args:
            session: SQLAlchemy session
            event_id: Union[BaseEvent, int]: Event object or Event.id (int)
                (Default value = None -> the latest event)
            add_to: existing query to extend instead of starting a new one

        Returns:
            BaseQuery
        """
        if isinstance(event_id, BaseEvent):
            # Accept an Event object (as documented), not only its id.
            event_id = event_id.id
        if not event_id:
            event_id = BaseEvent.latest(session).id
        request = add_to if add_to else session.query(cls)
        return cls._filter_event(event_id, request).order_by(cls.name)

    @classmethod
    def _filter_event(cls, event_id: int, add_to):
        # A word belongs to the event if it appeared no later than the event
        # and was not retired before it (open-ended words have no end event).
        return add_to.filter(cls.event_start_id <= event_id) \
            .filter(or_(cls.event_end_id > event_id, cls.event_end_id.is_(None)))

    @classmethod
    def by_name(
            cls, session: Session,
            name: str, event_id: Union[BaseEvent, int] = None,
            case_sensitive: bool = False, add_to=None):
        """Word.Query filtered by specified name

        Args:
            session: SQLAlchemy session
            name: str: name pattern; '*' is accepted as a wildcard
            event_id: Union[BaseEvent, int]: (Default value = None)
            case_sensitive: bool: (Default value = False)
            add_to: existing query to extend instead of starting a new one

        Returns:
            BaseQuery
        """
        request = add_to if add_to else session.query(cls)
        name = name.replace("*", "%")
        return cls.by_event(session, event_id, request).filter(
            cls.name.like(name) if case_sensitive else cls.name.ilike(name)
        )

    @classmethod
    def by_key(cls, session: Session,
               key: Union[BaseKey, str],
               language: str = None,
               event_id: Union[BaseEvent, int] = None,
               case_sensitive: bool = False, add_to=None):
        """Word.Query filtered by specified key

        Args:
            session: SQLAlchemy session
            key: Union[BaseKey, str]: key object or raw key word;
                '*' is accepted as a wildcard
            language: str: Language of key (Default value = None)
            event_id: Union[BaseEvent, int]: (Default value = None)
            case_sensitive: bool: (Default value = False)
            add_to: existing query to extend instead of starting a new one

        Returns:
            BaseQuery
        """
        request = add_to if add_to else session.query(cls)
        request = cls.by_event(session, event_id, request)
        # BUG FIX: use the *instance's* word (key.word), not the mapped
        # column BaseKey.word -- calling str.replace on the class-level
        # InstrumentedAttribute fails.
        pattern = (key.word if isinstance(key, BaseKey) else str(key)).replace("*", "%")
        request = request.join(BaseDefinition, t_connect_keys, BaseKey).filter(
            BaseKey.word.like(pattern) if case_sensitive else BaseKey.word.ilike(pattern))
        if language:
            request = request.filter(BaseKey.language == language)
        return request.order_by(cls.name)
/Geocoding-1.4.3-py3-none-any.whl/geocoding/utils.py | SCALE = 7
def degree_to_int(degree):
"""Convert the float degree to int.
"""
fl = float(degree)
return int(fl * (10 ** SCALE))
def int_to_degree(integer):
"""Convert the int integer to a float.
"""
i = int(integer)
return float(i / (10 ** SCALE))
def pre_order(size):
    """Return the integers 0..size-1 in the order a balanced binary search
    tree over that range would be visited top-down.

    The traversal is breadth-first (level order): each queued interval
    (start, end) yields its midpoint and then enqueues its two halves, so
    every parent appears before its children.

    Args:
        size (int): number of elements; non-positive sizes yield [].

    Returns:
        list of int: the midpoints in level order.
    """
    if size <= 0:
        # BUG FIX: the original indexed interval_list[0] unconditionally,
        # raising IndexError for an empty range.
        return []
    interval_list = [None] * size
    interval_list[0] = (0, size)
    tail = 1
    for head in range(size):
        start, end = interval_list[head]
        mid = (start + end) // 2
        if mid > start:
            interval_list[tail] = (start, mid)
            tail += 1
        if mid + 1 < end:
            interval_list[tail] = (mid + 1, end)
            tail += 1
        # Replace the processed interval in place with its midpoint.
        interval_list[head] = mid
    return interval_list
def search(element, indices, values, sorted=True):
    """Locate ``element`` within ``values``.

    Args:
        element (int or str): The element to search for.
        indices (list of int): When ``sorted`` is True, the (start, end)
            limits of the search; otherwise the argsort of ``values``.
        values (list of int or str): The list of values.
        sorted (bool, optional): True if ``values`` are already sorted.

    Returns:
        tuple of int: (position among the considered indices,
        index of the selected value in ``values``).
    """
    if sorted:
        # Sorted values: positions map to themselves; only the two limits
        # carried in `indices` are needed.
        lo, hi = indices[0], indices[1]

        def get_index(i):
            return i
    else:
        # Unsorted values: search over the full argsort, translating
        # positions through `indices` (clamped to its last entry).
        lo, hi = 0, len(indices)

        def get_index(i):
            return indices[min(i, len(indices) - 1)]

    return binary_search(element, values, lo, hi, get_index)
def binary_search(element, values, start, end, get_index):
    """Lower-bound binary search of ``element`` over ``values[get_index(i)]``
    for i in [start, end); returns (position, get_index(position))."""
    lo, hi = start, end
    while lo < hi:
        mid = (lo + hi) // 2
        if values[get_index(mid)] < element:
            lo = mid + 1
        else:
            hi = mid
    return (lo, get_index(lo))
def most_similar(indices, values, similarity):
    """Return (score, rank, index) for the value maximizing ``similarity``.

    Iterates ``values[idx]`` for each idx in ``indices``, short-circuiting
    as soon as a perfect score of 1 is found.  Ties keep the earliest
    candidate.  Returns (None, None, None) when ``indices`` is empty.
    """
    best_score, best_rank, best_index = None, None, None
    for rank, idx in enumerate(indices):
        score = similarity(values[idx])
        if score == 1:
            return score, rank, idx
        if best_score is None or score > best_score:
            best_score, best_rank, best_index = score, rank, idx
    return best_score, best_rank, best_index
/AltAnalyze-2.1.3.15.tar.gz/AltAnalyze-2.1.3.15/altanalyze/stats_scripts/mpmath/matrices/matrices.py | from ..libmp.backend import xrange
# TODO: interpret list as vectors (for multiplication)
rowsep = '\n'  # separator between matrix rows in string output
colsep = ' '   # separator between columns within a row
class _matrix(object):
"""
Numerical matrix.
Specify the dimensions or the data as a nested list.
Elements default to zero.
Use a flat list to create a column vector easily.
By default, only mpf is used to store the data. You can specify another type
using force_type=type. It's possible to specify None.
Make sure force_type(force_type()) is fast.
Creating matrices
-----------------
Matrices in mpmath are implemented using dictionaries. Only non-zero values
are stored, so it is cheap to represent sparse matrices.
The most basic way to create one is to use the ``matrix`` class directly.
You can create an empty matrix specifying the dimensions:
>>> from mpmath import *
>>> mp.dps = 15
>>> matrix(2)
matrix(
[['0.0', '0.0'],
['0.0', '0.0']])
>>> matrix(2, 3)
matrix(
[['0.0', '0.0', '0.0'],
['0.0', '0.0', '0.0']])
Calling ``matrix`` with one dimension will create a square matrix.
To access the dimensions of a matrix, use the ``rows`` or ``cols`` keyword:
>>> A = matrix(3, 2)
>>> A
matrix(
[['0.0', '0.0'],
['0.0', '0.0'],
['0.0', '0.0']])
>>> A.rows
3
>>> A.cols
2
You can also change the dimension of an existing matrix. This will set the
new elements to 0. If the new dimension is smaller than before, the
concerning elements are discarded:
>>> A.rows = 2
>>> A
matrix(
[['0.0', '0.0'],
['0.0', '0.0']])
Internally ``mpmathify`` is used every time an element is set. This
is done using the syntax A[row,column], counting from 0:
>>> A = matrix(2)
>>> A[1,1] = 1 + 1j
>>> A
matrix(
[['0.0', '0.0'],
['0.0', '(1.0 + 1.0j)']])
You can use the keyword ``force_type`` to change the function which is
called on every new element:
>>> matrix(2, 5, force_type=int)
matrix(
[[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0]])
A more comfortable way to create a matrix lets you use nested lists:
>>> matrix([[1, 2], [3, 4]])
matrix(
[['1.0', '2.0'],
['3.0', '4.0']])
If you want to preserve the type of the elements you can use
``force_type=None``:
>>> matrix([[1, 2.5], [1j, mpf(2)]], force_type=None)
matrix(
[[1, 2.5],
[1j, '2.0']])
Convenient advanced functions are available for creating various standard
matrices, see ``zeros``, ``ones``, ``diag``, ``eye``, ``randmatrix`` and
``hilbert``.
Vectors
.......
Vectors may also be represented by the ``matrix`` class (with rows = 1 or cols = 1).
For vectors there are some things which make life easier. A column vector can
be created using a flat list, a row vectors using an almost flat nested list::
>>> matrix([1, 2, 3])
matrix(
[['1.0'],
['2.0'],
['3.0']])
>>> matrix([[1, 2, 3]])
matrix(
[['1.0', '2.0', '3.0']])
Optionally vectors can be accessed like lists, using only a single index::
>>> x = matrix([1, 2, 3])
>>> x[1]
mpf('2.0')
>>> x[1,0]
mpf('2.0')
Other
.....
Like you probably expected, matrices can be printed::
>>> print randmatrix(3) # doctest:+SKIP
[ 0.782963853573023 0.802057689719883 0.427895717335467]
[0.0541876859348597 0.708243266653103 0.615134039977379]
[ 0.856151514955773 0.544759264818486 0.686210904770947]
Use ``nstr`` or ``nprint`` to specify the number of digits to print::
>>> nprint(randmatrix(5), 3) # doctest:+SKIP
[2.07e-1 1.66e-1 5.06e-1 1.89e-1 8.29e-1]
[6.62e-1 6.55e-1 4.47e-1 4.82e-1 2.06e-2]
[4.33e-1 7.75e-1 6.93e-2 2.86e-1 5.71e-1]
[1.01e-1 2.53e-1 6.13e-1 3.32e-1 2.59e-1]
[1.56e-1 7.27e-2 6.05e-1 6.67e-2 2.79e-1]
As matrices are mutable, you will need to copy them sometimes::
>>> A = matrix(2)
>>> A
matrix(
[['0.0', '0.0'],
['0.0', '0.0']])
>>> B = A.copy()
>>> B[0,0] = 1
>>> B
matrix(
[['1.0', '0.0'],
['0.0', '0.0']])
>>> A
matrix(
[['0.0', '0.0'],
['0.0', '0.0']])
Finally, it is possible to convert a matrix to a nested list. This is very useful,
as most Python libraries involving matrices or arrays (namely NumPy or SymPy)
support this format::
>>> B.tolist()
[[mpf('1.0'), mpf('0.0')], [mpf('0.0'), mpf('0.0')]]
Matrix operations
-----------------
You can add and subtract matrices of compatible dimensions::
>>> A = matrix([[1, 2], [3, 4]])
>>> B = matrix([[-2, 4], [5, 9]])
>>> A + B
matrix(
[['-1.0', '6.0'],
['8.0', '13.0']])
>>> A - B
matrix(
[['3.0', '-2.0'],
['-2.0', '-5.0']])
>>> A + ones(3) # doctest:+ELLIPSIS
Traceback (most recent call last):
...
ValueError: incompatible dimensions for addition
It is possible to multiply or add matrices and scalars. In the latter case the
operation will be done element-wise::
>>> A * 2
matrix(
[['2.0', '4.0'],
['6.0', '8.0']])
>>> A / 4
matrix(
[['0.25', '0.5'],
['0.75', '1.0']])
>>> A - 1
matrix(
[['0.0', '1.0'],
['2.0', '3.0']])
Of course you can perform matrix multiplication, if the dimensions are
compatible::
>>> A * B
matrix(
[['8.0', '22.0'],
['14.0', '48.0']])
>>> matrix([[1, 2, 3]]) * matrix([[-6], [7], [-2]])
matrix(
[['2.0']])
You can raise powers of square matrices::
>>> A**2
matrix(
[['7.0', '10.0'],
['15.0', '22.0']])
Negative powers will calculate the inverse::
>>> A**-1
matrix(
[['-2.0', '1.0'],
['1.5', '-0.5']])
>>> A * A**-1
matrix(
[['1.0', '1.0842021724855e-19'],
['-2.16840434497101e-19', '1.0']])
Matrix transposition is straightforward::
>>> A = ones(2, 3)
>>> A
matrix(
[['1.0', '1.0', '1.0'],
['1.0', '1.0', '1.0']])
>>> A.T
matrix(
[['1.0', '1.0'],
['1.0', '1.0'],
['1.0', '1.0']])
Norms
.....
Sometimes you need to know how "large" a matrix or vector is. Due to their
multidimensional nature it's not possible to compare them, but there are
several functions to map a matrix or a vector to a positive real number, the
so called norms.
For vectors the p-norm is intended, usually the 1-, the 2- and the oo-norm are
used.
>>> x = matrix([-10, 2, 100])
>>> norm(x, 1)
mpf('112.0')
>>> norm(x, 2)
mpf('100.5186549850325')
>>> norm(x, inf)
mpf('100.0')
Please note that the 2-norm is the most used one, though it is more expensive
to calculate than the 1- or oo-norm.
It is possible to generalize some vector norms to matrix norm::
>>> A = matrix([[1, -1000], [100, 50]])
>>> mnorm(A, 1)
mpf('1050.0')
>>> mnorm(A, inf)
mpf('1001.0')
>>> mnorm(A, 'F')
mpf('1006.2310867787777')
The last norm (the "Frobenius-norm") is an approximation for the 2-norm, which
is hard to calculate and not available. The Frobenius-norm lacks some
mathematical properties you might expect from a norm.
"""
    def __init__(self, *args, **kwargs):
        """Construct a matrix from dimensions, a (nested) list, another
        matrix, or any object exposing ``tolist()`` (e.g. a numpy array).

        Keyword ``force_type`` selects the converter applied to elements
        (defaults to ``ctx.convert``).
        """
        self.__data = {}
        # LU decompostion cache, this is useful when solving the same system
        # multiple times, when calculating the inverse and when calculating the
        # determinant
        self._LU = None
        convert = kwargs.get('force_type', self.ctx.convert)
        if isinstance(args[0], (list, tuple)):
            if isinstance(args[0][0], (list, tuple)):
                # interpret nested list as matrix
                A = args[0]
                self.__rows = len(A)
                self.__cols = len(A[0])
                for i, row in enumerate(A):
                    for j, a in enumerate(row):
                        self[i, j] = convert(a)
            else:
                # interpret flat list as a column vector (cols == 1)
                v = args[0]
                self.__rows = len(v)
                self.__cols = 1
                for i, e in enumerate(v):
                    self[i, 0] = e
        elif isinstance(args[0], int):
            # create empty matrix of given dimensions
            if len(args) == 1:
                self.__rows = self.__cols = args[0]
            else:
                assert isinstance(args[1], int), 'expected int'
                self.__rows = args[0]
                self.__cols = args[1]
        elif isinstance(args[0], _matrix):
            # copy another matrix, re-converting every element
            A = args[0].copy()
            self.__data = A._matrix__data
            self.__rows = A._matrix__rows
            self.__cols = A._matrix__cols
            convert = kwargs.get('force_type', self.ctx.convert)
            for i in xrange(A.__rows):
                for j in xrange(A.__cols):
                    A[i,j] = convert(A[i,j])
        elif hasattr(args[0], 'tolist'):
            # anything list-convertible (numpy array, sympy matrix, ...)
            A = self.ctx.matrix(args[0].tolist())
            self.__data = A._matrix__data
            self.__rows = A._matrix__rows
            self.__cols = A._matrix__cols
        else:
            raise TypeError('could not interpret given arguments')
def apply(self, f):
"""
Return a copy of self with the function `f` applied elementwise.
"""
new = self.ctx.matrix(self.__rows, self.__cols)
for i in xrange(self.__rows):
for j in xrange(self.__cols):
new[i,j] = f(self[i,j])
return new
    def __nstr__(self, n=None, **kwargs):
        """Format the matrix as aligned bracketed rows; `n` (if given)
        limits the number of digits via ``ctx.nstr``."""
        # Build table of string representations of the elements
        res = []
        # Track per-column max lengths for pretty alignment
        maxlen = [0] * self.cols
        for i in range(self.rows):
            res.append([])
            for j in range(self.cols):
                if n:
                    string = self.ctx.nstr(self[i,j], n, **kwargs)
                else:
                    string = str(self[i,j])
                res[-1].append(string)
                maxlen[j] = max(len(string), maxlen[j])
        # Patch strings together
        for i, row in enumerate(res):
            for j, elem in enumerate(row):
                # Pad each element up to maxlen so the columns line up
                row[j] = elem.rjust(maxlen[j])
            res[i] = "[" + colsep.join(row) + "]"
        return rowsep.join(res)
    def __str__(self):
        # Human-readable form: delegate to __nstr__ with full precision.
        return self.__nstr__()
    def _toliststr(self, avoid_type=False):
        """
        Create a list string from a matrix.

        If avoid_type: avoid multiple 'mpf's (mpf elements are rendered as
        quoted plain strings instead of their repr).
        """
        # XXX: should be something like self.ctx._types
        typ = self.ctx.mpf
        s = '['
        for i in xrange(self.__rows):
            s += '['
            for j in xrange(self.__cols):
                if not avoid_type or not isinstance(self[i,j], typ):
                    a = repr(self[i,j])
                else:
                    a = "'" + str(self[i,j]) + "'"
                s += a + ', '
            s = s[:-2]  # drop the trailing ', ' after the last column
            s += '],\n '
        s = s[:-3]  # drop the trailing ',\n ' after the last row
        s += ']'
        return s
def tolist(self):
"""
Convert the matrix to a nested list.
"""
return [[self[i,j] for j in range(self.__cols)] for i in range(self.__rows)]
    def __repr__(self):
        # In pretty mode repr is the plain string form; otherwise emit a
        # 'matrix([...])' constructor-style expression.
        if self.ctx.pretty:
            return self.__str__()
        s = 'matrix(\n'
        s += self._toliststr(avoid_type=True) + ')'
        return s
def __get_element(self, key):
'''
Fast extraction of the i,j element from the matrix
This function is for private use only because is unsafe:
1. Does not check on the value of key it expects key to be a integer tuple (i,j)
2. Does not check bounds
'''
if key in self.__data:
return self.__data[key]
else:
return self.ctx.zero
def __set_element(self, key, value):
'''
Fast assignment of the i,j element in the matrix
This function is unsafe:
1. Does not check on the value of key it expects key to be a integer tuple (i,j)
2. Does not check bounds
3. Does not check the value type
'''
if value: # only store non-zeros
self.__data[key] = value
elif key in self.__data:
del self.__data[key]
    def __getitem__(self, key):
        '''
        Return a single element or a sub-matrix.

        Vector-style single indices are accepted for 1xN / Nx1 matrices
        (``x[1]``), and slice indices produce a new matrix, e.g.
        ``B = A[:, 2:6]``.
        '''
        # Convert vector to matrix indexing
        if isinstance(key, int) or isinstance(key,slice):
            # only sufficent for vectors
            if self.__rows == 1:
                key = (0, key)
            elif self.__cols == 1:
                key = (key, 0)
            else:
                raise IndexError('insufficient indices for matrix')

        if isinstance(key[0],slice) or isinstance(key[1],slice):
            # Rows
            if isinstance(key[0],slice):
                # Check bounds
                if (key[0].start is None or key[0].start >= 0) and \
                    (key[0].stop is None or key[0].stop <= self.__rows+1):
                    # Generate indices
                    rows = xrange(*key[0].indices(self.__rows))
                else:
                    raise IndexError('Row index out of bounds')
            else:
                # Single row
                rows = [key[0]]
            # Columns
            if isinstance(key[1],slice):
                # Check bounds
                if (key[1].start is None or key[1].start >= 0) and \
                    (key[1].stop is None or key[1].stop <= self.__cols+1):
                    # Generate indices
                    columns = xrange(*key[1].indices(self.__cols))
                else:
                    raise IndexError('Column index out of bounds')
            else:
                # Single column
                columns = [key[1]]

            # Create matrix slice
            m = self.ctx.matrix(len(rows),len(columns))

            # Assign elements to the output matrix
            for i,x in enumerate(rows):
                for j,y in enumerate(columns):
                    m.__set_element((i,j),self.__get_element((x,y)))

            return m

        else:
            # single element extraction
            if key[0] >= self.__rows or key[1] >= self.__cols:
                raise IndexError('matrix index out of range')
            # Only non-zero entries are stored; anything else is zero.
            if key in self.__data:
                return self.__data[key]
            else:
                return self.ctx.zero
    def __setitem__(self, key, value):
        '''
        Set a single element or a slice.

        Supports assigning a scalar to a slice (``A[:, 2:6] = 2.5``) or a
        matching-shape matrix to a slice (``A[3, :] = B``).  Vector-style
        single indices are accepted for 1xN / Nx1 matrices.  Any write
        invalidates the cached LU decomposition.
        '''
        # setitem function for mp matrix class with slice index enabled
        # it allows the following assingments
        # scalar to a slice of the matrix
        # A[:,2:6] = 2.5
        # submatrix to matrix (the value matrix should be the same size as the slice size)
        # A[3,:] = B where A is n x m  and B is n x 1
        # Convert vector to matrix indexing
        if isinstance(key, int) or isinstance(key,slice):
            # only sufficent for vectors
            if self.__rows == 1:
                key = (0, key)
            elif self.__cols == 1:
                key = (key, 0)
            else:
                raise IndexError('insufficient indices for matrix')
        # Slice indexing
        if isinstance(key[0],slice) or isinstance(key[1],slice):
            # Rows
            if isinstance(key[0],slice):
                # Check bounds
                if (key[0].start is None or key[0].start >= 0) and \
                    (key[0].stop is None or key[0].stop <= self.__rows+1):
                    # generate row indices
                    rows = xrange(*key[0].indices(self.__rows))
                else:
                    raise IndexError('Row index out of bounds')
            else:
                # Single row
                rows = [key[0]]
            # Columns
            if isinstance(key[1],slice):
                # Check bounds
                if (key[1].start is None or key[1].start >= 0) and \
                    (key[1].stop is None or key[1].stop <= self.__cols+1):
                    # Generate column indices
                    columns = xrange(*key[1].indices(self.__cols))
                else:
                    raise IndexError('Column index out of bounds')
            else:
                # Single column
                columns = [key[1]]
            # Assign slice with a scalar
            if isinstance(value,self.ctx.matrix):
                # Assign elements to matrix if input and output dimensions match
                if len(rows) == value.rows and len(columns) == value.cols:
                    for i,x in enumerate(rows):
                        for j,y in enumerate(columns):
                            self.__set_element((x,y), value.__get_element((i,j)))
                else:
                    raise ValueError('Dimensions do not match')
            else:
                # Assign slice with scalars
                value = self.ctx.convert(value)
                for i in rows:
                    for j in columns:
                        self.__set_element((i,j), value)
        else:
            # Single element assingment
            # Check bounds
            if key[0] >= self.__rows or key[1] >= self.__cols:
                raise IndexError('matrix index out of range')
            # Convert and store value
            value = self.ctx.convert(value)
            if value: # only store non-zeros
                self.__data[key] = value
            elif key in self.__data:
                del self.__data[key]

        # Any write makes the cached LU factorization stale.
        if self._LU:
            self._LU = None
        return
def __iter__(self):
for i in xrange(self.__rows):
for j in xrange(self.__cols):
yield self[i,j]
    def __mul__(self, other):
        """Matrix-matrix product, or element-wise scalar multiplication.

        If *other* is a matrix, self.cols must equal other.rows; any
        non-matrix operand is treated as a scalar.
        """
        if isinstance(other, self.ctx.matrix):
            # dot multiplication TODO: use Strassen's method?
            if self.__cols != other.__rows:
                raise ValueError('dimensions not compatible for multiplication')
            new = self.ctx.matrix(self.__rows, other.__cols)
            for i in xrange(self.__rows):
                for j in xrange(other.__cols):
                    # fdot accumulates the inner product at context precision
                    new[i, j] = self.ctx.fdot((self[i,k], other[k,j])
                                              for k in xrange(other.__rows))
            return new
        else:
            # try scalar multiplication
            new = self.ctx.matrix(self.__rows, self.__cols)
            for i in xrange(self.__rows):
                for j in xrange(self.__cols):
                    new[i, j] = other * self[i, j]
            return new
    def __rmul__(self, other):
        """Reflected multiplication: scalar * matrix."""
        # assume other is scalar and thus commutative
        # (matrix * matrix is always dispatched to __mul__ first)
        assert not isinstance(other, self.ctx.matrix)
        return self.__mul__(other)
    def __pow__(self, other):
        """Integer power of a square matrix via square-and-multiply.

        Negative exponents invert the positive power at the end.
        Raises ValueError for non-integer exponents or non-square matrices.
        """
        # avoid cyclic import problems
        #from linalg import inverse
        if not isinstance(other, int):
            raise ValueError('only integer exponents are supported')
        if not self.__rows == self.__cols:
            raise ValueError('only powers of square matrices are defined')
        n = other
        if n == 0:
            # A**0 is the identity
            return self.ctx.eye(self.__rows)
        if n < 0:
            n = -n
            neg = True
        else:
            neg = False
        # binary (square-and-multiply) exponentiation:
        # multiply y by the current power of the base for each set bit of n
        i = n
        y = 1
        z = self.copy()
        while i != 0:
            if i % 2 == 1:
                y = y * z
            z = z*z
            i = i // 2
        if neg:
            # A**(-n) == (A**n)**(-1)
            y = self.ctx.inverse(y)
        return y
def __div__(self, other):
# assume other is scalar and do element-wise divison
assert not isinstance(other, self.ctx.matrix)
new = self.ctx.matrix(self.__rows, self.__cols)
for i in xrange(self.__rows):
for j in xrange(self.__cols):
new[i,j] = self[i,j] / other
return new
__truediv__ = __div__
def __add__(self, other):
if isinstance(other, self.ctx.matrix):
if not (self.__rows == other.__rows and self.__cols == other.__cols):
raise ValueError('incompatible dimensions for addition')
new = self.ctx.matrix(self.__rows, self.__cols)
for i in xrange(self.__rows):
for j in xrange(self.__cols):
new[i,j] = self[i,j] + other[i,j]
return new
else:
# assume other is scalar and add element-wise
new = self.ctx.matrix(self.__rows, self.__cols)
for i in xrange(self.__rows):
for j in xrange(self.__cols):
new[i,j] += self[i,j] + other
return new
    def __radd__(self, other):
        # addition is commutative, so scalar + matrix reuses __add__
        return self.__add__(other)
def __sub__(self, other):
if isinstance(other, self.ctx.matrix) and not (self.__rows == other.__rows
and self.__cols == other.__cols):
raise ValueError('incompatible dimensions for substraction')
return self.__add__(other * (-1))
    def __neg__(self):
        # scalar multiplication by -1 (dispatches through __rmul__)
        return (-1) * self
    def __rsub__(self, other):
        # scalar - matrix == (-matrix) + scalar
        return -self + other
def __eq__(self, other):
return self.__rows == other.__rows and self.__cols == other.__cols \
and self.__data == other.__data
def __len__(self):
if self.rows == 1:
return self.cols
elif self.cols == 1:
return self.rows
else:
return self.rows # do it like numpy
    def __getrows(self):
        return self.__rows
    def __setrows(self, value):
        # shrinking the matrix drops stored elements whose row index
        # no longer exists; growing simply records the new size
        for key in self.__data.copy():
            if key[0] >= value:
                del self.__data[key]
        self.__rows = value
    rows = property(__getrows, __setrows, doc='number of rows')
    def __getcols(self):
        return self.__cols
    def __setcols(self, value):
        # shrinking the matrix drops stored elements whose column index
        # no longer exists; growing simply records the new size
        for key in self.__data.copy():
            if key[1] >= value:
                del self.__data[key]
        self.__cols = value
    cols = property(__getcols, __setcols, doc='number of columns')
def transpose(self):
new = self.ctx.matrix(self.__cols, self.__rows)
for i in xrange(self.__rows):
for j in xrange(self.__cols):
new[j,i] = self[i,j]
return new
T = property(transpose)
    def conjugate(self):
        """Element-wise complex conjugate."""
        return self.apply(self.ctx.conj)
    def transpose_conj(self):
        """Conjugate (Hermitian) transpose."""
        return self.conjugate().transpose()
    H = property(transpose_conj)
def copy(self):
new = self.ctx.matrix(self.__rows, self.__cols)
new.__data = self.__data.copy()
return new
__copy__ = copy
    def column(self, n):
        """Return column n of the matrix as a new (rows x 1) column vector."""
        m = self.ctx.matrix(self.rows, 1)
        for i in range(self.rows):
            m[i] = self[i,n]
        return m
class MatrixMethods(object):
    """Mix-in installing matrix constructors and norms on a numeric context.

    NOTE: methods here deliberately take ``ctx`` (the numeric context)
    as the implicit first argument instead of ``self``; they are grafted
    onto context objects.
    """
    def __init__(ctx):
        # XXX: subclass
        # build a context-bound subclass of _matrix so that matrix
        # operations pick up this context's conversion rules
        ctx.matrix = type('matrix', (_matrix,), {})
        ctx.matrix.ctx = ctx
        ctx.matrix.convert = ctx.convert
    def eye(ctx, n, **kwargs):
        """
        Create square identity matrix n x n.
        """
        A = ctx.matrix(n, **kwargs)
        for i in xrange(n):
            A[i,i] = 1
        return A
    def diag(ctx, diagonal, **kwargs):
        """
        Create square diagonal matrix using given list.
        Example:
        >>> from mpmath import diag, mp
        >>> mp.pretty = False
        >>> diag([1, 2, 3])
        matrix(
        [['1.0', '0.0', '0.0'],
         ['0.0', '2.0', '0.0'],
         ['0.0', '0.0', '3.0']])
        """
        A = ctx.matrix(len(diagonal), **kwargs)
        for i in xrange(len(diagonal)):
            A[i,i] = diagonal[i]
        return A
    def zeros(ctx, *args, **kwargs):
        """
        Create matrix m x n filled with zeros.
        One given dimension will create square matrix n x n.
        Example:
        >>> from mpmath import zeros, mp
        >>> mp.pretty = False
        >>> zeros(2)
        matrix(
        [['0.0', '0.0'],
         ['0.0', '0.0']])
        """
        if len(args) == 1:
            m = n = args[0]
        elif len(args) == 2:
            m = args[0]
            n = args[1]
        else:
            raise TypeError('zeros expected at most 2 arguments, got %i' % len(args))
        A = ctx.matrix(m, n, **kwargs)
        for i in xrange(m):
            for j in xrange(n):
                A[i,j] = 0
        return A
    def ones(ctx, *args, **kwargs):
        """
        Create matrix m x n filled with ones.
        One given dimension will create square matrix n x n.
        Example:
        >>> from mpmath import ones, mp
        >>> mp.pretty = False
        >>> ones(2)
        matrix(
        [['1.0', '1.0'],
         ['1.0', '1.0']])
        """
        if len(args) == 1:
            m = n = args[0]
        elif len(args) == 2:
            m = args[0]
            n = args[1]
        else:
            raise TypeError('ones expected at most 2 arguments, got %i' % len(args))
        A = ctx.matrix(m, n, **kwargs)
        for i in xrange(m):
            for j in xrange(n):
                A[i,j] = 1
        return A
    def hilbert(ctx, m, n=None):
        """
        Create (pseudo) hilbert matrix m x n.
        One given dimension will create hilbert matrix n x n.
        The matrix is very ill-conditioned and symmetric, positive definite if
        square.
        """
        if n is None:
            n = m
        A = ctx.matrix(m, n)
        for i in xrange(m):
            for j in xrange(n):
                # classic Hilbert entry: 1 / (i + j + 1)
                A[i,j] = ctx.one / (i + j + 1)
        return A
    def randmatrix(ctx, m, n=None, min=0, max=1, **kwargs):
        """
        Create a random m x n matrix.
        All values are >= min and < max.
        n defaults to m.
        Example:
        >>> from mpmath import randmatrix
        >>> randmatrix(2) # doctest:+SKIP
        matrix(
        [['0.53491598236191806', '0.57195669543302752'],
         ['0.85589992269513615', '0.82444367501382143']])
        """
        if not n:
            n = m
        A = ctx.matrix(m, n, **kwargs)
        for i in xrange(m):
            for j in xrange(n):
                # scale a uniform [0, 1) sample into [min, max)
                A[i,j] = ctx.rand() * (max - min) + min
        return A
    def swap_row(ctx, A, i, j):
        """
        Swap row i with row j.
        """
        if i == j:
            return
        if isinstance(A, ctx.matrix):
            for k in xrange(A.cols):
                A[i,k], A[j,k] = A[j,k], A[i,k]
        elif isinstance(A, list):
            A[i], A[j] = A[j], A[i]
        else:
            raise TypeError('could not interpret type')
    def extend(ctx, A, b):
        """
        Extend matrix A with column b and return result.
        """
        assert isinstance(A, ctx.matrix)
        assert A.rows == len(b)
        # work on a copy so the caller's matrix is untouched
        A = A.copy()
        A.cols += 1
        for i in xrange(A.rows):
            A[i, A.cols-1] = b[i]
        return A
    def norm(ctx, x, p=2):
        r"""
        Gives the entrywise `p`-norm of an iterable *x*, i.e. the vector norm
        `\left(\sum_k |x_k|^p\right)^{1/p}`, for any given `1 \le p \le \infty`.
        Special cases:
        If *x* is not iterable, this just returns ``absmax(x)``.
        ``p=1`` gives the sum of absolute values.
        ``p=2`` is the standard Euclidean vector norm.
        ``p=inf`` gives the magnitude of the largest element.
        For *x* a matrix, ``p=2`` is the Frobenius norm.
        For operator matrix norms, use :func:`~mpmath.mnorm` instead.
        You can use the string 'inf' as well as float('inf') or mpf('inf')
        to specify the infinity norm.
        **Examples**
        >>> from mpmath import *
        >>> mp.dps = 15; mp.pretty = False
        >>> x = matrix([-10, 2, 100])
        >>> norm(x, 1)
        mpf('112.0')
        >>> norm(x, 2)
        mpf('100.5186549850325')
        >>> norm(x, inf)
        mpf('100.0')
        """
        try:
            iter(x)
        except TypeError:
            # scalar: the p-norm degenerates to the absolute value
            return ctx.absmax(x)
        if type(p) is not int:
            p = ctx.convert(p)
        if p == ctx.inf:
            return max(ctx.absmax(i) for i in x)
        elif p == 1:
            return ctx.fsum(x, absolute=1)
        elif p == 2:
            return ctx.sqrt(ctx.fsum(x, absolute=1, squared=1))
        elif p > 1:
            return ctx.nthroot(ctx.fsum(abs(i)**p for i in x), p)
        else:
            raise ValueError('p has to be >= 1')
    def mnorm(ctx, A, p=1):
        r"""
        Gives the matrix (operator) `p`-norm of A. Currently ``p=1`` and ``p=inf``
        are supported:
        ``p=1`` gives the 1-norm (maximal column sum)
        ``p=inf`` gives the `\infty`-norm (maximal row sum).
        You can use the string 'inf' as well as float('inf') or mpf('inf')
        ``p=2`` (not implemented) for a square matrix is the usual spectral
        matrix norm, i.e. the largest singular value.
        ``p='f'`` (or 'F', 'fro', 'Frobenius', 'frobenius') gives the
        Frobenius norm, which is the elementwise 2-norm. The Frobenius norm is an
        approximation of the spectral norm and satisfies
        .. math ::
            \frac{1}{\sqrt{\mathrm{rank}(A)}} \|A\|_F \le \|A\|_2 \le \|A\|_F
        The Frobenius norm lacks some mathematical properties that might
        be expected of a norm.
        For general elementwise `p`-norms, use :func:`~mpmath.norm` instead.
        **Examples**
        >>> from mpmath import *
        >>> mp.dps = 15; mp.pretty = False
        >>> A = matrix([[1, -1000], [100, 50]])
        >>> mnorm(A, 1)
        mpf('1050.0')
        >>> mnorm(A, inf)
        mpf('1001.0')
        >>> mnorm(A, 'F')
        mpf('1006.2310867787777')
        """
        A = ctx.matrix(A)
        if type(p) is not int:
            # any prefix of 'frobenius' (case-insensitive) selects the
            # Frobenius norm, i.e. the elementwise 2-norm
            if type(p) is str and 'frobenius'.startswith(p.lower()):
                return ctx.norm(A, 2)
            p = ctx.convert(p)
        m, n = A.rows, A.cols
        if p == 1:
            return max(ctx.fsum((A[i,j] for i in xrange(m)), absolute=1) for j in xrange(n))
        elif p == ctx.inf:
            return max(ctx.fsum((A[i,j] for j in xrange(n)), absolute=1) for i in xrange(m))
        else:
            raise NotImplementedError("matrix p-norm for arbitrary p")
if __name__ == '__main__':
import doctest
doctest.testmod() | PypiClean |
/FLAML-2.0.2-py3-none-any.whl/flaml/tune/searcher/search_thread.py | from typing import Dict, Optional
import numpy as np
# Prefer ray's Searcher base class when a sufficiently new ray is
# installed; otherwise fall back to the bundled implementation.
try:
    from ray import __version__ as ray_version
    # NOTE(review): lexicographic string comparison — e.g. "1.9.0" compares
    # greater than "1.10.0"; confirm this matches the intended minimum.
    assert ray_version >= "1.10.0"
    if ray_version.startswith("1."):
        from ray.tune.suggest import Searcher
    else:
        from ray.tune.search import Searcher
except (ImportError, AssertionError):
    from .suggestion import Searcher
from .flow2 import FLOW2
from ..space import add_cost_to_space, unflatten_hierarchical
from ..result import TIME_TOTAL_S
import logging
logger = logging.getLogger(__name__)
class SearchThread:
    """Class of global or local search thread.

    Wraps an underlying ``Searcher`` (FLOW2 for local search) and tracks
    per-thread statistics: cost spent, best objective seen, improvement
    speed, and an estimated cost for improvement (eci) used to prioritize
    threads.
    """
    def __init__(
        self,
        mode: str = "min",
        search_alg: Optional[Searcher] = None,
        cost_attr: Optional[str] = TIME_TOTAL_S,
        eps: Optional[float] = 1.0,
    ):
        """When search_alg is omitted, use local search FLOW2."""
        self._search_alg = search_alg
        self._is_ls = isinstance(search_alg, FLOW2)
        self._mode = mode
        # internally everything is minimized; flip sign for "max" metrics
        self._metric_op = 1 if mode == "min" else -1
        self.cost_best = self.cost_last = self.cost_total = self.cost_best1 = getattr(search_alg, "cost_incumbent", 0)
        self._eps = eps
        self.cost_best2 = 0
        self.obj_best1 = self.obj_best2 = getattr(search_alg, "best_obj", np.inf)  # inherently minimize
        self.best_result = None
        # eci: estimated cost for improvement
        self.eci = self.cost_best
        self.priority = self.speed = 0
        self._init_config = True
        self.running = 0  # the number of running trials from the thread
        self.cost_attr = cost_attr
        if search_alg:
            self.space = self._space = search_alg.space  # unflattened space
            if self.space and not isinstance(search_alg, FLOW2) and isinstance(search_alg._space, dict):
                # remember const config
                # NOTE(review): _const is only defined on this path; suggest()
                # reads it under the same dict-space condition — verify they
                # stay in sync if either condition changes.
                self._const = add_cost_to_space(self.space, {}, {})
    def suggest(self, trial_id: str) -> Optional[Dict]:
        """Use the suggest() of the underlying search algorithm."""
        if isinstance(self._search_alg, FLOW2):
            config = self._search_alg.suggest(trial_id)
        else:
            try:
                config = self._search_alg.suggest(trial_id)
                if isinstance(self._search_alg._space, dict):
                    # re-attach constant entries stripped from the space
                    config.update(self._const)
                else:
                    # define by run
                    config, self.space = unflatten_hierarchical(config, self._space)
            except FloatingPointError:
                logger.warning("The global search method raises FloatingPointError. " "Ignoring for this iteration.")
                config = None
        if config is not None:
            self.running += 1
        return config
    def update_priority(self, eci: Optional[float] = 0):
        # optimistic projection: assume the thread keeps improving at its
        # current speed for `eci` more cost
        self.priority = eci * self.speed - self.obj_best1
    def update_eci(self, metric_target: float, max_speed: Optional[float] = np.inf):
        # calculate eci: estimated cost for improvement over metric_target
        best_obj = metric_target * self._metric_op
        if not self.speed:
            self.speed = max_speed
        self.eci = max(self.cost_total - self.cost_best1, self.cost_best1 - self.cost_best2)
        if self.obj_best1 > best_obj and self.speed > 0:
            self.eci = max(self.eci, 2 * (self.obj_best1 - best_obj) / self.speed)
    def _update_speed(self):
        # calculate speed; use 0 for invalid speed temporarily
        if self.obj_best2 > self.obj_best1:
            # discount the speed if there are unfinished trials
            self.speed = (
                (self.obj_best2 - self.obj_best1) / self.running / (max(self.cost_total - self.cost_best2, self._eps))
            )
        else:
            self.speed = 0
    def on_trial_complete(self, trial_id: str, result: Optional[Dict] = None, error: bool = False):
        """Update the statistics of the thread."""
        if not self._search_alg:
            return
        if not hasattr(self._search_alg, "_ot_trials") or (not error and trial_id in self._search_alg._ot_trials):
            # optuna doesn't handle error
            if self._is_ls or not self._init_config:
                try:
                    self._search_alg.on_trial_complete(trial_id, result, error)
                except RuntimeError as e:
                    # rs is used in place of optuna sometimes
                    if not str(e).endswith("has already finished and can not be updated."):
                        raise e
            else:
                # init config is not proposed by self._search_alg
                # under this thread
                self._init_config = False
        if result:
            # accumulate the reported cost (defaults to 1 when missing)
            self.cost_last = result.get(self.cost_attr, 1)
            self.cost_total += self.cost_last
            if self._search_alg.metric in result and (getattr(self._search_alg, "lexico_objectives", None) is None):
                # TODO: Improve this behavior. When lexico_objectives is provided to CFO,
                # related variables are not callable.
                obj = result[self._search_alg.metric] * self._metric_op
                if obj < self.obj_best1 or self.best_result is None:
                    # shift the previous incumbent into the *2 slots
                    self.cost_best2 = self.cost_best1
                    self.cost_best1 = self.cost_total
                    self.obj_best2 = obj if np.isinf(self.obj_best1) else self.obj_best1
                    self.obj_best1 = obj
                    self.cost_best = self.cost_last
                    self.best_result = result
            if getattr(self._search_alg, "lexico_objectives", None) is None:
                # TODO: Improve this behavior. When lexico_objectives is provided to CFO,
                # related variables are not callable.
                self._update_speed()
        self.running -= 1
        assert self.running >= 0
    def on_trial_result(self, trial_id: str, result: Dict):
        # TODO update the statistics of the thread with partial result?
        if not self._search_alg:
            return
        if not hasattr(self._search_alg, "_ot_trials") or (trial_id in self._search_alg._ot_trials):
            try:
                self._search_alg.on_trial_result(trial_id, result)
            except RuntimeError as e:
                # rs is used in place of optuna sometimes
                if not str(e).endswith("has already finished and can not be updated."):
                    raise e
        new_cost = result.get(self.cost_attr, 1)
        # track the largest cost reported so far for the running trial
        if self.cost_last < new_cost:
            self.cost_last = new_cost
            # self._update_speed()
    @property
    def converged(self) -> bool:
        return self._search_alg.converged
    @property
    def resource(self) -> float:
        return self._search_alg.resource
    def reach(self, thread) -> bool:
        """Whether the incumbent can reach the incumbent of thread."""
        return self._search_alg.reach(thread._search_alg)
    @property
    def can_suggest(self) -> bool:
        """Whether the thread can suggest new configs."""
        return self._search_alg.can_suggest
/NVDA-addonTemplate-0.5.2.zip/NVDA-addonTemplate-0.5.2/NVDAAddonTemplate/data/{{cookiecutter.project_slug}}/important.md | # {{ cookiecutter.projectName }} #
{{ cookiecutter.projectName }} was written by {{ cookiecutter.authorName }}
{% if cookiecutter.addonURL != "None" %}
You can find out more about this add-on at {{ cookiecutter.addonURL }}.
{% endif %}
To build this addon, please visit [building.md]
{{ cookiecutter.authorName }}, Welcome to your new add-on. Please write instructions on how to use your addon in this file. When you compile your addon, users can get help with it by activating addon help from the list item containing your addon in the Addons manager in NVDA. Good documentation is important for your success as an add-on author. Good documentation is essential for a stable add-on, and will persuade potential users to give your add-on a try! I hope you found this add-on creation process easy. If you have any feedback, or instructions, please post to [The add-on development list](http://www.freelists.org/list/nvda-addons).
Additionally, Please review the [Add-ons development pages](http://addons.nvda-project.org/dev.en.html), and you are strongly encouraged to submit your add-on for review. The review process checks for 5 things.
1. Is your code legal? (It must be GPL V.2, since NVDA is GPL V.2 and your add-on is a part of NVDA during execution.) Also, add-ons that pirate speech synthesisers, or do other illegal things, will not be accepted.
2. Does your code do anything dangerous or outright malicious?
3. Does your code contain any obvious security vulnerabilities that affect NVDA? Note that if a security vulnerability is found, you will be asked to fix it before releasing your next version.
4. Does the add-on break functionality of NVDA in potentially annoying ways, or make NVDA hard to use? (Simple quality check).
5. Does your add-on documentation contain enough information that someone who doesn't look at the code could use it? | PypiClean |
/OctoBot-Tentacles-Manager-2.9.4.tar.gz/OctoBot-Tentacles-Manager-2.9.4/octobot_tentacles_manager/workers/install_worker.py | import asyncio
import octobot_tentacles_manager.managers as managers
import octobot_tentacles_manager.workers as workers
import octobot_tentacles_manager.models as models
import octobot_tentacles_manager.util as util
class InstallWorker(workers.TentaclesWorker):
    """Worker that installs tentacles (plugins) from a reference folder.

    Trailing extraction artifact (" | PypiClean") removed from the last line.
    """
    async def process(self, name_filter=None) -> int:
        """Install all tentacles matching name_filter; return the error count."""
        await self.tentacles_setup_manager.create_missing_tentacles_arch()
        self.reset_worker()
        self.progress = 1
        all_tentacles = util.load_tentacle_with_metadata(self.reference_tentacles_root)
        self.available_tentacles = util.load_tentacle_with_metadata(self.tentacle_path)
        self.register_error_on_missing_tentacles(all_tentacles, name_filter)
        to_install_tentacles = [tentacle
                                for tentacle in all_tentacles
                                if self._should_tentacle_be_processed(tentacle, name_filter)]
        self.total_steps = len(to_install_tentacles)
        self.register_to_process_tentacles_modules(to_install_tentacles)
        # install every selected tentacle concurrently
        await asyncio.gather(*[self._install_tentacle(tentacle) for tentacle in to_install_tentacles])
        # install profiles if any
        self._import_profiles_if_any()
        # now that profiles are imported, update tentacles setup config
        # and include missing tentacles in profile tentacles config
        self.tentacles_setup_manager.refresh_user_tentacles_setup_config_file(
            self.tentacles_setup_config_to_update,
            self.tentacles_path_or_url,
            force_update_registered_tentacles=True,
            newly_installed_tentacles=to_install_tentacles)
        self.tentacles_setup_manager.cleanup_temp_dirs()
        self.log_summary()
        return len(self.errors)
    def _should_tentacle_be_processed(self, tentacle, name_filter):
        # a None filter selects every tentacle
        return name_filter is None or tentacle.name in name_filter
    async def _install_tentacle(self, tentacle):
        """Install a single tentacle (and its requirements first)."""
        try:
            if tentacle.name not in self.processed_tentacles_modules:
                self.processed_tentacles_modules.append(tentacle.name)
                await self.handle_requirements(tentacle, self._try_install_from_requirements)
                tentacle_manager = managers.TentacleManager(tentacle, self.bot_installation_path)
                await tentacle_manager.install_tentacle(self.tentacle_path)
                managers.update_tentacle_type_init_file(tentacle, tentacle_manager.target_tentacle_path)
                if not self.quite_mode:
                    self.logger.info(f"[{self.progress}/{self.total_steps}] installed {tentacle}")
        except Exception as e:
            # record the failure but keep installing the other tentacles
            message = f"Error when installing {tentacle.name}: {e}"
            self.errors.append(message)
            self.logger.exception(e, True, message)
        finally:
            self.progress += 1
    async def _try_install_from_requirements(self, tentacle, missing_requirements):
        """Resolve and install each missing requirement of a tentacle."""
        for requirement, version in missing_requirements.items():
            if managers.TentacleManager.is_requirement_satisfied(requirement, version, tentacle,
                                                                 self.fetched_for_requirements_tentacles_versions,
                                                                 self.available_tentacles):
                to_install_tentacle = models.Tentacle.find(self.fetched_for_requirements_tentacles, requirement)
                if to_install_tentacle is not None:
                    await self._install_tentacle(to_install_tentacle)
                else:
                    raise RuntimeError(f"Can't find {requirement} tentacle required for {tentacle.name}")
    def _import_profiles_if_any(self):
        # import every profile folder shipped with the reference tentacles
        for profile_folder in managers.get_profile_folders(self.reference_tentacles_root):
            managers.import_profile(profile_folder, self.bot_install_dir, quite=self.quite_mode)
/BMeyn-0.19.2.tar.gz/BMeyn-0.19.2/README.md | # My Project

*****
[](http://hits.dwyl.com/BMeyn/temp_python_pkg)
[](https://badge.fury.io/py/BMeyn)
[]()
[]()
## Features:
- Feature 1
- Feature 2
- Feature 3
## Install:
```
pip install BMeyn
```
## Usage:
#TODO: Write usage example
## Contribution:
Please feel free to raise issues or contribute to this project. Here are some steps you can follow if you want to make a contribution
1. Clone the project to your own machine
2. Create a feature branch called: "feature/description_name"
3. Commit changes to your own branch
4. Create tests for your implementation
5. Create a pull request when you think your feature is ready
6. We will review your feature and give advice or approve it | PypiClean |
/KratosDemStructuresCouplingApplication-9.4-cp310-cp310-win_amd64.whl/KratosMultiphysics/DemStructuresCouplingApplication/sp_statistics.py | import matplotlib.pyplot as plt
import numpy as np
import h5py
# Read HDF5 file
file_name = 'sp_data.hdf5'
f = h5py.File(file_name, 'r')
# File Attributes (probe geometry and material properties)
test_id = f.attrs['test_id']
internal_radius = f.attrs['internal_radius']
external_radius = f.attrs['external_radius']
interface_radius = f.attrs['interface_radius']
thickness = f.attrs['thickness']
volume = f.attrs['volume']
real_probe_height = f.attrs['real_probe_height']
target_porosity = f.attrs['target_porosity']
porosity = f.attrs['porosity']
density = f.attrs['density']
# General factors
# height_factor scales the simulated slice up to the real probe height,
# porosity_factor corrects solid fraction to the target porosity,
# gram_factor converts kg to g
height_factor = real_probe_height / thickness
porosity_factor = (1.0-target_porosity)/(1.0-porosity)
gram_factor = 1000.0
# Total initial mass: sum of sphere masses (4/3*pi*r^3*density), scaled
initial_radii = np.array(f['0'].get('radius'))
initial_radii_3 = np.power(initial_radii,3)
initial_masses = 4.0/3.0 * np.pi * density * gram_factor * height_factor * porosity_factor * initial_radii_3
initial_mass = np.sum(initial_masses)
# print(initial_mass)
# SP (sand production) accumulators, one series per bond threshold
all_times=[]
all_pressures=[]
all_sps=[]
# Time and external applied pressure
failure_step = 175 # TODO
max_step = 2*failure_step
max_step = failure_step  # NOTE: overrides the line above; 2*failure_step is dead
times = np.zeros(max_step+1)
for i in range(max_step):
    times[i+1] = f[str(i)].attrs['time']
p_rate = 5e10 # TODO
psi_factor = 0.000145038  # Pa -> psi conversion factor
pressures = [p_rate*t*psi_factor for t in times] # Pressure in psi
t_f=times[failure_step]
# print(t_f)
# Estimated time of initial sanding. TODO
# t_is=times[90]
# t_is=times[130]
# print(t_is)
# Maximum number of intact bonds to consider spheres as SP
max_num_bonds = 6.0 # TODO: this should be an int
# One SP curve per bond-count threshold: a sphere with <= numbonds intact
# bonds in the weak region counts as produced sand
for numbonds in np.arange(0.0,max_num_bonds,1.0):
    sps = np.zeros(max_step+1)
    for i in range(max_step):
        # Read datasets for this output step
        all_radii = np.array(f[str(i)].get('radius'))
        continuum_bonds = np.array(f[str(i)].get('current_continuum_bonds'))
        xs = np.array(f[str(i)].get('x'))
        ys = np.array(f[str(i)].get('y'))
        # Separate internal and external spheres (weak region and strong region)
        xs_2 = np.power(xs,2)
        ys_2 = np.power(ys,2)
        distance_2 = xs_2 + ys_2
        # weak_radius = 0.5*(internal_radius+interface_radius)
        # if times[i+1] < t_is:
        #     weak_radius = internal_radius
        # else:
        #     weak_radius = internal_radius + (interface_radius-internal_radius)/(t_f-t_is)*(times[i+1]-t_is)
        # TODO: count all the spheres in the domain
        weak_radius = interface_radius
        internal_radii = np.where(distance_2<weak_radius**2,all_radii,0.0) # spheres between inner wall (hole) and a certain radius (weak region)
        external_radii = np.where(distance_2>=weak_radius**2,all_radii,0.0) # spheres between a certain radius and the interface wall (dem-fem wall) (strong region)
        # Eliminate spheres that are free (SP), taking special care of those falling near the hole (internal spheres)
        cont_internal_radii = np.where(continuum_bonds>numbonds,internal_radii,0.0) # We eliminate spheres with small number of bonds in the weak part of the probe
        cont_external_radii = np.where(continuum_bonds>0.0,external_radii,0.0)
        total_radii_3 = np.power(cont_internal_radii+cont_external_radii,3)
        # Compute SP: produced mass = initial mass - mass still bonded
        masses = 4.0/3.0 * np.pi * density * gram_factor * height_factor * porosity_factor * total_radii_3
        current_mass = np.sum(masses)
        current_sp = initial_mass - current_mass
        sps[i+1] = current_sp
    all_times.append(times)
    all_pressures.append(pressures)
    all_sps.append(sps)
# Experiment data: whitespace-separated columns "time pressure sp"
with open('ctw16_experiment.txt') as exp_f:
    exp_times=[]
    exp_pressures=[]
    exp_sps=[]
    for line in exp_f:
        fields = line.strip().split()
        if fields:  # skip blank lines
            exp_times.append(float(fields[0]))
            exp_pressures.append(float(fields[1]))
            exp_sps.append(float(fields[2]))
all_times.append(exp_times)
all_pressures.append(exp_pressures)
all_sps.append(exp_sps)
# Graphs
# Trailing extraction artifact (" | PypiClean") removed from the last line.
graph_name='sp_bonds_t.pdf'
graph_name='sp_bonds.pdf'  # NOTE: overrides the line above; sp_bonds_t.pdf is dead
graph_labels=['SP up to 0 intact bonds',
              'SP up to 1 intact bonds',
              'SP up to 2 intact bonds',
              'SP up to 3 intact bonds',
              'SP up to 4 intact bonds',
              'SP up to 5 intact bonds',
              'ctw16 experiment',
              ]
f = plt.figure()
# plot SP against applied pressure for each threshold and the experiment
for name, pressures, productions in zip(graph_labels, all_pressures, all_sps):
    plt.plot(pressures, productions,label=name)
#p_is = p_rate*t_is*psi_factor
#plt.axvline(x=p_is, c='k', ls='--', label='estimated numerical initial sanding')
# for name, times, productions in zip(graph_labels, all_times, all_sps):
#     plt.plot(times, productions,label=name)
# plt.axvline(x=t_is, c='k', ls='--', label='estimated numerical initial sanding')
# plt.axvline(x=t_f, c='k', ls=':', label='numerical collapse')
plt.legend(loc=2, prop={'size': 6})
plt.xlabel('p (psi)')
# plt.xlabel('Time (s)')
plt.ylabel('Sand Production (g)')
plt.title('SP depending on number of intact bonds')
f.savefig(graph_name, bbox_inches='tight')
/Adafruit_Blinka-8.20.1-py3-none-any.whl/adafruit_blinka/__init__.py | class Enum:
"""
Object supporting CircuitPython-style of static symbols
as seen with Direction.OUTPUT, Pull.UP
"""
def __repr__(self):
"""
Assumes instance will be found as attribute of own class.
Returns dot-subscripted path to instance
(assuming absolute import of containing package)
"""
cls = type(self)
for key in dir(cls):
if getattr(cls, key) is self:
return "{}.{}.{}".format(cls.__module__, cls.__qualname__, key)
return repr(self)
@classmethod
def iteritems(cls):
"""
Inspects attributes of the class for instances of the class
and returns as key,value pairs mirroring dict#iteritems
"""
for key in dir(cls):
val = getattr(cls, key)
if isinstance(cls, val):
yield (key, val)
class ContextManaged:
    """An object that automatically deinitializes hardware with a context manager."""
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # always release hardware; exceptions are not suppressed
        # (implicit None return)
        self.deinit()
    # pylint: disable=no-self-use
    def deinit(self):
        """Free any hardware used by the object."""
        # default is a no-op; subclasses override to release resources
        return
    # pylint: enable=no-self-use
class Lockable(ContextManaged):
    """An object that must be locked to prevent collisions on a microcontroller resource."""
    # class-level default; set per-instance on first try_lock()
    # NOTE(review): this is not thread-safe (check-then-set race) — appears
    # intended for cooperative single-threaded use
    _locked = False
    def try_lock(self):
        """Attempt to grab the lock. Return True on success, False if the lock is already taken."""
        if self._locked:
            return False
        self._locked = True
        return True
    def unlock(self):
        """Release the lock so others may use the resource.

        Raises ValueError if the lock is not currently held.
        """
        if self._locked:
            self._locked = False
        else:
            raise ValueError("Not locked")
def patch_system():
    """Patch modules that may be different due to the platform.

    Replaces the global ``time`` module with the platform-specific
    implementation from ``adafruit_blinka.agnostic``.
    Trailing extraction artifact (" | PypiClean") removed from the last line.
    """
    # pylint: disable=import-outside-toplevel
    import sys
    from adafruit_blinka.agnostic import time
    # pylint: enable=import-outside-toplevel
    sys.modules["time"] = time
/searchspaces/DARTS_new_config.py | import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH
import os
import csv
import time
from HPO.utils.ConfigStruct import Parameter, Cumulative_Integer_Struct, LTP_Parameter
""" TODO
Seperate Pooling and Convolution Layers
Add more convolution operations (kernalSize and maybe stride)
"""
def init_config():
    """Build and return the ConfigSpace search space.

    Covers optimiser hyperparameters (learning rate, dropout, epochs,
    class weight, cosine-restart schedule) and data-augmentation
    parameters (jitter, scaling, window warp).
    """
    cs = CS.ConfigurationSpace()
    ###Optimiser###
    lr =CSH.UniformFloatHyperparameter(name = "lr", lower = 0.000001 ,upper = 0.1)
    p =CSH.UniformFloatHyperparameter(name = "p", lower = 0.01 ,upper = 0.3 )
    epochs = CSH.UniformIntegerHyperparameter(name = "epochs", lower = 30, upper = 300)
    c1 = CSH.UniformFloatHyperparameter(name = "c1_weight" , lower = 1,upper = 5)
    # cosine annealing with warm restarts: initial period and period multiplier
    T_0 = CSH.UniformIntegerHyperparameter(name = "T_0", lower = 1, upper = 10)
    T_mult = CSH.UniformIntegerHyperparameter(name = "T_mult", lower = 1, upper = 3)
    ##AugParameters
    jitter = CSH.UniformFloatHyperparameter(name = "jitter", lower = 0.001 ,upper = 0.5)
    scaling = CSH.UniformFloatHyperparameter(name = "scaling", lower = 0.001 ,upper = 0.5)
    window_warp_num= CSH.UniformIntegerHyperparameter(name = "window_warp_num", lower = 2 ,upper = 10)
    # *_rate parameters are the per-sample probabilities of applying each augmentation
    jitter_rate= CSH.UniformFloatHyperparameter(name = "jitter_rate", lower = 0.05 ,upper = 0.9)
    scaling_rate= CSH.UniformFloatHyperparameter(name = "scaling_rate", lower = 0.05 ,upper = 0.9)
    window_warp_rate= CSH.UniformFloatHyperparameter(name = "window_warp_rate", lower = 0.05 ,upper = 0.9)
    ###Topology Definition]###
    hp_list = [
      c1,
      epochs,
      lr,
      p,
      T_0,
      T_mult,
      jitter,
      scaling,
      jitter_rate,
      scaling_rate,
      window_warp_rate,
      window_warp_num]
    cs.add_hyperparameters(hp_list)
    return cs
if __name__ == "__main__":
    # smoke test: sample a configuration and convert it to a DARTS genotype
    # Trailing extraction artifact (" | PypiClean") removed from the last line.
    from HPO.utils.DARTS_utils import config_space_2_DARTS
    configS = init_config()
    print(configS.get_hyperparameters())
    c = configS.sample_configuration()
    print(c)
    print(config_space_2_DARTS(c))
/ApeMan-0.1.1.tar.gz/ApeMan-0.1.1/apeman/_apeman.py | import os
import sys
# Debugging
from pdb import set_trace as db
# Inspection
import inspect
# Iteration
from itertools import zip_longest as zip_longest
# Imports - Why is this being done here ???
from importlib import util, abc ,machinery
# Debugging
import logging
# Information
__version__ = "0.0.0"
modsep = '.'
if __name__ == "__main__" :
from pathlib import Path
import unittest
suite = unittest.TestLoader().discover('..')
unittest.TextTestRunner(verbosity=1).run(suite)
# print("Main")
# import logging
# logging.basicConfig(format = '%(message)s')
# if sys.version_info[:2] == (3,4) :
# logger = logging.getLogger("__34__")
# if sys.version_info[:2] == (3,5) :
# logger = logging.getLogger("__35__")
# logger.setLevel(logging.DEBUG)
# General Import
# import overlay
# from overlay import *
# Targeted Import
# from overlay import tiers
# Nested Import
# from overlay.tiers import first
# Staggered Import
# from overlay import tiers
# logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')]))
# from tiers import first
# logger.debug("Modules : {}\n".format([key for key in sys.modules.keys() if 'overlay' in key or 'tiers' in key]))
# logger.debug("\n".join(["{:24} : {}".format(key, sys.modules[key]) for key in sys.modules.keys() if key.startswith('overlay') or key.startswith('tiers')]))
else :
# Note : This code is only compatible with Python 3
if __package__ : # Relative imports for normal usage
if sys.version_info[:2] == (3,5) :
print("35A")
from .__35__ import OverlayImporter
if sys.version_info[:2] == (3,4) :
print("34A")
from .__34__ import OverlayImporter
else : # Absolute imports prevent "SystemError : Parent module '' not loaded,..."
if sys.version_info[:2] == (3,5) :
print("35B")
from __35__ import OverlayImporter
if sys.version_info[:2] == (3,4) :
print("34B")
from __34__ import OverlayImporter | PypiClean |
/functions/graph_embedding/node2vec/edges.py | import numpy as np
from abc import ABC, abstractmethod
from functools import reduce
from itertools import combinations_with_replacement
from gensim.models import KeyedVectors
from tqdm import tqdm
class EdgeEmbedder(ABC):
    def __init__(self, keyed_vectors: KeyedVectors, quiet: bool = False):
        """
        :param keyed_vectors: KeyedVectors containing nodes and embeddings to calculate edges for
        :param quiet: suppress the progress bar when True
        """
        self.kv = keyed_vectors
        self.quiet = quiet
    @abstractmethod
    def _embed(self, edge: tuple) -> np.ndarray:
        """
        Abstract method for implementing the embedding method
        :param edge: tuple of two nodes
        :return: Edge embedding
        """
        pass
    def __getitem__(self, edge) -> np.ndarray:
        # validate the edge before delegating to the concrete embedder
        if not isinstance(edge, tuple) or not len(edge) == 2:
            raise ValueError('edge must be a tuple of two nodes')
        if edge[0] not in self.kv.index2word:
            raise KeyError('node {} does not exist in given KeyedVectors'.format(edge[0]))
        if edge[1] not in self.kv.index2word:
            raise KeyError('node {} does not exist in given KeyedVectors'.format(edge[1]))
        return self._embed(edge)
    def as_keyed_vectors(self) -> KeyedVectors:
        """
        Generated a KeyedVectors instance with all the possible edge embeddings
        :return: Edge embeddings
        """
        # unordered node pairs, including self-loops
        edge_generator = combinations_with_replacement(self.kv.index2word, r=2)
        if not self.quiet:
            vocab_size = len(self.kv.vocab)
            # number of pairs with replacement: (v+1)! / (2 * (v-1)!) == v*(v+1)/2
            total_size = reduce(lambda x, y: x * y, range(1, vocab_size + 2)) / \
                         (2 * reduce(lambda x, y: x * y, range(1, vocab_size)))
            edge_generator = tqdm(edge_generator, desc='Generating edge features', total=total_size)
        # Generate features
        tokens = []
        features = []
        for edge in edge_generator:
            # key is the sorted pair so (a, b) and (b, a) map to one entry
            token = str(tuple(sorted(edge)))
            embedding = self._embed(edge)
            tokens.append(token)
            features.append(embedding)
        # Build KV instance
        edge_kv = KeyedVectors(vector_size=self.kv.vector_size)
        edge_kv.add(
            entities=tokens,
            weights=features)
        return edge_kv
class AverageEmbedder(EdgeEmbedder):
    """
    Edge embedding = element-wise mean of the two node vectors
    """
    def _embed(self, edge: tuple):
        u, v = self.kv[edge[0]], self.kv[edge[1]]
        return (u + v) / 2
class HadamardEmbedder(EdgeEmbedder):
    """
    Edge embedding = element-wise (Hadamard) product of the two node vectors
    """
    def _embed(self, edge: tuple):
        first, second = edge
        return self.kv[first] * self.kv[second]
class WeightedL1Embedder(EdgeEmbedder):
    """
    Edge embedding = element-wise absolute difference (weighted L1)
    """
    def _embed(self, edge: tuple):
        diff = self.kv[edge[0]] - self.kv[edge[1]]
        return np.abs(diff)
class WeightedL2Embedder(EdgeEmbedder):
    """
    Edge embedding = element-wise squared difference (weighted L2)
    """
    def _embed(self, edge: tuple):
        diff = self.kv[edge[0]] - self.kv[edge[1]]
        return diff ** 2
/Netfoll_TL-2.0.1-py3-none-any.whl/netfoll_tl/tl/functions/help.py | from ...tl.tlobject import TLObject
from ...tl.tlobject import TLRequest
from typing import Optional, List, Union, TYPE_CHECKING
import os
import struct
from datetime import datetime
if TYPE_CHECKING:
from ...tl.types import TypeDataJSON, TypeInputAppEvent, TypeInputPeer, TypeInputUser, TypeMessageEntity
class AcceptTermsOfServiceRequest(TLRequest):
    """Generated TL request ``help.acceptTermsOfService``; the server replies with a Bool."""
    CONSTRUCTOR_ID = 0xee72f79a
    SUBCLASS_OF_ID = 0xf5b399ac
    # noinspection PyShadowingBuiltins
    def __init__(self, id: 'TypeDataJSON'):
        """
        :returns Bool: This type has no constructors.
        """
        self.id = id
    def to_dict(self):
        return {
            '_': 'AcceptTermsOfServiceRequest',
            'id': self.id.to_dict() if isinstance(self.id, TLObject) else self.id
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0xee72f79a) in little-endian order.
        return b''.join((
            b'\x9a\xf7r\xee',
            self.id._bytes(),
        ))
    @classmethod
    def from_reader(cls, reader):
        _id = reader.tgread_object()
        return cls(id=_id)
class DismissSuggestionRequest(TLRequest):
    """Generated TL request ``help.dismissSuggestion``; the server replies with a Bool."""
    CONSTRUCTOR_ID = 0xf50dbaa1
    SUBCLASS_OF_ID = 0xf5b399ac
    def __init__(self, peer: 'TypeInputPeer', suggestion: str):
        """
        :returns Bool: This type has no constructors.
        """
        self.peer = peer
        self.suggestion = suggestion
    async def resolve(self, client, utils):
        # Turn whatever the caller passed (username, id, entity) into an InputPeer.
        self.peer = utils.get_input_peer(await client.get_input_entity(self.peer))
    def to_dict(self):
        return {
            '_': 'DismissSuggestionRequest',
            'peer': self.peer.to_dict() if isinstance(self.peer, TLObject) else self.peer,
            'suggestion': self.suggestion
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0xf50dbaa1) in little-endian order.
        return b''.join((
            b'\xa1\xba\r\xf5',
            self.peer._bytes(),
            self.serialize_bytes(self.suggestion),
        ))
    @classmethod
    def from_reader(cls, reader):
        _peer = reader.tgread_object()
        _suggestion = reader.tgread_string()
        return cls(peer=_peer, suggestion=_suggestion)
class EditUserInfoRequest(TLRequest):
    """Generated TL request ``help.editUserInfo``; returns a ``help.UserInfo``."""
    CONSTRUCTOR_ID = 0x66b91b70
    SUBCLASS_OF_ID = 0x5c53d7d8
    def __init__(self, user_id: 'TypeInputUser', message: str, entities: List['TypeMessageEntity']):
        """
        :returns help.UserInfo: Instance of either UserInfoEmpty, UserInfo.
        """
        self.user_id = user_id
        self.message = message
        self.entities = entities
    async def resolve(self, client, utils):
        # Turn whatever the caller passed (username, id, entity) into an InputUser.
        self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id))
    def to_dict(self):
        return {
            '_': 'EditUserInfoRequest',
            'user_id': self.user_id.to_dict() if isinstance(self.user_id, TLObject) else self.user_id,
            'message': self.message,
            'entities': [] if self.entities is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.entities]
        }
    def _bytes(self):
        # b'\x15\xc4\xb5\x1c' is the TL Vector constructor id, followed by the
        # element count and each serialized entity.
        return b''.join((
            b'p\x1b\xb9f',
            self.user_id._bytes(),
            self.serialize_bytes(self.message),
            b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.entities)),b''.join(x._bytes() for x in self.entities),
        ))
    @classmethod
    def from_reader(cls, reader):
        _user_id = reader.tgread_object()
        _message = reader.tgread_string()
        reader.read_int()  # discard the Vector constructor id written by _bytes
        _entities = []
        for _ in range(reader.read_int()):
            _x = reader.tgread_object()
            _entities.append(_x)
        return cls(user_id=_user_id, message=_message, entities=_entities)
class GetAppChangelogRequest(TLRequest):
    """Generated TL request ``help.getAppChangelog``; returns an ``Updates`` variant."""
    CONSTRUCTOR_ID = 0x9010ef6f
    SUBCLASS_OF_ID = 0x8af52aac
    def __init__(self, prev_app_version: str):
        """
        :returns Updates: Instance of either UpdatesTooLong, UpdateShortMessage, UpdateShortChatMessage, UpdateShort, UpdatesCombined, Updates, UpdateShortSentMessage.
        """
        self.prev_app_version = prev_app_version
    def to_dict(self):
        return {
            '_': 'GetAppChangelogRequest',
            'prev_app_version': self.prev_app_version
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0x9010ef6f) in little-endian order.
        return b''.join((
            b'o\xef\x10\x90',
            self.serialize_bytes(self.prev_app_version),
        ))
    @classmethod
    def from_reader(cls, reader):
        _prev_app_version = reader.tgread_string()
        return cls(prev_app_version=_prev_app_version)
class GetAppConfigRequest(TLRequest):
    """Generated TL request ``help.getAppConfig``; returns a ``help.AppConfig``."""
    CONSTRUCTOR_ID = 0x61e3f854
    SUBCLASS_OF_ID = 0x14381c9a
    # noinspection PyShadowingBuiltins
    def __init__(self, hash: int):
        """
        :returns help.AppConfig: Instance of either AppConfigNotModified, AppConfig.
        """
        self.hash = hash
    def to_dict(self):
        return {
            '_': 'GetAppConfigRequest',
            'hash': self.hash
        }
    def _bytes(self):
        # hash is packed as a signed 32-bit little-endian integer.
        return b''.join((
            b'T\xf8\xe3a',
            struct.pack('<i', self.hash),
        ))
    @classmethod
    def from_reader(cls, reader):
        _hash = reader.read_int()
        return cls(hash=_hash)
class GetAppUpdateRequest(TLRequest):
    """Generated TL request ``help.getAppUpdate``; returns a ``help.AppUpdate``."""
    CONSTRUCTOR_ID = 0x522d5a7d
    SUBCLASS_OF_ID = 0x5897069e
    def __init__(self, source: str):
        """
        :returns help.AppUpdate: Instance of either AppUpdate, NoAppUpdate.
        """
        self.source = source
    def to_dict(self):
        return {
            '_': 'GetAppUpdateRequest',
            'source': self.source
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0x522d5a7d) in little-endian order.
        return b''.join((
            b'}Z-R',
            self.serialize_bytes(self.source),
        ))
    @classmethod
    def from_reader(cls, reader):
        _source = reader.tgread_string()
        return cls(source=_source)
class GetCdnConfigRequest(TLRequest):
    """Generated TL request ``help.getCdnConfig`` (no arguments)."""
    CONSTRUCTOR_ID = 0x52029342
    SUBCLASS_OF_ID = 0xecda397c
    def to_dict(self):
        return {
            '_': 'GetCdnConfigRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0x52029342) in little-endian order.
        return b''.join((
            b'B\x93\x02R',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetConfigRequest(TLRequest):
    """Generated TL request ``help.getConfig`` (no arguments)."""
    CONSTRUCTOR_ID = 0xc4f9186b
    SUBCLASS_OF_ID = 0xd3262a4a
    def to_dict(self):
        return {
            '_': 'GetConfigRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0xc4f9186b) in little-endian order.
        return b''.join((
            b'k\x18\xf9\xc4',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetCountriesListRequest(TLRequest):
    """Generated TL request ``help.getCountriesList``; returns a ``help.CountriesList``."""
    CONSTRUCTOR_ID = 0x735787a8
    SUBCLASS_OF_ID = 0xea31fe88
    # noinspection PyShadowingBuiltins
    def __init__(self, lang_code: str, hash: int):
        """
        :returns help.CountriesList: Instance of either CountriesListNotModified, CountriesList.
        """
        self.lang_code = lang_code
        self.hash = hash
    def to_dict(self):
        return {
            '_': 'GetCountriesListRequest',
            'lang_code': self.lang_code,
            'hash': self.hash
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0x735787a8) in little-endian order.
        return b''.join((
            b'\xa8\x87Ws',
            self.serialize_bytes(self.lang_code),
            struct.pack('<i', self.hash),
        ))
    @classmethod
    def from_reader(cls, reader):
        _lang_code = reader.tgread_string()
        _hash = reader.read_int()
        return cls(lang_code=_lang_code, hash=_hash)
class GetDeepLinkInfoRequest(TLRequest):
    """Generated TL request ``help.getDeepLinkInfo``; returns a ``help.DeepLinkInfo``."""
    CONSTRUCTOR_ID = 0x3fedc75f
    SUBCLASS_OF_ID = 0x984aac38
    def __init__(self, path: str):
        """
        :returns help.DeepLinkInfo: Instance of either DeepLinkInfoEmpty, DeepLinkInfo.
        """
        self.path = path
    def to_dict(self):
        return {
            '_': 'GetDeepLinkInfoRequest',
            'path': self.path
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0x3fedc75f) in little-endian order.
        return b''.join((
            b'_\xc7\xed?',
            self.serialize_bytes(self.path),
        ))
    @classmethod
    def from_reader(cls, reader):
        _path = reader.tgread_string()
        return cls(path=_path)
class GetInviteTextRequest(TLRequest):
    """Generated TL request ``help.getInviteText`` (no arguments)."""
    CONSTRUCTOR_ID = 0x4d392343
    SUBCLASS_OF_ID = 0xcf70aa35
    def to_dict(self):
        return {
            '_': 'GetInviteTextRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0x4d392343) in little-endian order.
        return b''.join((
            b'C#9M',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetNearestDcRequest(TLRequest):
    """Generated TL request ``help.getNearestDc`` (no arguments)."""
    CONSTRUCTOR_ID = 0x1fb33026
    SUBCLASS_OF_ID = 0x3877045f
    def to_dict(self):
        return {
            '_': 'GetNearestDcRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0x1fb33026) in little-endian order.
        return b''.join((
            b'&0\xb3\x1f',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetPassportConfigRequest(TLRequest):
    """Generated TL request ``help.getPassportConfig``; returns a ``help.PassportConfig``."""
    CONSTRUCTOR_ID = 0xc661ad08
    SUBCLASS_OF_ID = 0xc666c0ad
    # noinspection PyShadowingBuiltins
    def __init__(self, hash: int):
        """
        :returns help.PassportConfig: Instance of either PassportConfigNotModified, PassportConfig.
        """
        self.hash = hash
    def to_dict(self):
        return {
            '_': 'GetPassportConfigRequest',
            'hash': self.hash
        }
    def _bytes(self):
        # hash is packed as a signed 32-bit little-endian integer.
        return b''.join((
            b'\x08\xada\xc6',
            struct.pack('<i', self.hash),
        ))
    @classmethod
    def from_reader(cls, reader):
        _hash = reader.read_int()
        return cls(hash=_hash)
class GetPremiumPromoRequest(TLRequest):
    """Generated TL request ``help.getPremiumPromo`` (no arguments)."""
    CONSTRUCTOR_ID = 0xb81b93d4
    SUBCLASS_OF_ID = 0xc987a338
    def to_dict(self):
        return {
            '_': 'GetPremiumPromoRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0xb81b93d4) in little-endian order.
        return b''.join((
            b'\xd4\x93\x1b\xb8',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetPromoDataRequest(TLRequest):
    """Generated TL request ``help.getPromoData`` (no arguments)."""
    CONSTRUCTOR_ID = 0xc0977421
    SUBCLASS_OF_ID = 0x9d595542
    def to_dict(self):
        return {
            '_': 'GetPromoDataRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0xc0977421) in little-endian order.
        return b''.join((
            b'!t\x97\xc0',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetRecentMeUrlsRequest(TLRequest):
    """Generated TL request ``help.getRecentMeUrls``; returns a ``help.RecentMeUrls``."""
    CONSTRUCTOR_ID = 0x3dc0f114
    SUBCLASS_OF_ID = 0xf269c477
    def __init__(self, referer: str):
        """
        :returns help.RecentMeUrls: Instance of RecentMeUrls.
        """
        self.referer = referer
    def to_dict(self):
        return {
            '_': 'GetRecentMeUrlsRequest',
            'referer': self.referer
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0x3dc0f114) in little-endian order.
        return b''.join((
            b'\x14\xf1\xc0=',
            self.serialize_bytes(self.referer),
        ))
    @classmethod
    def from_reader(cls, reader):
        _referer = reader.tgread_string()
        return cls(referer=_referer)
class GetSupportRequest(TLRequest):
    """Generated TL request ``help.getSupport`` (no arguments)."""
    CONSTRUCTOR_ID = 0x9cdf08cd
    SUBCLASS_OF_ID = 0x7159bceb
    def to_dict(self):
        return {
            '_': 'GetSupportRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0x9cdf08cd) in little-endian order.
        return b''.join((
            b'\xcd\x08\xdf\x9c',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetSupportNameRequest(TLRequest):
    """Generated TL request ``help.getSupportName`` (no arguments)."""
    CONSTRUCTOR_ID = 0xd360e72c
    SUBCLASS_OF_ID = 0x7f50b7c2
    def to_dict(self):
        return {
            '_': 'GetSupportNameRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0xd360e72c) in little-endian order.
        return b''.join((
            b',\xe7`\xd3',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetTermsOfServiceUpdateRequest(TLRequest):
    """Generated TL request ``help.getTermsOfServiceUpdate`` (no arguments)."""
    CONSTRUCTOR_ID = 0x2ca51fd1
    SUBCLASS_OF_ID = 0x293c2977
    def to_dict(self):
        return {
            '_': 'GetTermsOfServiceUpdateRequest'
        }
    def _bytes(self):
        # Payload is just CONSTRUCTOR_ID (0x2ca51fd1) in little-endian order.
        return b''.join((
            b'\xd1\x1f\xa5,',
        ))
    @classmethod
    def from_reader(cls, reader):
        return cls()
class GetUserInfoRequest(TLRequest):
    """Generated TL request ``help.getUserInfo``; returns a ``help.UserInfo``."""
    CONSTRUCTOR_ID = 0x38a08d3
    SUBCLASS_OF_ID = 0x5c53d7d8
    def __init__(self, user_id: 'TypeInputUser'):
        """
        :returns help.UserInfo: Instance of either UserInfoEmpty, UserInfo.
        """
        self.user_id = user_id
    async def resolve(self, client, utils):
        # Turn whatever the caller passed (username, id, entity) into an InputUser.
        self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id))
    def to_dict(self):
        return {
            '_': 'GetUserInfoRequest',
            'user_id': self.user_id.to_dict() if isinstance(self.user_id, TLObject) else self.user_id
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0x038a08d3) in little-endian order.
        return b''.join((
            b'\xd3\x08\x8a\x03',
            self.user_id._bytes(),
        ))
    @classmethod
    def from_reader(cls, reader):
        _user_id = reader.tgread_object()
        return cls(user_id=_user_id)
class HidePromoDataRequest(TLRequest):
    """Generated TL request ``help.hidePromoData``; the server replies with a Bool."""
    CONSTRUCTOR_ID = 0x1e251c95
    SUBCLASS_OF_ID = 0xf5b399ac
    def __init__(self, peer: 'TypeInputPeer'):
        """
        :returns Bool: This type has no constructors.
        """
        self.peer = peer
    async def resolve(self, client, utils):
        # Turn whatever the caller passed (username, id, entity) into an InputPeer.
        self.peer = utils.get_input_peer(await client.get_input_entity(self.peer))
    def to_dict(self):
        return {
            '_': 'HidePromoDataRequest',
            'peer': self.peer.to_dict() if isinstance(self.peer, TLObject) else self.peer
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0x1e251c95) in little-endian order.
        return b''.join((
            b'\x95\x1c%\x1e',
            self.peer._bytes(),
        ))
    @classmethod
    def from_reader(cls, reader):
        _peer = reader.tgread_object()
        return cls(peer=_peer)
class SaveAppLogRequest(TLRequest):
    """Generated TL request ``help.saveAppLog``; the server replies with a Bool."""
    CONSTRUCTOR_ID = 0x6f02f748
    SUBCLASS_OF_ID = 0xf5b399ac
    def __init__(self, events: List['TypeInputAppEvent']):
        """
        :returns Bool: This type has no constructors.
        """
        self.events = events
    def to_dict(self):
        return {
            '_': 'SaveAppLogRequest',
            'events': [] if self.events is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.events]
        }
    def _bytes(self):
        # b'\x15\xc4\xb5\x1c' is the TL Vector constructor id, followed by the
        # element count and each serialized event.
        return b''.join((
            b'H\xf7\x02o',
            b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.events)),b''.join(x._bytes() for x in self.events),
        ))
    @classmethod
    def from_reader(cls, reader):
        reader.read_int()  # discard the Vector constructor id written by _bytes
        _events = []
        for _ in range(reader.read_int()):
            _x = reader.tgread_object()
            _events.append(_x)
        return cls(events=_events)
class SetBotUpdatesStatusRequest(TLRequest):
    """Generated TL request ``help.setBotUpdatesStatus``; the server replies with a Bool."""
    CONSTRUCTOR_ID = 0xec22cfcd
    SUBCLASS_OF_ID = 0xf5b399ac
    def __init__(self, pending_updates_count: int, message: str):
        """
        :returns Bool: This type has no constructors.
        """
        self.pending_updates_count = pending_updates_count
        self.message = message
    def to_dict(self):
        return {
            '_': 'SetBotUpdatesStatusRequest',
            'pending_updates_count': self.pending_updates_count,
            'message': self.message
        }
    def _bytes(self):
        # First 4 bytes are CONSTRUCTOR_ID (0xec22cfcd) in little-endian order.
        return b''.join((
            b'\xcd\xcf"\xec',
            struct.pack('<i', self.pending_updates_count),
            self.serialize_bytes(self.message),
        ))
    @classmethod
    def from_reader(cls, reader):
        _pending_updates_count = reader.read_int()
        _message = reader.tgread_string()
        return cls(pending_updates_count=_pending_updates_count, message=_message)
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_kw-gb.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"a.m.",
"p.m."
],
"DAY": [
"De Sul",
"De Lun",
"De Merth",
"De Merher",
"De Yow",
"De Gwener",
"De Sadorn"
],
"MONTH": [
"Mys Genver",
"Mys Whevrel",
"Mys Merth",
"Mys Ebrel",
"Mys Me",
"Mys Efan",
"Mys Gortheren",
"Mye Est",
"Mys Gwyngala",
"Mys Hedra",
"Mys Du",
"Mys Kevardhu"
],
"SHORTDAY": [
"Sul",
"Lun",
"Mth",
"Mhr",
"Yow",
"Gwe",
"Sad"
],
"SHORTMONTH": [
"Gen",
"Whe",
"Mer",
"Ebr",
"Me",
"Efn",
"Gor",
"Est",
"Gwn",
"Hed",
"Du",
"Kev"
],
"fullDate": "EEEE d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y HH:mm:ss",
"mediumDate": "d MMM y",
"mediumTime": "HH:mm:ss",
"short": "dd/MM/y HH:mm",
"shortDate": "dd/MM/y",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "\u00a3",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "\u00a4-",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "kw-gb",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]); | PypiClean |
/OASYS1_HALF_SRW-0.0.3-py3-none-any.whl/orangecontrib/srw/widgets/light_sources/ow_srw_3d_light_source.py | import sys
from PyQt5.QtWidgets import QApplication
from orangewidget import gui
from orangewidget.settings import Setting
from oasys.widgets import gui as oasysgui
from oasys.widgets import congruence
from wofrysrw.storage_ring.light_sources.srw_3d_light_source import SRW3DLightSource
from wofrysrw.storage_ring.magnetic_structures.srw_3d_magnetic_structure import SRW3DMagneticStructure
from orangecontrib.srw.widgets.gui.ow_srw_source import OWSRWSource
class OWSRW3DLightSource(OWSRWSource):
    """OASYS widget wrapping an SRW light source whose magnetic field is read from a 3D data file."""
    name = "3D Light Source"
    description = "SRW Source: 3D Light Source"
    icon = "icons/3d.png"
    priority = 100
    # Persisted widget settings (restored between sessions by orangewidget).
    file_name = Setting("")
    comment_character = Setting("#")
    interpolation_method = Setting(0)
    want_main_area=1
    def __init__(self):
        super().__init__()
        # Build the "3D file Parameters" panel: file picker, comment character,
        # and interpolation-method combo box.
        left_box_2 = oasysgui.widgetBox(self.tab_source, "3D file Parameters", addSpace=True, orientation="vertical", height=175)
        file_box = oasysgui.widgetBox(left_box_2, "", addSpace=False, orientation="horizontal")
        self.le_file_name = oasysgui.lineEdit(file_box, self, "file_name", "3D data file", labelWidth=95, valueType=str, orientation="horizontal")
        gui.button(file_box, self, "...", callback=self.select3DDataFile)
        oasysgui.lineEdit(left_box_2, self, "comment_character", "Comment Character", labelWidth=320, valueType=str, orientation="horizontal")
        gui.comboBox(left_box_2, self, "interpolation_method", label="Interpolation Method",
                     items=["bi-linear", "bi-quadratic", "bi-cubic"], labelWidth=260,
                     sendSelectedValue=False, orientation="horizontal")
        gui.rubber(self.controlArea)
        gui.rubber(self.mainArea)
    def select3DDataFile(self):
        # Open a file dialog and copy the chosen path into the line edit.
        self.le_file_name.setText(oasysgui.selectFileFromDialog(self, self.file_name, "3D data file"))
    # TODO: these methods maker sense only after reading the file, must be fixed
    def get_automatic_sr_method(self):
        return 2
    def get_default_initial_z(self):
        # Falls back to 0.0 if the data file cannot be read/parsed.
        try:
            return SRW3DMagneticStructure.get_default_initial_z(self.file_name, self.comment_character)
        except:
            return 0.0
    def get_source_length(self):
        # Falls back to 0.0 if the data file cannot be read/parsed.
        try:
            return SRW3DMagneticStructure.get_source_length(self.file_name, self.comment_character)
        except:
            return 0.0
    def get_srw_source(self, electron_beam):
        # interpolation_method is 0-based in the combo box but 1-based in wofrysrw.
        return SRW3DLightSource(electron_beam=electron_beam,
                                magnet_magnetic_structure=SRW3DMagneticStructure(self.file_name, self.comment_character, self.interpolation_method+1))
    def print_specific_infos(self, srw_source):
        pass
    def checkLightSourceSpecificFields(self):
        congruence.checkFile(self.file_name)
        congruence.checkEmptyString(self.comment_character, "Comment character")
    def receive_specific_syned_data(self, data):
        raise ValueError("Syned data not available for this kind of source")
if __name__ == "__main__":
    # Launch the widget standalone for manual testing.
    a = QApplication(sys.argv)
    # Instantiate the widget class defined above.  The previous code created
    # SRW3DMagneticStructure() — a data class, not a widget — so .show(),
    # .exec_() interaction and .saveSettings() could not work.
    ow = OWSRW3DLightSource()
    ow.show()
    a.exec_()
    ow.saveSettings()
/BindingGP-0.0.36.tar.gz/BindingGP-0.0.36/bgp/calculation/translate.py | import copy
import sys
import numpy as np
import sympy
from sympy import Number, Expr
from sympy.core.numbers import ComplexInfinity, NumberSymbol
from bgp.calculation.coefficient import get_args
"""lambidfy"""
def general_expr_dict(self, expr_init_map, free_symbol, gsym_map, simplifying=False):
    """Substitute the initial placeholder symbols into *self*, lambdify the
    result over the free symbols, evaluate it, and optionally simplify."""
    expr = self
    for name, replacement in expr_init_map.items():
        expr = expr.xreplace({sympy.Symbol(name): replacement})
    symbols, values = free_symbol[0], free_symbol[1]
    evaluated = sympy.lambdify(symbols, expr, modules=(gsym_map, "sympy"))(*values)
    return sympy.simplify(evaluated) if simplifying else evaluated
def general_expr(self, pset, simplifying=False):
    """Resolve *self* against a SymbolSet: delegates to ``general_expr_dict``
    using the maps stored on *pset*.

    Parameters
    ----------
    self : sympy.Expr
    pset : SymbolSet
    simplifying : bool

    Returns
    -------
    The evaluated (and optionally simplified) expression.
    """
    return general_expr_dict(
        self,
        pset.expr_init_map,
        pset.free_symbol,
        pset.gsym_map,
        simplifying=simplifying,
    )
def group_str(self, pset, feature_name=False):
    """
    Return a human-readable expression string built only from input feature names.

    The expression is obtained from *self* (a sympy.Expr, or a SymbolTree whose
    ``coef_expr``/``expr``/``capsule`` attribute is used), then a sequence of
    ordered textual substitutions expands placeholder names ("newi", "Vi",
    "gxi", "ci") into their underlying features/values.  The substitution
    order (reversed maps) matters: longer/later names must be replaced first
    so that e.g. "new10" is not clobbered by "new1".

    Parameters
    ----------
    self:sympy.Expr or SymbolTree
    pset:SymbolSet
    feature_name:Bool
        If True, additionally replace internal "xi" symbols with the
        display/latex names registered in ``pset.terminals_fea_map``.
    """
    #####get expr
    if isinstance(self, Expr):
        expr = copy.deepcopy(self)
        expr = simple(expr, pset.gro_ter_con)[0]
    else:
        if hasattr(self, "coef_expr"):
            expr = self.coef_expr
        elif hasattr(self, "expr") and self.expr is not None:
            expr = self.expr
        else:
            if hasattr(self, "capsule"):
                self = self.capsule
            try:
                expr = compile_context(self, pset.context, pset.gro_ter_con)
            except TypeError:
                raise TypeError("the first inupt must be sympy.expr or SymbolTree,"
                                "the second is SymbolSet")
    ### replace newi to (xi+xj*xk...)
    e_map_va1 = list(pset.expr_init_map.items())
    e_map_va1.reverse()
    for i, j in e_map_va1:
        expr = expr.subs(i, j)
    ### to str
    name_subd = str(expr)
    ### replace Vi() to Vi*(),Vi+()
    # Coef-type arrays expand to a multiplying prefix, Const-type to an
    # additive prefix (matching how they combine with their argument).
    arg_list = get_args(expr, sole=False)
    V_map1 = {ar.name: str([str(_) for _ in ar.arr.ravel()]) for ar in arg_list if
              hasattr(ar, "arr") and ar.tp == "Coef"}
    V_map2 = {ar.name: str([str(_) for _ in ar.arr.ravel()]) for ar in arg_list if
              hasattr(ar, "arr") and ar.tp == "Const"}
    V_map_va1 = list(V_map1.items())
    V_map_va2 = list(V_map2.items())
    V_map_va1.reverse()
    V_map_va2.reverse()
    for i, j in V_map_va1:
        name_subd = name_subd.replace(i, "%s*" % j)
    for i, j in V_map_va2:
        name_subd = name_subd.replace(i, "%s+" % j)
    ### replace gxi to [xi,xj]
    t_map_va1 = list(pset.terminals_init_map.items())
    t_map_va1.reverse()
    for i, j in t_map_va1:
        name_subd = name_subd.replace(i, j)
    ### replace ci to (float,)
    c_map_va1 = list(pset.data_x_dict.keys())
    c_map_va1 = [i for i in c_map_va1 if "c" in i]
    c_map_va1.reverse()
    for i in c_map_va1:
        name_subd = name_subd.replace(i, "%.3e" % float(pset.data_x_dict[i]))
    ### replace represent xi to (latex,)
    if feature_name:
        if pset.terminals_fea_map:
            for j1, j2 in pset.terminals_fea_map.values():
                name_subd = name_subd.replace(j1, j2)
        else:
            print("Don not assign the feature_name to pset when pest.add_features")
    # NOTE(review): this print looks like leftover debug output (the string is
    # also returned below) — confirm before removing.
    print(name_subd)
    return name_subd
def simple(expr01, groups):
    """
    str to sympy.Expr function.
    add conv to MMdd and MMul.
    the calcualte conv need conform with np_func()!!
    is_jump: jump the calculate >= 3 (group_size).
    keep: the calculate is return then input group_size or 1.

    Walks the expression tree bottom-up, tagging each node with a "group
    size" n: symbols get groups[name], numbers get 1, and function nodes
    combine/collapse their children's sizes.  Returns (expr, n); n is
    np.nan when children report 3 or more distinct sizes.
    """
    def max_method(expr):
        # Recurse into all children, rebuild the node from the processed
        # children, and propagate the largest child group size.  If the
        # children report >= 3 distinct sizes the result is marked np.nan.
        new = [calculate_number(i) for i in expr.args]
        try:
            exprarg_new = list(zip(*new))[0]
            n = list(list(zip(*new))[1])
            expr = expr.func(*exprarg_new)
            n.append(1)
            le = len(set(n))
            if le >= 3:
                return expr, np.nan
            else:
                return expr, max(n)
        except IndexError:
            # zip(*new) is empty when the node has no args.
            print(expr)
            return expr, np.nan
    def calculate_number(expr):
        # Leaf cases: a symbol's size comes from `groups`; any numeric leaf
        # counts as size 1.
        if isinstance(expr, sympy.Symbol):
            return expr, groups[expr.name]
        elif isinstance(expr, (Number, ComplexInfinity)):
            return expr, 1
        elif isinstance(expr, NumberSymbol):
            return expr, 1
        else:
            if hasattr(expr.func, "keep"):
                # Custom single-argument functions (e.g. MAdd/MMul/Conv
                # variants) carry `keep`/`is_jump` class flags that decide
                # whether the wrapper is kept, dropped, or skipped for
                # large group sizes, and what size is reported upward.
                expr_arg, ns = calculate_number(expr.args[0])
                if expr.func.keep:  ###["Self,Conv"]
                    if ns == 1:
                        expr = expr_arg
                        return expr, ns
                    elif ns == 2:
                        expr = expr.func(expr_arg)
                        expr.conu = ns
                        return expr, ns
                    elif ns >= 3:
                        if expr.func.is_jump:
                            expr = expr_arg
                            return expr, ns
                        else:
                            expr = expr.func(expr_arg)
                            expr.conu = ns
                            return expr, ns
                    else:
                        expr = expr_arg
                        return expr, ns
                else:
                    # Aggregating wrappers collapse the group size back to 1.
                    if ns == 1:
                        expr = expr_arg
                        return expr, ns
                    elif ns == 2:
                        expr = expr.func(expr_arg)
                        expr.conu = ns
                        return expr, 1
                    elif ns >= 3:
                        if expr.func.is_jump:
                            expr = expr_arg
                            return expr, ns
                        else:
                            expr = expr.func(expr_arg)  ###["MAdd", "MMul"]
                            expr.conu = ns
                            return expr, 1
                    else:
                        # expr = expr_arg
                        expr = expr.func(expr_arg)
                        expr.conu = ns
                        return expr, ns
            elif hasattr(expr, "arr"):
                #### expr, ns = max_method(expr)
                #### assert expr.arr.shape[0] == ns  #
                return max_method(expr)
            else:
                return max_method(expr)
    expr01 = calculate_number(expr01)
    return expr01
def compile_context(expr, context, gro_ter_con, simplify=True):
    """Compile the expression *expr*.

    Evaluates the textual representation of *expr* against *context* (the
    primitive-set namespace) to build a sympy expression, optionally running
    :func:`simple` on it afterwards.

    :param expr: Expression to compile. It can either be a PrimitiveTree,
                 a string of Python code or any object that when
                 converted into string produced a valid Python code
                 expression.
    :param context: dict
    :param simplify: bool
    :param gro_ter_con: list if group_size
    :returns: a function if the primitive set has 1 or more arguments,
              or return the results produced by evaluating the tree.
    """
    if isinstance(expr, str):
        code = expr
    else:
        code = repr(expr)
    try:
        # eval() with an empty locals dict: every name must resolve in `context`.
        expr = eval(code, context, {})
    except MemoryError:
        _, _, traceback = sys.exc_info()
        raise MemoryError("DEAP : Error in tree evaluation :"
                          " Python cannot evaluate a tree higher than 90. "
                          "To avoid this problem, you should use bloat control on your "
                          "operators. See the DEAP documentation for more information. "
                          "DEAP will now abort.").with_traceback(traceback)
    if simplify:
        expr = simple(expr, gro_ter_con)[0]
    return expr
def compile_(expr, pset):
    """Compile the expression *expr*.

    DEAP-style compile: when the primitive set declares arguments, the code
    string is wrapped in a ``lambda`` over them before evaluation.

    :param expr: Expression to compile. It can either be a PrimitiveTree,
                 a string of Python code or any object that when
                 converted into string produced a valid Python code
                 expression.
    :param pset: Primitive set against which the expression is compile.
    :returns: a function if the primitive set has 1 or more arguments,
              or return the results produced by evaluating the tree.
    """
    if isinstance(expr, str):
        code = expr
    else:
        code = repr(expr)
    if len(pset.arguments) > 0:
        # This section is a stripped version of the lambdify
        # function of SymPy 0.6.6.
        args = ",".join(arg for arg in pset.arguments)
        code = "lambda {args}: {code}".format(args=args, code=code)
    try:
        return eval(code, pset.context, {})
    except MemoryError:
        _, _, traceback = sys.exc_info()
        raise MemoryError("DEAP : Error in tree evaluation :"
                          " Python cannot evaluate a tree higher than 90. "
                          "To avoid this problem, you should use bloat control on your "
                          "operators. See the DEAP documentation for more information. "
                          "DEAP will now abort.").with_traceback(traceback)
/Gnosis_Utils-1.2.2.tar.gz/Gnosis_Utils-1.2.2/gnosis/xml/objectify/utils.py | from __future__ import generators
from gnosis.xml.objectify._objectify import _XO_
from gnosis.xml.objectify._objectify import *
from exceptions import TypeError
from types import *
from itertools import islice
from sys import maxint, stdout
def addChild(parent, child):
    "Attach *child* to *parent*, promoting a repeated tag name to a list"
    name = tagname(child)
    if not hasattr(parent, name):
        # First occurrence of this tag: store the node directly
        setattr(parent, name, child)
    else:
        existing = getattr(parent, name)
        if type(existing) is not list:
            existing = [existing]
            setattr(parent, name, existing)
        existing.append(child)
    # Every child is also recorded, in document order, on _seq
    if not parent._seq:
        parent._seq = []
    parent._seq.append(child)
def walk_xo(o):
    "Pre-order (depth-first) generator over an _XO_ tree, root included"
    yield o
    for subnode in children(o):
        for descendant in walk_xo(subnode):
            yield descendant
def write_xml(o, out=stdout):
    "Serialize an _XO_ object back into XML"
    out.write("<%s" % tagname(o))
    for pair in attributes(o):
        out.write(' %s=%s' % pair)
    out.write('>')
    # Interleaved text and element children are written in document order
    for piece in content(o):
        if type(piece) in StringTypes:
            out.write(piece)
        else:
            write_xml(piece, out)
    out.write("</%s>" % tagname(o))
def XPath(o, path):
    "Find node(s) within an _XO_ object"
    # NOTE: this module is Python 2 code (old-style raise, sys.maxint).
    if not isinstance(o,_XO_):
        raise TypeError, \
              "XPath() only defined on gnosis.xml.objectify._XO_ object"
    # '//' is rewritten to '/!!' so a plain split('/') keeps the recursive
    # marker attached to the following fragment.
    path = path.replace('//','/!!')   # Placeholder hack for easy splitting
    if path.startswith('/'):          # No need for init / since node==root
        path = path[1:]
    if path.startswith('!!'):         # Recursive path fragment
        path, start, stop = indices(path)
        i = 0
        # Try the remaining path at every node of the subtree (depth first),
        # honoring the [start..stop] slice across all matches.
        for match in walk_xo(o):
            if i >= stop: return
            for node in XPath(match, path[2:]):
                if start <= i < stop:
                    yield node
                i += 1
    elif '/' in path[1:]:             # Compound, non-recursive
        head, tail = path.split('/', 1)
        for match in XPath(o, head):
            for node in XPath(match, tail):
                yield node
    else:                             # Atomic path fragment
        path, start, stop = indices(path)
        if path=="*":                 # Node wildcard
            for node in islice(children(o), start, stop):
                yield node
        elif path=="text()":          # Node text(s)
            for s in islice(text(o), start, stop):
                yield s
        elif path.startswith('@*'):   # All node attributes
            for attr in attributes(o):
                yield attr
        elif path.startswith('@'):    # Specific node attribute
            for attr in attributes(o):
                if attr[0]==path[1:]:
                    yield attr
        elif hasattr(o, path):        # Named node type
            for node in islice(getattr(o, path), start, stop):
                yield node
def indices(path):
    """Split an optional ``[i]`` / ``[i..j]`` index suffix off a path fragment.

    Returns ``(path_without_suffix, start, stop)`` where start/stop are a
    0-based half-open slice (``[i]`` means element i only; no suffix means
    the full range up to maxint).
    """
    if '[' in path:                     # Check for indices
        path, param = path[:-1].split('[')
        # A list comprehension instead of bare map(): map() only returns a
        # subscriptable list on Python 2, so slice_[0] broke under Python 3.
        slice_ = [int(p) for p in param.split('..')]
        start = slice_[0] - 1
        if len(slice_) == 2:
            stop = slice_[1]
        else:
            stop = start + 1
    else:
        start, stop = 0, maxint
    return path, start, stop
def _dir(o):
try: return o.__dict__.keys()
except: return []
#-- Self-test utility functions
def pyobj_printer(py_obj, level=0):
"Return a 'deep' string description of a Python object"
if level==0: descript = '-----* '+py_obj.__class__.__name__+' *-----\n'
else: descript = ''
if hasattr(py_obj, '_XML'): # present the literal XML of object
prettified_XML = ' '.join(py_obj._XML.split())[:50]
descript = (' '*level)+'CONTENT='+prettified_XML+'...\n'
else: # present the object hierarchy view
for membname in _dir(py_obj):
if membname in ("__parent__", "_seq"):
continue # ExpatFactory uses bookeeping attribute
member = getattr(py_obj,membname)
if type(member) == InstanceType:
descript += '\n'+(' '*level)+'{'+membname+'}\n'
descript += pyobj_printer(member, level+3)
elif type(member) == ListType:
for i in range(len(member)):
descript += '\n'+(' '*level)+'['+membname+'] #'+str(i+1)
descript += (' '*level)+'\n'+pyobj_printer(member[i],level+3)
else:
descript += (' '*level)+membname+'='
memval = ' '.join(unicode(member).split())
if len(memval) > 50:
descript += memval[:50]+'...\n'
else:
descript += memval + '\n'
return descript | PypiClean |
/ComponentDB-CLI-3.15.5.tar.gz/ComponentDB-CLI-3.15.5/cdbCli/service/cli/cdbCliCmnds/setItemLogById.py |
import sys
import re
import click
import csv
from cdbApi import LogEntryEditInformation
from cdbApi import ApiException
from datetime import datetime
from cdbCli.common.cli.cliBase import CliBase
##############################################################################################
# #
# Add log to item given the item's ID #
# #
##############################################################################################
def set_item_log_by_id_helper(item_api, item_id, log_entry, effective_date=None):
    """Write one log entry onto a CDB item, echoing (and exiting) on API errors.

    :param item_api: Necessary item api object
    :param item_id: item ID of the object which the log is being written for
    :param log_entry: the log entry to be written
    :param effective_date: optional date of log (YYYY-MM-DD string)"""
    when = datetime.strptime(effective_date, "%Y-%m-%d") if effective_date else effective_date
    try:
        entry = LogEntryEditInformation(
            item_id=item_id, log_entry=log_entry, effective_date=when
        )
        item_api.add_log_entry_to_item(log_entry_edit_information=entry)
    except ApiException as exc:
        # Pull the server's localizedMessage out of the raw error body, if present.
        matches = re.findall(r'"localizedMessage.*', exc.body)
        if matches:
            click.echo("Error uploading log entry: " + matches[0][:-2])
        else:
            click.echo("Error uploading log entry")
        exit(1)
@click.command()
@click.option(
    "--input-file",
    help="Input csv file with item_id,log_data,effective_date default is STDIN",
    type=click.File("r"),
    default=sys.stdin,
)
@click.option(
    "--effective-date", is_flag=True, help="Set if effective date is listed in input"
)
@click.pass_obj
def set_item_log_by_id(cli, input_file, effective_date=False):
    """Adds a log entry to the given item ids with optional effective date

    \b
    Example (file): set-item-log-by-id --input-file filename.csv --effective-date
    Example (pipe): cat filename.csv | set-item-log-by-id
    Example (terminal): set-item-log-by-id
                        header
                        <Insert Item ID>,<example log text>

    Input is either through a named csv file or through STDIN. Default is STDIN
    The format of the input data is an intended row to be removed followed by
    <Item ID>,<Log Data>,<Effective Date>.
    """
    try:
        factory = cli.require_authenticated_api()
    except ApiException:
        click.echo("Unauthorized User/ Wrong Username or Password. Try again.")
        return

    item_api = factory.getItemApi()
    stdin_msg = "Entry per line: <item_id>,<log_text>"
    reader, stdin_tty_mode = cli.prepare_cli_input_csv_reader(input_file, stdin_msg)

    # Parse lines of csv
    for row in reader:
        if len(row) == 0 and stdin_tty_mode:
            break
        if not row[0]:
            continue
        item_id = row[0]
        log_entry = row[1]
        # Read the per-row date into a separate variable.  The previous code
        # reassigned the `effective_date` flag itself with row[2], so a single
        # row with an empty date cell silently disabled dates for every
        # following row.
        row_date = row[2] if effective_date else None
        set_item_log_by_id_helper(item_api, item_id, log_entry, row_date)
# Allow the command to be invoked directly as a script (click parses argv).
if __name__ == "__main__":
    set_item_log_by_id()
/GenIce2-2.1.7.1.tar.gz/GenIce2-2.1.7.1/genice2/lattices/Struct29.py | from genice2.cell import cellvectors
import genice2.lattices
# NOTE(review): this module previously assigned an identical `desc` dict
# twice (once before and once after the citation docstring); the duplicate
# assignment has been collapsed into a single one.
# coding: utf-8
"""
Data source: Dutour Sikirić, Mathieu, Olaf Delgado-Friedrichs, and Michel Deza. “Space Fullerenes: a Computer Search for New Frank-Kasper Structures” Acta Crystallographica Section A Foundations of Crystallography 66.Pt 5 (2010): 602–615.
Cage composition:
 (12,14,15,16) = (22,4,4,8,)
"""
# Metadata consumed by the genice2 plugin framework.
desc = {"ref": {"SpaceFullerene": 'Sikiric 2010'},
        "usage": "No options available.",
        "brief": "A space fullerene."
        }
class Lattice(genice2.lattices.Lattice):
def __init__(self):
self.pairs = """
110 129
199 204
139 79
113 157
7 65
91 111
40 201
44 211
102 210
85 47
183 162
106 50
75 47
82 99
19 36
134 179
50 79
186 120
108 32
168 21
139 144
153 118
71 187
25 9
149 158
166 212
121 55
158 56
160 21
177 112
89 95
168 117
120 170
197 79
81 39
195 32
213 176
8 54
25 13
94 111
134 9
22 100
31 169
14 62
161 112
29 173
63 57
53 138
160 208
15 184
43 205
143 93
51 182
126 183
126 191
188 95
162 93
193 44
128 116
192 46
171 99
172 101
206 204
162 165
81 119
127 45
68 77
141 18
150 95
180 178
198 136
16 55
170 116
87 206
0 193
23 57
1 34
161 115
15 86
194 28
4 119
94 129
26 174
96 176
4 200
126 109
109 42
12 171
57 207
135 30
55 64
91 28
6 72
195 61
146 156
5 191
24 179
159 97
140 76
10 115
59 114
39 137
88 102
40 63
48 176
94 180
127 214
107 61
124 149
35 124
43 213
135 17
80 203
161 101
212 179
183 211
167 173
27 45
129 130
140 139
17 16
122 121
199 119
210 78
108 107
84 198
141 122
161 65
143 54
66 148
17 90
146 125
123 50
48 201
144 22
98 90
63 109
58 181
80 61
146 87
118 56
20 112
163 137
154 208
49 71
167 193
52 72
175 163
202 168
63 148
139 185
188 137
83 209
74 92
200 39
122 45
31 129
212 159
37 5
144 24
77 192
201 165
70 77
30 103
156 137
172 70
97 150
166 21
171 80
147 52
66 155
35 14
12 196
70 198
107 212
181 49
60 41
16 114
62 194
56 115
69 7
6 67
177 153
8 66
33 84
2 153
160 113
152 46
147 69
40 187
85 178
190 175
82 97
81 96
74 89
73 88
12 60
33 101
105 103
67 65
123 14
175 95
19 110
98 64
58 207
159 80
141 214
60 195
196 166
104 22
177 10
104 79
82 89
156 204
196 189
86 28
196 208
145 49
8 204
141 33
87 200
20 172
205 206
76 100
197 169
180 197
73 51
166 41
197 14
50 47
104 75
150 120
205 39
195 74
177 215
98 103
164 187
94 194
93 213
123 88
1 52
107 36
136 11
3 132
202 99
26 211
146 190
163 43
67 2
108 189
154 117
48 173
49 174
27 38
66 201
19 160
26 59
68 215
64 59
96 128
58 142
156 96
87 92
105 44
210 11
91 124
167 183
77 118
1 17
149 86
13 100
26 23
168 36
152 85
135 72
121 38
0 174
185 130
34 98
55 138
134 208
111 104
164 145
72 45
0 29
73 149
71 193
151 155
19 9
154 185
135 142
24 154
151 213
69 84
132 170
43 128
68 78
53 6
5 48
191 29
133 12
122 147
93 173
162 155
25 130
35 178
30 121
2 198
31 22
186 125
200 131
73 47
182 158
42 165
78 46
189 117
76 130
190 120
51 10
59 105
5 42
164 211
188 131
106 178
184 88
190 128
83 71
203 92
143 148
140 157
60 186
133 134
13 21
169 113
78 115
4 132
209 114
180 157
34 138
169 185
37 54
100 113
18 30
27 114
126 174
189 99
171 3
163 199
29 148
202 61
20 52
76 111
1 214
20 2
116 206
202 41
133 32
15 56
65 118
18 6
106 91
110 157
37 81
38 83
210 158
67 33
184 11
184 10
105 142
188 125
140 106
209 90
8 176
13 179
7 70
192 136
24 36
44 207
152 102
4 175
191 155
215 11
75 62
123 28
84 215
68 101
203 131
186 3
97 41
85 194
203 150
64 181
116 119
23 187
143 42
58 90
152 86
108 9
53 7
127 53
182 136
74 125
124 75
25 117
209 23
83 103
144 110
145 109
40 167
153 182
51 46
205 54
214 172
0 57
89 132
151 37
127 16
145 207
133 159
151 199
69 112
35 102
82 32
31 62
15 192
164 165
27 142
38 181
34 18
92 170
147 138
3 131
"""
self.waters = """
0.0 0.0 0.36788
0.41667 0.20834 0.53433
0.1237 0.8763 0.61709
0.66927 0.95963 0.09763
0.79297 0.20703 0.1545
0.66927 0.95963 0.2838
0.0 0.625 0.55846
0.95963 0.29037 0.59763
0.41667 0.33333 0.25
0.20833 0.41667 0.96125
0.54037 0.08073 0.69287
0.7526 0.8763 0.69287
0.58334 0.79167 0.03433
0.0 0.625 0.94154
0.7526 0.8763 0.80713
0.95963 0.29037 0.7162
0.125 0.25 0.5
0.25 0.125 0.5
0.79167 0.58334 0.53433
0.375 0.375 0.94154
0.33074 0.04037 0.59763
0.79167 0.58333 0.96567
0.1237 0.8763 0.88291
0.29037 0.33073 0.40237
0.375 0.0 0.94154
0.0 0.375 0.94154
0.04037 0.33074 0.40237
0.33334 0.66667 0.48029
0.95964 0.29037 0.7838
0.0 0.0 0.31999
0.875 0.75 0.5
0.0 0.0 0.86788
0.20833 0.41667 0.03433
0.70963 0.66927 0.59763
0.66667 0.33334 0.51971
0.70963 0.66927 0.7838
0.41667 0.20833 0.96567
0.66667 0.08333 0.25
0.58334 0.79167 0.46567
0.45963 0.91927 0.19287
0.54297 0.45703 0.3455
0.79167 0.58333 0.03875
0.45963 0.91927 0.30713
0.04037 0.70963 0.2162
0.04037 0.70963 0.40237
0.33334 0.66667 0.52414
0.33074 0.29037 0.7162
0.33074 0.04037 0.7838
0.66927 0.70963 0.2838
0.66927 0.95964 0.40237
0.54037 0.08074 0.80713
0.33073 0.04037 0.7162
0.375 0.0 0.55846
0.0 0.375 0.55846
0.41667 0.08333 0.25
0.875 0.125 0.5
0.91927 0.45963 0.69287
0.2474 0.1237 0.38291
0.375 0.0 0.44154
0.0 0.375 0.44154
0.625 0.625 0.05846
0.41667 0.20833 0.03875
0.0 0.0 0.81999
0.41407 0.20703 0.3455
0.79167 0.20834 0.46125
0.91927 0.45963 0.62219
0.29037 0.33073 0.2838
0.95964 0.66927 0.59763
0.45703 0.54297 0.6545
0.70963 0.04037 0.59763
0.1237 0.2474 0.61709
0.66927 0.70963 0.40237
0.20833 0.79167 0.53875
0.33333 0.91667 0.75
0.29037 0.33074 0.09763
0.1237 0.8763 0.80713
0.91926 0.45963 0.87781
0.20704 0.41407 0.6545
0.54037 0.45963 0.69287
0.45703 0.91407 0.8455
0.375 0.0 0.05846
0.66927 0.95963 0.2162
0.0 0.375 0.05846
0.625 0.625 0.44154
0.7526 0.8763 0.61709
0.33073 0.29037 0.7838
0.08334 0.41667 0.75
0.41407 0.20703 0.1545
0.58333 0.91667 0.75
0.04037 0.33074 0.09763
0.41667 0.20833 0.46125
0.91927 0.45964 0.80713
0.2474 0.1237 0.11709
0.04037 0.70963 0.2838
0.20703 0.41407 0.8455
0.08073 0.54037 0.12219
0.66927 0.70963 0.2162
0.0 0.625 0.05846
0.66667 0.33334 0.47587
0.79167 0.20833 0.03875
0.95963 0.66927 0.90237
0.54037 0.45963 0.62219
0.58333 0.66667 0.75
0.79167 0.58333 0.46125
0.20703 0.79297 0.8455
0.0 0.625 0.44154
0.66667 0.33334 0.81999
0.25 0.125 0.0
0.125 0.25 0.0
0.54297 0.08593 0.3455
0.33074 0.29037 0.90237
0.08593 0.54297 0.8455
0.54037 0.08073 0.62219
0.70963 0.66927 0.90237
0.20833 0.41667 0.46567
0.66667 0.33333 0.68001
0.0 0.0 0.18001
0.79167 0.20833 0.96567
0.08594 0.54297 0.6545
0.8763 0.1237 0.19287
0.8763 0.7526 0.11709
0.75 0.875 0.5
0.58334 0.79167 0.53875
0.70963 0.04037 0.7838
0.95964 0.66927 0.7838
0.45963 0.54037 0.12219
0.79297 0.20703 0.3455
0.20834 0.41667 0.53875
0.8763 0.7526 0.19287
0.1237 0.2474 0.88291
0.95963 0.29037 0.90237
0.45963 0.91927 0.12219
0.8763 0.1237 0.11709
0.33334 0.66667 0.01971
0.33334 0.66667 0.97587
0.125 0.875 0.5
0.0 0.0 0.68001
0.33333 0.66667 0.18001
0.79167 0.20833 0.53433
0.54037 0.08074 0.87781
0.66667 0.33333 0.86278
0.625 0.625 0.55846
0.20833 0.79167 0.46567
0.29037 0.95963 0.2838
0.33074 0.04037 0.90237
0.45963 0.91926 0.37781
0.54297 0.45703 0.1545
0.625 0.0 0.55846
0.2474 0.1237 0.30713
0.08334 0.66667 0.75
0.04037 0.70963 0.09763
0.91667 0.33333 0.25
0.33333 0.41667 0.75
0.20704 0.79297 0.6545
0.625 0.0 0.94154
0.04037 0.33074 0.2838
0.45964 0.54037 0.19287
0.54037 0.45963 0.87781
0.95964 0.66927 0.7162
0.20834 0.79167 0.03433
0.625 0.625 0.94154
0.66667 0.33333 0.63722
0.08074 0.54037 0.30713
0.08074 0.54037 0.19287
0.33333 0.66667 0.36278
0.33333 0.66667 0.31999
0.875 0.75 0.0
0.79297 0.58593 0.3455
0.66667 0.33333 0.98029
0.7526 0.8763 0.88291
0.0 0.0 0.13213
0.625 0.0 0.05846
0.33074 0.29037 0.59763
0.8763 0.7526 0.30713
0.8763 0.1237 0.38291
0.91407 0.45703 0.1545
0.66667 0.58333 0.25
0.45703 0.91407 0.6545
0.54037 0.45963 0.80713
0.20834 0.79167 0.96125
0.45703 0.54297 0.8455
0.625 0.0 0.44154
0.1237 0.8763 0.69287
0.91407 0.45703 0.3455
0.70963 0.04037 0.7162
0.70963 0.04037 0.90237
0.66927 0.70963 0.09763
0.45963 0.54037 0.37781
0.33333 0.66667 0.13722
0.875 0.125 0.0
0.79297 0.58593 0.1545
0.8763 0.1237 0.30713
0.1237 0.2474 0.69287
0.8763 0.7526 0.38291
0.1237 0.2474 0.80713
0.375 0.375 0.05846
0.75 0.875 0.0
0.58593 0.79297 0.8455
0.0 0.0 0.63213
0.04037 0.33073 0.2162
0.54297 0.08593 0.1545
0.45963 0.54037 0.30713
0.66667 0.33333 0.02414
0.29037 0.95964 0.09763
0.29037 0.33074 0.2162
0.29037 0.95963 0.2162
0.2474 0.1237 0.19287
0.29037 0.95963 0.40237
0.58334 0.79167 0.96125
0.375 0.375 0.44154
0.70963 0.66927 0.7162
0.08073 0.54037 0.37781
0.125 0.875 0.0
0.91667 0.58333 0.25
0.375 0.375 0.55846
0.58593 0.79296 0.6545
"""
self.coord = "relative"
self.cages = """
12 0.5 0.0 0.0
14 0.66667 0.33333 0.20811
12 0.0 0.5 0.0
14 -0.33333 -0.66667 -0.70811
16 0.33333 0.66667 -0.09654
12 0.66667 0.33333 -0.07885
12 -0.16147 0.16147 0.65668
16 0.0 0.0 0.44154
12 -0.16147 -0.32294 0.65668
12 0.5 0.5 0.5
12 -0.16147 0.16147 -0.15668
12 -0.32294 -0.16147 0.15668
12 0.16147 0.32294 -0.65668
16 -0.33333 -0.66667 -0.59654
12 -0.66667 -0.33333 -0.57885
15 0.0 0.0 0.75
12 -0.33333 -0.66667 0.57885
16 -0.66667 -0.33333 0.59654
12 0.16147 0.32294 0.15668
12 -0.5 0.0 -0.5
12 0.16147 -0.16147 0.15668
12 -0.32294 -0.16147 -0.65668
12 0.16147 -0.16147 -0.65668
15 -0.33333 -0.66667 0.75
16 0.0 0.0 -0.44154
15 0.0 0.0 0.25
12 0.33333 0.66667 0.07885
16 0.66667 0.33333 0.09654
12 0.0 -0.5 0.5
14 -0.66667 -0.33333 0.70811
12 -0.5 -0.5 0.0
14 0.33333 0.66667 -0.20811
12 0.32294 0.16147 0.65668
15 0.33333 0.66667 0.25
16 0.0 0.0 -0.94154
12 -0.16147 -0.32294 -0.15668
16 0.0 0.0 0.94154
12 0.32294 0.16147 -0.15668
"""
self.bondlen = 3
self.cell = """
12.787696703673442 0.0 0.0
-6.393848351836717 11.07447020127173 0.0
4.1761514327380356e-15 7.233306461603835e-15 68.20172862323463
"""
self.density = 0.6684590950407042
self.cell = cellvectors(a=12.787696703673442,
b=12.787696703673442,
c=68.20172862323463,
C=119.99999999999999) | PypiClean |
/Flask-WaffleConf-0.3.1.tar.gz/Flask-WaffleConf-0.3.1/docs/source/usage.rst | Usage in views
==============
Since *version 0.3.0* the extension does not impose a specific view or template
to use. Instead, you can implement your own views and work with the
:py:class:`~flask_waffleconf.core._WaffleState` instance in the application.
Initialization
--------------
To initialize the extension, two different things are required: a model
implementing the :py:class:`~flask_waffleconf.models.WaffleMixin` interface,
for instance using SQLAlchemy or peewee; and a configured
:py:class:`~flask_waffleconf.store.WaffleStore`.
As of *version 0.3.0*, there are two stores available (although it is very easy
to create a new one using the ``WaffleStore`` class as a base):
- :py:class:`~flask_waffleconf.store.AlchemyWaffleStore`: uses ``SQLAlchemy``
for the database backend
- :py:class:`~flask_waffleconf.store.PeeweeWaffleStore`: uses ``peewee``
for the database backend
*Changed in 0.3.1*: stored configurations are not updated when the extension is
initialized and require manually calling the update method.
.. note::
Model and store should use the same ORM/library as backend.
Obtaining stored values
-----------------------
The following simple views are an example of how you can use the extension to
parse stored values of configuration variables:
.. code-block:: python
from flask import current_app
@app.route('/all')
def get_all():
"""Returns the whole list of stored configuration variables."""
state = current_app.extensions['waffleconf']
# Get all the variables
parsed = state.parse_conf() # Returns a dict
return parsed
@app.route('/<key>')
def get_key(key):
"""Return the value of a single key."""
state = current_app.extensions['waffleconf']
# Get variable
parsed = state.parse_conf([key,]) # Returns a dict
return parsed
As the :py:meth:`~flask_waffleconf.core.WaffleState.parse_conf` method returns
a Python ``dict``, creating a form for showing or updating the values is very
easy.
Updating stored values
----------------------
Similarly, it is also possible to update values at runtime using a custom view:
.. code-block:: python
from flask import current_app, request
@app.route('/update', methods=['POST'])
def update_vars():
"""Update the vars with the values of a hypothetical form."""
# Suppose WTForms with fields `SITENAME` and `DESCRIPTION`
form = Form(request.form)
if form.validate():
vals = {
'SITENAME': form.sitename.data,
'DESCRIPTION': form.desc.data
}
state = current_app.extensions['waffleconf']
state.update_db(vals)
| PypiClean |
/L_graph_system-0.1.1-py3-none-any.whl/L_graph_system/lgraph.py | from .vertex import Vertex
from .arc import Arc
import re
import copy
import pickle
def load_graph(filename):
    """Deserialize a pickled LGraph (or any pickled object) from ``filename``."""
    with open(filename, 'rb') as fh:
        return pickle.load(fh)
def save_graph(graph, filename):
    """Pickle ``graph`` to ``filename``, overwriting any existing file."""
    with open(filename, 'wb') as sink:
        pickle.dump(graph, sink, pickle.HIGHEST_PROTOCOL)
class LGraph:
    def __init__(self, brackets=None):
        """Create an empty L-graph.

        :param brackets: tuple of two bracket pairs used for the two bracket
            stacks; defaults to (('(', ')'), ('[', ']')).
        """
        if brackets is None:
            brackets = (('(', ')'), ('[', ']'))
        self.__vertexes = {}  # vertex name -> Vertex object
        self.__arcs = {}  # arc key -> Arc object
        self.__brackets = brackets
        # we need to create a scheme of program structure.
        # we need to have a special form for L-graph core
        # basing on core, we can check for consistency of l-graph
        # this is a task of checking for empty language
        # we may check for determinative l-graph, search for algorithm is needed
        # we may also try SAGE
        self.__start_vertexes = []  # names of start vertexes
        self.__finish_vertexes = []  # names of finish vertexes
    def add_arc(self, start_vertex, end_vertex, label='', bracket_trace='', key=None):
        """Create an arc between two vertexes, creating missing vertexes on the fly.

        :param start_vertex: name of the source vertex
        :param end_vertex: name of the target vertex
        :param label: input symbol consumed when traversing the arc ('' = free move)
        :param bracket_trace: bracket annotation such as '(1' or ')1' ('' = none)
        :param key: optional explicit arc key; the first free numeric string
            ('1', '2', ...) is generated when omitted
        :raises NameError: if a non-empty trace contains none of the configured
            bracket characters, or an explicit key already exists
        """
        # A non-empty trace must contain at least one configured bracket char.
        flag = 0 if bracket_trace == '' else 1
        for b in self.__brackets:
            for bb in b:
                if bb in bracket_trace:
                    flag = 0
                    break
        if flag:
            raise NameError('Incorrect brackets')
        if key:
            if key in self.__arcs.keys():
                raise NameError(f'Arc with key "{key}" already exists')
        else:
            i = 1
            key = f'{i}'
            while key in self.__arcs.keys():
                i += 1
                key = f'{i}'
            # if the key is generated, we get the first non-occupied key
        if start_vertex in self.__vertexes.keys():
            self.__vertexes[start_vertex].out_arcs.add(key)
        else:
            self.__vertexes[start_vertex] = Vertex(start_vertex)
            # if it is not defined, it will be new unused vertex.
            self.__vertexes[start_vertex].out_arcs.add(key)
        if end_vertex in self.__vertexes.keys():
            self.__vertexes[end_vertex].in_arcs.add(key)
        else:
            self.__vertexes[end_vertex] = Vertex(end_vertex)
            self.__vertexes[end_vertex].in_arcs.add(key)
        self.__arcs[key] = Arc(key, self.__vertexes[start_vertex], self.__vertexes[end_vertex], label, bracket_trace)
def add_vertex(self, name=None):
if name:
if name in self.__vertexes.keys():
raise NameError(f'Vertex with name "{name}" already exists.')
else:
new_name = name
else:
i = 1
new_name = f'{i}'
while new_name in self.__vertexes.keys():
i += 1
new_name = f'{i}'
self.__vertexes[new_name] = Vertex(new_name)
    def remove_arc(self, key):
        """Delete the arc stored under ``key``.

        Arc.remove_arc() is called before dropping the registry entry so the
        arc can detach itself from its endpoint vertexes — presumably it
        removes the key from their in/out sets; confirm in arc.py.
        """
        self.__arcs[key].remove_arc()
        self.__arcs.pop(key)
    def remove_vertex(self, name):
        """Delete a vertex together with every arc that starts or ends at it."""
        # Iterate over copies of the arc-key sets: Arc.remove_arc() presumably
        # mutates these sets while detaching itself — confirm in arc.py.
        j = self.__vertexes[name].in_arcs.copy()
        for i in j:
            self.__arcs[i].remove_arc()
            self.__arcs.pop(i)
        j = self.__vertexes[name].out_arcs.copy()
        for i in j:
            self.__arcs[i].remove_arc()
            self.__arcs.pop(i)
        self.__vertexes.pop(name)
def __str__(self):
res = "Vertexes: \n"
for b in self.__vertexes.keys():
res += b + ' ' + str(self.__vertexes[b])
res += "\nArcs:\n"
for a, c in self.__arcs.items():
res += str(c) + '\n'
res += "Start_Vertexes: \n"
for b in self.__start_vertexes:
res += b + '\n'
res += "Finish_Vertexes: \n"
for b in self.__finish_vertexes:
res += b + '\n'
return res
def set_start(self, name):
if name in self.__vertexes.keys():
self.__start_vertexes.append(name)
else:
raise NameError(f'No vertex with name "{name}"')
def set_finish(self, name):
if name in self.__vertexes.keys():
self.__finish_vertexes.append(name)
else:
raise NameError(f'No vertex with name "{name}"')
def remove_start(self, name):
if name in self.__start_vertexes:
self.__start_vertexes.remove(name)
else:
raise NameError(f'No start vertex with name "{name}"')
def remove_finish(self, name):
if name in self.__finish_vertexes:
self.__finish_vertexes.remove(name)
else:
raise NameError(f'No finish vertex with name "{name}"')
def solve(self, in_string, arc_trace=False, vertex_trace=False):
current_vertex_name = self.__start_vertexes[0]
# current_vertex = self.__vertexes[current_vertex_name]
brackets_path = [[], []]
path, arc_path = self.__solve_one(in_string, current_vertex_name, brackets_path)
if arc_trace:
return arc_path
if vertex_trace:
return path
if len(path) == 0:
return False
else:
return True
def __solve_one(self, in_string, vertex_key, old_brackets_path):
if vertex_key in self.__finish_vertexes and len(in_string) == 0 and len(old_brackets_path[0]) == 0 and len(old_brackets_path[1]) == 0:
return [vertex_key], []
# brackets_path[0] for first type of brackets
# brackets_path[1] for the second
current_vertex = self.__vertexes[vertex_key]
path = []
arc_trace = []
for cur in current_vertex.out_arcs:
# we check conditions whether an arc is suitable for us
flag_to_check = False
brackets_path = copy.deepcopy(old_brackets_path)
# copy is needed to copy nested list correctly
new_string = ""
if len(in_string) == 0:
if self.__arcs[cur].label == '':
flag_to_check = True
new_string = in_string
else:
continue
else:
if self.__arcs[cur].label == in_string[0] or self.__arcs[cur].label == '':
flag_to_check = True
if self.__arcs[cur].label == in_string[0]:
new_string = in_string[1:]
else:
new_string = in_string
# we checked all conditions when the ark may be suitable for us to go further.
if flag_to_check:
new_current_vertex = self.__arcs[cur].end
current_brackets = self.__arcs[cur].brackets
first_brackets = ''
second_brackets = ''
if len(current_brackets) > 0:
# first open brackets
res = current_brackets.find(self.__brackets[0][0])
if res != -1:
if len(current_brackets) > res:
if current_brackets[res+1:res+2].isnumeric():
first_brackets = current_brackets[res:res+2]
else:
first_brackets = current_brackets[res]
else:
first_brackets = current_brackets[res]
elif current_brackets.find(self.__brackets[0][1]) != -1:
# first close brackets
res = current_brackets.find(self.__brackets[0][1])
if res != -1:
if len(current_brackets) > res:
if current_brackets[res + 1:res + 2].isnumeric():
first_brackets = current_brackets[res:res + 2]
else:
first_brackets = current_brackets[res]
else:
first_brackets = current_brackets[res]
else:
first_brackets = ''
# second open brackets
res = current_brackets.find(self.__brackets[1][0])
if res != -1:
if len(current_brackets) > res:
if current_brackets[res + 1:res + 2].isnumeric():
second_brackets = current_brackets[res:res + 2]
else:
second_brackets = current_brackets[res]
else:
second_brackets = current_brackets[res]
elif current_brackets.find(self.__brackets[1][1]) != -1:
# second close brackets
res = current_brackets.find(self.__brackets[1][1])
if res != -1:
if len(current_brackets) > res:
if current_brackets[res + 1:res + 2].isnumeric():
second_brackets = current_brackets[res:res + 2]
else:
second_brackets = current_brackets[res]
else:
second_brackets = current_brackets[res]
else:
second_brackets = ''
# The following parts helps track the resolving process
# print()
# print(cur)
# print(current_brackets)
# print(brackets_path)
# print('first brackets: ', first_brackets)
# print('second brackets: ', second_brackets)
# if bracket is opening we add it
# should revise the whole bracket check
if self.__brackets[0][0] in first_brackets:
brackets_path[0].append(first_brackets)
elif self.__brackets[0][1] in first_brackets:
if len(brackets_path[0]) > 0:
if self.__brackets[0][0] in brackets_path[0][len(brackets_path[0])-1]:
# brackets_path[0][len(brackets_path[0])-1] means the last added bracket
# if bracket is closing we try to close it
if brackets_path[0][len(brackets_path[0])-1][1:] == first_brackets[1:]:
brackets_path[0].pop()
# some kind of index check
else:
continue
# check for index is made after brackets are categorised
else:
continue
else:
continue
# print(cur)
# the same for the second type of the brackets
if self.__brackets[1][0] in second_brackets:
brackets_path[1].append(second_brackets)
elif self.__brackets[1][1] in second_brackets:
if len(brackets_path[1]) > 0:
if self.__brackets[1][0] in brackets_path[1][len(brackets_path[1])-1]:
if brackets_path[1][len(brackets_path[1]) - 1][1:] == second_brackets[1:]:
brackets_path[1].pop()
# some kind of index check
else:
continue
else:
continue
else:
continue
new_path, new_arc_trace = self.__solve_one(new_string, new_current_vertex.name, brackets_path)
# print(new_path)
if len(new_path) > 0:
path = [vertex_key]
arc_trace = [cur]
path.extend(new_path)
arc_trace.extend(new_arc_trace)
break
# here we combine our path, and if it is not empty, it's good. if it is empty -> no path
return path, arc_trace
def cycles(self): # cycles without duplicates
cycle_res = self.__cycle_detection()
res = [list(i) for i in {*[tuple(sorted(i)) for i in cycle_res]}]
return res
    def __cycle_detection(self):  # all cycles
        """Collect every directed cycle found from every possible start vertex.

        The same loop is reported once per distinct starting vertex (i.e. in
        several rotations); cycles() deduplicates the result.
        """
        cycles = []
        for cur in self.__vertexes.keys():
            path = []
            self.__cycle_depth_search(cur, [], path)
            if len(path) > 0:
                cycles.extend(path)
        res = cycles
        return res
    def __cycle_depth_search(self, vertex_name, path, accumulator):
        """DFS helper: append to ``accumulator`` every cycle that closes back
        to the first vertex of ``path``; the return value (closing segment or
        []) is only meaningful to the recursive caller.
        """
        current_vertex = self.__vertexes[vertex_name]
        new_path = path.copy()
        if len(path) > 0:
            if vertex_name == path[0]:
                # Returned to the root of this search: ``path`` is a cycle.
                return new_path
        if vertex_name in path:
            # Revisited a non-root vertex: abandon this branch.
            return []
        new_path.append(vertex_name)
        for cur in current_vertex.out_arcs:
            only_new_path = self.__cycle_depth_search(self.__arcs[cur].end.name, new_path, accumulator)
            if len(only_new_path) > 0:
                accumulator.append(only_new_path)
        return []
    def __arc_cycle_detection(self):  # returns list of all lists of cycling arcs
        """Translate each vertex cycle into the list of arc keys forming it."""
        cycles = self.__cycle_detection()
        arc_cy = []
        for cy in cycles:
            cur_cycle = []
            for ind in range(len(cy)):
                # cy[ind-1] wraps to the last vertex when ind == 0, closing the
                # cycle; the connecting arcs are those entering cy[ind] that
                # also leave cy[ind-1].
                new_arc = self.__vertexes[cy[ind]].in_arcs.intersection(self.__vertexes[cy[ind-1]].out_arcs)
                for ar in new_arc:
                    cur_cycle.append(ar)
            arc_cy.append(cur_cycle)
        return arc_cy
def arc_cycles(self):
cycle = self.__arc_cycle_detection()
res = [list(i) for i in {*[tuple(sorted(i)) for i in cycle]}]
return res
def generate_from_grammar(self, in_grammar):
# inGrammar must be a list of string rules. I concider makig a special class for this purposes
# but it may be so small, that it seems to me that we can handle it right here
bracket_counter = 1
finish_vertex_flag = True
for inString in in_grammar:
# inString = "P->S@"
res = re.search("->", inString)
if res is None:
raise TypeError('Incorrect grammar')
left_part = inString[:res.start()] # label of the rule
right_part = inString[res.end():] # what we need to do
vertex_counter = 1
current_vertex = f'{left_part}_beg'
end_vertex = f'{left_part}_end'
begin_vertex = current_vertex
for position, symbol in enumerate(right_part):
# here we need to add an edge, if it is upper - than make 2 separate edjes and go forward
if symbol == '|':
current_vertex = f'{left_part}_beg'
continue # does not work, need to count symbol position
if symbol.isupper():
new_vertex = f'{symbol}_beg'
self.add_arc(current_vertex, new_vertex, '', f'({bracket_counter}')
current_vertex = f'{symbol}_end'
if right_part.endswith(symbol):
new_vertex = end_vertex
elif right_part[position+1] == '|':
new_vertex = end_vertex
else:
new_vertex = f'{left_part}{vertex_counter}'
vertex_counter += 1
self.add_arc(current_vertex, new_vertex, '', f'){bracket_counter}')
current_vertex = new_vertex
bracket_counter += 1
else:
if right_part.endswith(symbol):
new_vertex = end_vertex
elif right_part[position+1] == '|':
new_vertex = end_vertex
else:
new_vertex = f'{left_part}{vertex_counter}'
vertex_counter += 1
self.add_arc(current_vertex, new_vertex, symbol, '')
current_vertex = new_vertex
if finish_vertex_flag:
finish_vertex_flag = False
self.set_start(begin_vertex)
self.set_finish(end_vertex)
def set_brackets(self, brackets):
if isinstance(tuple, brackets):
for brace in brackets:
if isinstance(tuple, brace):
self.__brackets = brackets
return
raise TypeError('Incorrect brackets')
def type_def(self):
brackets_path = set()
for cur_arc_name in self.__arcs.keys():
cur_arc = self.__arcs[cur_arc_name]
brackets_path.add(cur_arc.brackets)
if len(brackets_path) == 0:
return 'regular'
else:
flag_first = 0
flag_second = 0
for s in brackets_path:
for sym in s:
if sym in self.__brackets[0]:
flag_first = 1
if sym in self.__brackets[1]:
flag_second = 1
if flag_second != flag_first:
return 'context_free'
elif flag_first and flag_second:
return 'recursively_enumerable'
else:
return 'error'
def is_regular(self):
g_type = self.type_def()
if g_type == 'regular':
return True
else:
return False
def is_context_free(self):
g_type = self.type_def()
if g_type == 'regular' or g_type == 'context_free':
return True
else:
return False
    def core(self, paired, neutral):
        """Run a bounded depth-first traversal of the graph ("core" search).

        :param paired: traversal budget per arc belonging to a bracketed
            (paired) cycle — each such arc may be taken at most this many times
        :param neutral: traversal budget per arc belonging to a bracket-free
            (neutral) cycle
        :returns: nested list of vertex-name paths found by __core_depth
        """
        cycles = self.arc_cycles()
        # define cycle type
        # A cycle is 'paired' if any of its arcs carries a bracket trace,
        # otherwise it is 'neutral'.
        paired_cycles = set()
        neutral_cycles = set()
        for cycle in cycles:
            flag = 1
            for cur_arc in cycle:
                if self.__arcs[cur_arc].brackets != '':
                    for item in cycle:
                        paired_cycles.add(item)
                    flag = 0
                    break
            if flag:
                for item in cycle:
                    neutral_cycles.add(item)
        # Per-arc traversal budgets, consumed (decremented) by __core_depth.
        neutral_arcs = dict.fromkeys(neutral_cycles, neutral)
        paired_arcs = dict.fromkeys(paired_cycles, paired)
        begin_vertex = self.__start_vertexes[0]
        path = self.__core_depth(begin_vertex, paired_arcs, neutral_arcs, [[], []], [])
        return path
    def merge(self, another_graph):
        """Merge ``another_graph`` into this graph.

        NOTE(review): not implemented yet — currently a no-op placeholder.
        """
        return
def dead_ends(self):
res = set()
for vertex_name in self.__vertexes.keys():
if not(self.__finish_vertexes[0]) in self.__next_vertexes(vertex_name, set()):
res.add(vertex_name)
return res
def unattainable(self):
res = set()
for vertex_name in self.__vertexes.keys():
if not(vertex_name in self.__next_vertexes(self.__start_vertexes[0], set())):
res.add(vertex_name)
return res
    def remove_unusable(self):
        """Delete every vertex that is a dead end or unreachable from the start."""
        d = self.dead_ends()
        u = self.unattainable()
        d.update(u)
        for vertex_name in d:
            # Defensive re-check: the name might already be gone by now.
            if vertex_name in self.__vertexes.keys():
                self.remove_vertex(vertex_name)
def __next_vertexes(self, vertex_name, path):
cur_vertex = self.__vertexes[vertex_name]
next_vertexes = set()
for new_arc in cur_vertex.out_arcs:
if self.__arcs[new_arc].end.name in next_vertexes:
continue
if self.__arcs[new_arc].end.name in path:
continue
next_vertexes.add(self.__arcs[new_arc].end.name)
path.add(self.__arcs[new_arc].end.name)
new_next_vertexes = self.__next_vertexes(self.__arcs[new_arc].end.name, path)
for x in new_next_vertexes:
next_vertexes.add(x)
return next_vertexes
def __core_depth(self, vertex_key, paired, neutral, old_brackets_path, path_res):
if vertex_key in self.__finish_vertexes and len(old_brackets_path[0]) == 0 and len(old_brackets_path[1]) == 0:
new_path_res = copy.deepcopy(path_res)
new_path_res.append(vertex_key)
return new_path_res
current_vertex = self.__vertexes[vertex_key]
path = []
for cur in current_vertex.out_arcs:
# we check conditions whether an arc is suitable for us
new_path_res = copy.deepcopy(path_res)
flag_to_check = False
brackets_path = copy.deepcopy(old_brackets_path)
new_neutral = copy.copy(neutral)
new_paired = copy.copy(paired)
if cur in new_neutral.keys():
if new_neutral[cur] > 0:
flag_to_check = True
new_neutral[cur] -= 1
elif cur in new_paired.keys():
if new_paired[cur] > 0:
flag_to_check = True
new_paired[cur] -= 1
else:
flag_to_check = True
# copy is needed to copy nested list correctly
# we checked all conditions when the ark may be suitable for us to go further.
if flag_to_check:
new_current_vertex = self.__arcs[cur].end
current_brackets = self.__arcs[cur].brackets
first_brackets = ''
second_brackets = ''
if len(current_brackets) > 0:
# first open brackets
res = current_brackets.find(self.__brackets[0][0])
if res != -1:
if len(current_brackets) > res:
if current_brackets[res + 1:res + 2].isnumeric():
first_brackets = current_brackets[res:res + 2]
else:
first_brackets = current_brackets[res]
else:
first_brackets = current_brackets[res]
elif current_brackets.find(self.__brackets[0][1]) != -1:
# first close brackets
res = current_brackets.find(self.__brackets[0][1])
if res != -1:
if len(current_brackets) > res:
if current_brackets[res + 1:res + 2].isnumeric():
first_brackets = current_brackets[res:res + 2]
else:
first_brackets = current_brackets[res]
else:
first_brackets = current_brackets[res]
else:
first_brackets = ''
# second open brackets
res = current_brackets.find(self.__brackets[1][0])
if res != -1:
if len(current_brackets) > res:
if current_brackets[res + 1:res + 2].isnumeric():
second_brackets = current_brackets[res:res + 2]
else:
second_brackets = current_brackets[res]
else:
second_brackets = current_brackets[res]
elif current_brackets.find(self.__brackets[1][1]) != -1:
# second close brackets
res = current_brackets.find(self.__brackets[1][1])
if res != -1:
if len(current_brackets) > res:
if current_brackets[res + 1:res + 2].isnumeric():
second_brackets = current_brackets[res:res + 2]
else:
second_brackets = current_brackets[res]
else:
second_brackets = current_brackets[res]
else:
second_brackets = ''
if self.__brackets[0][0] in first_brackets:
brackets_path[0].append(first_brackets)
elif self.__brackets[0][1] in first_brackets:
if len(brackets_path[0]) > 0:
if self.__brackets[0][0] in brackets_path[0][len(brackets_path[0]) - 1]:
# brackets_path[0][len(brackets_path[0])-1] means the last added bracket
# if bracket is closing we try to close it
if brackets_path[0][len(brackets_path[0]) - 1][1:] == first_brackets[1:]:
brackets_path[0].pop()
# some kind of index check
else:
continue
# check for index is made after brackets are categorised
else:
continue
else:
continue
# the same for the second type of the brackets
if self.__brackets[1][0] in second_brackets:
brackets_path[1].append(second_brackets)
elif self.__brackets[1][1] in second_brackets:
if len(brackets_path[1]) > 0:
if self.__brackets[1][0] in brackets_path[1][len(brackets_path[1]) - 1]:
if brackets_path[1][len(brackets_path[1]) - 1][1:] == second_brackets[1:]:
brackets_path[1].pop()
# some kind of index check
else:
continue
else:
continue
else:
continue
new_path_res.append(vertex_key)
new_path = self.__core_depth(new_current_vertex.name, new_paired, new_neutral, brackets_path, new_path_res)
# trying to get consistent flat list regardless of recursion direction
if len(new_path) > 0:
if len(path) > 0:
if isinstance(path[0], list):
path.append(new_path)
else:
if isinstance(new_path[0], list):
path_x = path
path = new_path
path.append(path_x)
else:
path_x = [path]
path = path_x
path.append(new_path)
else:
path.extend(new_path)
return path
def reduction(self):
    """Collapse pass-through vertexes of the graph in place.

    A vertex with exactly one incoming and one outgoing arc is removed and
    its two arcs are merged into a single arc, provided the pair does not
    conflict: at most one of the two arcs may carry a label, and at most
    one may carry brackets.  The merged arc keeps the non-empty label and
    the non-empty brackets.
    """
    # Snapshot the keys first: remove_vertex() mutates self.__vertexes.
    old_vertexes = list(self.__vertexes)
    for cur_vertex in old_vertexes:
        # An earlier merge may already have removed this vertex.
        if cur_vertex not in self.__vertexes:
            continue
        cur = self.__vertexes[cur_vertex]
        out_arcs = cur.out_arcs
        in_arcs = cur.in_arcs
        beginning, destination, new_label, new_brackets = '', '', '', ''
        flag = 1
        if len(in_arcs) == 1 and len(out_arcs) == 1:
            for a_in in in_arcs:
                for a_out in out_arcs:
                    if self.__arcs[a_in].label == '':
                        # Incoming arc unlabeled: keep the outgoing label.
                        if self.__arcs[a_in].brackets == '':
                            new_brackets = self.__arcs[a_out].brackets
                        elif self.__arcs[a_out].brackets == '':
                            new_brackets = self.__arcs[a_in].brackets
                        else:
                            flag = 0  # both arcs carry brackets: cannot merge
                        new_label = self.__arcs[a_out].label
                    elif self.__arcs[a_out].label == '':
                        # Outgoing arc unlabeled: keep the incoming label.
                        if self.__arcs[a_in].brackets == '':
                            new_brackets = self.__arcs[a_out].brackets
                        elif self.__arcs[a_out].brackets == '':
                            new_brackets = self.__arcs[a_in].brackets
                        else:
                            flag = 0  # both arcs carry brackets: cannot merge
                        new_label = self.__arcs[a_in].label
                    else:
                        flag = 0  # both arcs carry labels: cannot merge
                    if flag:
                        destination = self.__arcs[a_out].end.name
                        beginning = self.__arcs[a_in].start.name
            if flag:
                self.add_arc(beginning, destination, new_label, new_brackets)
                self.remove_vertex(cur_vertex)
@property
def vertexes(self):
    # Read-only accessor for the internal vertex mapping (name -> vertex).
    return self.__vertexes
@property
def arcs(self):
    # Read-only accessor for the internal arc mapping.
    return self.__arcs
@property
def brackets(self):
    # Read-only accessor for the bracket-pair configuration used during traversal.
    return self.__brackets
@property
def start_vertexes(self):
    # Read-only accessor for the set/collection of start vertexes.
    return self.__start_vertexes
@property
def finish_vertexes(self):
    # Read-only accessor for the set/collection of finish vertexes.
    return self.__finish_vertexes
/LiPD-0.2.8.9.tar.gz/LiPD-0.2.8.9/README.md | # LiPD Utilities - Python
-----
[](https://zenodo.org/badge/latestdoi/24036/nickmckay/LiPD-utilities)
[]()
[]()
[]()
Input/output and manipulation utilities for LiPD files in Matlab, R and Python.
## What is it?
----
LiPD is short for Linked PaleoData. LiPD is the data standard for paleoclimatology and the exchange of data amongst paleoclimate experts. This package will help you convert your existing database of paleoclimate observations into LiPD files. Moreover, it contains tools to analyze and manipulate LiPD data.
## Features
------
* Read & write LiPD files
* Extract & collapse a time series for data analysis
* Filter & query a time series for subset data
* Convert Excel files to LiPD files
* Convert NOAA files to/from LiPD files
* Update LiPD publication data through DOI.org
* Validate LiPD files through lipd.net API
## Requirements
-------
- [Python 3.4+](https://www.python.org)
- Python IDE (Spyder or PyCharm are highly recommended)
- [pip](https://pip.pypa.io/en/stable/installing/)
## Installation
------------
Python 3.4 / 3.5
```
pip install LiPD
```
Python 3.6+
```
pip3 install --egg LiPD
```
## Usage
----------------
Using your preferred Python IDE or a Python console, you can import the LiPD package using:
```
import lipd
```
Here are the major functions within the package. This is a short preview. More in-depth examples can be found further in the guide (coming soon):
```
lipd.readExcel(path="")
lipd.readNoaa(path="")
D = lipd.readLipd(path="")
lipd.writeLipd(D, path="")
ts = lipd.extractTs(D, chron=False)
D = lipd.collapseTs(ts)
idx = lipd.queryTs(ts, expression)
new_ts = lipd.filterTs(ts, expression)
D = lipd.excel()
D = lipd.doi(D)
```
## How to Cite this code
------
<a href="http://doi.org/10.5281/zenodo.60813"><img src="https://zenodo.org/badge/24036/nickmckay/LiPD-utilities.svg"></a>
Use this link to visit the Zenodo website. It provides citation information in many popular formats.
## Further information
----------
Github:
https://github.com/nickmckay/LiPD-utilities
Linked Earth Wiki:
wiki.linked.earth
## Contact
-------
If you are having issues, please let me know.
Contact me at heiser@nau.edu.
## License
-------
The project is licensed under the GNU Public License. Please refer to the file called LICENSE.

| PypiClean |
/EGCG-Core-0.13.tar.gz/EGCG-Core-0.13/CHANGELOG.md | Changelog for EGCG-Core
===========================
0.13 (2019-11-01)
-----------------
- Clarity function uses the reporting app lims end point whenever possible.
0.12 (2019-09-30)
-----------------
- Integration tests: Load data in integration tests
- Use a more generic exception to catch errors when emailing
- Rest_communication: Add Explicit retries
- Retrieval of default genome version now use the reporting app API
- In clarity script: Update delivery step name
0.11.2 (2019-08-07)
-------------------
- AsanaNotification compatibility update for Asana API changes
0.11.1 (2019-07-26)
-------------------
- Updating Jinja2 to 2.10.1
- Updating pyclarity_lims to 0.4.8
0.11 (2019-06-07)
-----------------
- Make methods to access local NCBI database public
- Change constant name from gender to sex (breaks backward compatibility)
0.10 (2019-04-04)
-----------------
- Adding LoggingConfiguration.reset
- Removing PBSExecutor
- Removing implicit parameters on cluster jobs, esp. cpu=1 and mem=2
- Allowing Executor to call start() in the same way as other executors
- Process-safe check logging in integration_testing
- Catching all exceptions in Communicator.\_\_del\_\_
- Stricter versions in requirements.txt
0.9.1 (2018-11-22)
------------------
- Adding `-` and `.` to replaceable characters in `clarity.sanitise_user_id`
- Making Session objects unique to child processes in `rest_communication`
0.9 (2018-09-14)
----------------
- Breaking change: removed `clarity.get_expected_yield_for_sample`
- Catching notification failures
- Added retries and multiprocessing lock to `rest_communication`
- Added log warnings to `util.find_file`
- Added `integration_testing` library and runner
0.8.2 (2018-05-28)
------------------
- New constant to record number of Phix reads
0.8.1 (2018-02-09)
------------------
- New constants capturing the Interop metrics
- New function for querying dict with dot notation
0.8 (2017-11-29)
----------------
- New EmailSender class that take most of the feature of EmailNotification
0.7.5 (2017-11-23)
------------------
- update constants: Remove expected yield and add more explicit required yield/yieldQ30/coverage
- code refactor
0.7.4 (2017-10-31)
------------------
- Bugfix in archive management.
- Add Picard and mapping stats constants
0.7.3 (2017-09-01)
------------------
- Fixed RecursionError when calling `get_documents` on large collections with `all_pages=True`
- Add new option to rest_communication.post_entry to submit payload without json
0.7.2 (2017-08-03)
------------------
- Allow Lims cached connection to be overridden
0.7.1 (2017-06-08)
------------------
- Fix data release workflow name
0.7 (2017-06-08)
----------------
- Add ability to upload files through `rest_communication`
- New functions for finding and routing to clarity steps
- Notifications (email, asana and log) can now accept attachments
- Added a retry in archive_management.register_for_archiving
0.6.12 (2017-05-16)
-------------------
- add new constants to store trimming/filtering of run elements
0.6.11 (2017-05-04)
-------------------
- get_genome_version can check the config file for the default version if species is provided
0.6.10 (2017-04-26)
-------------------
- Simplify Configuration classes to have only one that support all use-cases
- New send_mail function for sending one email
0.6.9 (2017-03-24)
------------------
- Enforced the usage of `log_cfg.set_log_level()` rather than modifying `log_cfg.log_level`
- More error reporting in archive_management
- Removed unused Executor file path validation
- Added 204 to Communicator.successful_statuses
- Fixed a bug where `If-Match` was not passed to Communicator._req when using token auth
- Updated `asana` to 0.6.2
0.6.8 (2017-03-15)
------------------
- Added `DATASET_RESUME` to constants
0.6.7 (2017-02-23)
------------------
- First version to support release on Pypi
- Add support for dealing with lfs hsm_x command to manage the archive.
- Add get_genome_version in clarity functions
0.6.5
-----
- Fix `get_project`
0.6.4
-----
- executors now retry their job submissions upon failure, up to three times
- some log messages from notifications have been reduced to debug
- `clarity.get_sample_gender` now checks for a 'Sex' UDF before checking 'Gender'
- in `rest_communication`, the building of requests has been fixed, as below:
In Communicator.get_content, we used to build a url manually via `api_url` and pass it to `_req`. This was because we had to cast dicts to strings manually:
```python
where = {'this': 'that', 'other': None}
cast_where = '{"this":"that","other":null}'
```
However, the removal of spaces that this involved meant that any query for a field containing a space resulted in a bad query:
```python
where = {'field_name': 'with spaces'}
cast_where = '{"field_name":"withspaces"}'
```
To fix this, we now pass the query string through [params](http://docs.python-requests.org/en/master/user/quickstart/#passing-parameters-in-urls), and do Json serialisation on any dict params:
```python
params = {'page': 1, 'where': {'this': 'that', 'other': None}}
query_string = '?page=1&where={"this": "that", "other": null}'
```
0.6.3
-----
0.6.2
-----
Emergency fix in move_dir where the destination file was named after the linked file instead of the link name
Downloads
0.6.1
-----
Improve util and error handling
Add function to retrieve project from LIMS API
0.6
---
This version adds the ability to cancel currently running cluster jobs in executor. Also fixes a bug in util.move_dir.
0.5.1
-----
Minor version adding evenness
0.5
---
Executors have been fixed to process the output of sacct properly. Script writers have also been refactored/simplified.
Downloads
0.4.4
-----
0.4.3
-----
This version add the ability to Notify a log file, through email or over Asana tasks.
It also adds new constant and allow the Configuration object to still work even when no config file is passed
0.4.2
-----
0.4.1
-----
This version fixes a bug in `EnvConfiguration`, where it wasn't selecting a new environment properly. Two fields have also been added to `constants` for upcoming versions of Analysis-Driver and Reporting-App.
0.4
---
Bugs have been fixed in `clarity`, `ncbi` and `rest_communication`. There is also now a more flexible, object oriented `rest_communication`, where a `Communicator` object can be created with a base url and username-password or token authentication.
0.3.1
-----
This adds a new field for Y-chromosome coverage to `constants`
0.3
---
This version is able to send authentication headers in `rest_communication` transactions. It now implements lazy loading of database connections and configs, so it is possible to, e.g, import rest_communication without importing ncbi, which requires sqlite3. It also allows egcg_core.config to switch its config file, allowing client apps to do, in `__init__.py`:
``` python
import egcg_core.config
egcg_core.config.cfg.load_config_file('/path/to/a_config.yaml')
# executors, ncbi, etc, can now use the same config file as the client app
```
0.2.4
-----
Since 0.2.1, this project now stores version information in __init__.__version__. A field for EdinburghGenomics/EGCG-Project-Management has also been added to constants, and an executor bug has been fixed.
0.2.3
-----
0.2.2
-----
0.2.1
-----
This version moves the deployment from distutils to setuptools, allowing automatic installation of subdependencies when EGCG-Core is installed as a requirement from another project.
0.2
---
This version adds functions to `util` specific to finding Fastqs, that previously lived in EdinburghGenomics/Analysis-Driver.
0.1
---
First version of the EGCG-Core package
| PypiClean |
/MaterialDjango-0.2.5.tar.gz/MaterialDjango-0.2.5/materialdjango/static/materialdjango/components/bower_components/app-layout/app-toolbar/README.md | ##<app-toolbar>
app-toolbar is a horizontal toolbar containing items that can be used for
labels, navigation, search, and actions.
### Example
Add a title to the toolbar.
```html
<app-toolbar>
<div main-title>App name</div>
</app-toolbar>
```
Add a button to the left and right side of the toolbar.
```html
<app-toolbar>
<paper-icon-button icon="menu"></paper-icon-button>
<div main-title>App name</div>
<paper-icon-button icon="search"></paper-icon-button>
</app-toolbar>
```
You can use the attributes `top-item` or `bottom-item` to completely fit an element
to the top or bottom of the toolbar respectively.
### Content attributes
Attribute | Description
---------------------|---------------------------------------------------------
`main-title` | The main title element.
`condensed-title` | The title element if used inside a condensed app-header.
`spacer` | Adds a left margin of `64px`.
`bottom-item` | Sticks the element to the bottom of the toolbar.
`top-item` | Sticks the element to the top of the toolbar.
### Styling
Custom property | Description | Default
-----------------------------|------------------------------|-----------------------
`--app-toolbar-font-size` | Toolbar font size | 20px
| PypiClean |
/GeoNode-3.2.0-py3-none-any.whl/geonode/static/geonode/js/ol-2.13/lib/OpenLayers/Control/LayerSwitcher.js | * @requires OpenLayers/Control.js
* @requires OpenLayers/Lang.js
* @requires OpenLayers/Util.js
* @requires OpenLayers/Events/buttonclick.js
*/
/**
* Class: OpenLayers.Control.LayerSwitcher
* The LayerSwitcher control displays a table of contents for the map. This
* allows the user interface to switch between BaseLasyers and to show or hide
* Overlays. By default the switcher is shown minimized on the right edge of
* the map, the user may expand it by clicking on the handle.
*
* To create the LayerSwitcher outside of the map, pass the Id of a html div
* as the first argument to the constructor.
*
* Inherits from:
* - <OpenLayers.Control>
*/
OpenLayers.Control.LayerSwitcher = OpenLayers.Class(OpenLayers.Control, {
/**
* Property: layerStates
* {Array(Object)} Basically a copy of the "state" of the map's layers
* the last time the control was drawn. We have this in order to avoid
* unnecessarily redrawing the control.
*/
layerStates: null,
// DOM Elements
/**
* Property: layersDiv
* {DOMElement}
*/
layersDiv: null,
/**
* Property: baseLayersDiv
* {DOMElement}
*/
baseLayersDiv: null,
/**
* Property: baseLayers
* {Array(Object)}
*/
baseLayers: null,
/**
* Property: dataLbl
* {DOMElement}
*/
dataLbl: null,
/**
* Property: dataLayersDiv
* {DOMElement}
*/
dataLayersDiv: null,
/**
* Property: dataLayers
* {Array(Object)}
*/
dataLayers: null,
/**
* Property: minimizeDiv
* {DOMElement}
*/
minimizeDiv: null,
/**
* Property: maximizeDiv
* {DOMElement}
*/
maximizeDiv: null,
/**
* APIProperty: ascending
* {Boolean}
*/
ascending: true,
/**
* Constructor: OpenLayers.Control.LayerSwitcher
*
* Parameters:
* options - {Object}
*/
    initialize: function(options) {
        OpenLayers.Control.prototype.initialize.apply(this, arguments);
        // Start with an empty snapshot; redraw() fills it on first draw.
        this.layerStates = [];
    },
/**
* APIMethod: destroy
*/
    destroy: function() {
        // Clear out layers info and unregister their events.
        this.clearLayersArray("base");
        this.clearLayersArray("data");
        // Detach every map listener registered in setMap().
        this.map.events.un({
            buttonclick: this.onButtonClick,
            addlayer: this.redraw,
            changelayer: this.redraw,
            removelayer: this.redraw,
            changebaselayer: this.redraw,
            scope: this
        });
        // Also detach the control-local listener used when outsideViewport.
        this.events.unregister("buttonclick", this, this.onButtonClick);
        OpenLayers.Control.prototype.destroy.apply(this, arguments);
    },
/**
* Method: setMap
*
* Properties:
* map - {<OpenLayers.Map>}
*/
    setMap: function(map) {
        OpenLayers.Control.prototype.setMap.apply(this, arguments);
        // Redraw whenever the map's layer list or base layer changes.
        this.map.events.on({
            addlayer: this.redraw,
            changelayer: this.redraw,
            removelayer: this.redraw,
            changebaselayer: this.redraw,
            scope: this
        });
        if (this.outsideViewport) {
            // Control lives in its own DOM element: listen on that element.
            this.events.attachToElement(this.div);
            this.events.register("buttonclick", this, this.onButtonClick);
        } else {
            // Control lives inside the map viewport: listen on the map.
            this.map.events.register("buttonclick", this, this.onButtonClick);
        }
    },
/**
* Method: draw
*
* Returns:
* {DOMElement} A reference to the DIV DOMElement containing the
* switcher tabs.
*/
    draw: function() {
        OpenLayers.Control.prototype.draw.apply(this);
        // Create layout divs (labels, layer containers, min/max buttons).
        this.loadContents();
        // Controls inside the viewport start minimized.
        if(!this.outsideViewport) {
            this.minimizeControl();
        }
        // Populate div with the current layer info.
        this.redraw();
        return this.div;
    },
/**
* Method: onButtonClick
*
* Parameters:
* evt - {Event}
*/
    onButtonClick: function(evt) {
        var button = evt.buttonElement;
        if (button === this.minimizeDiv) {
            this.minimizeControl();
        } else if (button === this.maximizeDiv) {
            this.maximizeControl();
        } else if (button._layerSwitcher === this.id) {
            // A label was clicked: resolve it to its associated input
            // element via the custom "for" property set in redraw().
            if (button["for"]) {
                button = document.getElementById(button["for"]);
            }
            if (!button.disabled) {
                if (button.type == "radio") {
                    // Base layer radio: select it and switch the map.
                    button.checked = true;
                    this.map.setBaseLayer(this.map.getLayer(button._layer));
                } else {
                    // Overlay checkbox: toggle and sync layer visibility.
                    button.checked = !button.checked;
                    this.updateMap();
                }
            }
        }
    },
/**
* Method: clearLayersArray
* User specifies either "base" or "data". we then clear all the
* corresponding listeners, the div, and reinitialize a new array.
*
* Parameters:
* layersType - {String}
*/
clearLayersArray: function(layersType) {
this[layersType + "LayersDiv"].innerHTML = "";
this[layersType + "Layers"] = [];
},
/**
* Method: checkRedraw
* Checks if the layer state has changed since the last redraw() call.
*
* Returns:
* {Boolean} The layer state changed since the last redraw() call.
*/
checkRedraw: function() {
if ( !this.layerStates.length ||
(this.map.layers.length != this.layerStates.length) ) {
return true;
}
for (var i = 0, len = this.layerStates.length; i < len; i++) {
var layerState = this.layerStates[i];
var layer = this.map.layers[i];
if ( (layerState.name != layer.name) ||
(layerState.inRange != layer.inRange) ||
(layerState.id != layer.id) ||
(layerState.visibility != layer.visibility) ) {
return true;
}
}
return false;
},
/**
* Method: redraw
* Goes through and takes the current state of the Map and rebuilds the
* control to display that state. Groups base layers into a
* radio-button group and lists each data layer with a checkbox.
*
* Returns:
* {DOMElement} A reference to the DIV DOMElement containing the control
*/
    redraw: function() {
        // If the state hasn't changed since the last redraw, no need
        // to do anything. Just return the existing div.
        if (!this.checkRedraw()) {
            return this.div;
        }
        // Clear out previous layers.
        this.clearLayersArray("base");
        this.clearLayersArray("data");
        var containsOverlays = false;
        var containsBaseLayers = false;
        // Save state -- for checking later if the map state changed.
        // We save this before redrawing, because in the process of redrawing
        // we will trigger more visibility changes, and we want to not redraw
        // and enter an infinite loop.
        var len = this.map.layers.length;
        this.layerStates = new Array(len);
        for (var i=0; i <len; i++) {
            var layer = this.map.layers[i];
            this.layerStates[i] = {
                'name': layer.name,
                'visibility': layer.visibility,
                'inRange': layer.inRange,
                'id': layer.id
            };
        }
        // Work on a copy so reversing (for descending order) does not
        // mutate the map's own layer list.
        var layers = this.map.layers.slice();
        if (!this.ascending) { layers.reverse(); }
        for(var i=0, len=layers.length; i<len; i++) {
            var layer = layers[i];
            var baseLayer = layer.isBaseLayer;
            if (layer.displayInLayerSwitcher) {
                if (baseLayer) {
                    containsBaseLayers = true;
                } else {
                    containsOverlays = true;
                }
                // Only check a baselayer if it is *the* baselayer; check data
                // layers if they are visible.
                var checked = (baseLayer) ? (layer == this.map.baseLayer)
                    : layer.getVisibility();
                // Create input element (radio for base layers, checkbox for
                // overlays).
                var inputElem = document.createElement("input"),
                    // The input shall have an id attribute so we can use
                    // labels to interact with them.
                    inputId = OpenLayers.Util.createUniqueID(
                        this.id + "_input_"
                    );
                inputElem.id = inputId;
                inputElem.name = (baseLayer) ? this.id + "_baseLayers" : layer.name;
                inputElem.type = (baseLayer) ? "radio" : "checkbox";
                inputElem.value = layer.name;
                inputElem.checked = checked;
                inputElem.defaultChecked = checked;
                inputElem.className = "olButton";
                inputElem._layer = layer.id;
                inputElem._layerSwitcher = this.id;
                if (!baseLayer && !layer.inRange) {
                    inputElem.disabled = true;
                }
                // Create the clickable label for this layer.
                var labelSpan = document.createElement("label");
                // This isn't the DOM attribute 'for', but an arbitrary name we
                // use to find the appropriate input element in <onButtonClick>.
                labelSpan["for"] = inputElem.id;
                OpenLayers.Element.addClass(labelSpan, "labelSpan olButton");
                labelSpan._layer = layer.id;
                labelSpan._layerSwitcher = this.id;
                if (!baseLayer && !layer.inRange) {
                    labelSpan.style.color = "gray";
                }
                labelSpan.innerHTML = layer.name;
                labelSpan.style.verticalAlign = (baseLayer) ? "bottom"
                                                            : "baseline";
                // Create line break.
                var br = document.createElement("br");
                // Record the entry so updateMap() can sync input <-> layer.
                var groupArray = (baseLayer) ? this.baseLayers
                                             : this.dataLayers;
                groupArray.push({
                    'layer': layer,
                    'inputElem': inputElem,
                    'labelSpan': labelSpan
                });
                var groupDiv = (baseLayer) ? this.baseLayersDiv
                                           : this.dataLayersDiv;
                groupDiv.appendChild(inputElem);
                groupDiv.appendChild(labelSpan);
                groupDiv.appendChild(br);
            }
        }
        // If no overlays, don't display the overlay label.
        this.dataLbl.style.display = (containsOverlays) ? "" : "none";
        // If no baselayers, don't display the baselayer label.
        this.baseLbl.style.display = (containsBaseLayers) ? "" : "none";
        return this.div;
    },
/**
* Method: updateMap
* Cycles through the loaded data and base layer input arrays and makes
* the necessary calls to the Map object such that that the map's
* visual state corresponds to what the user has selected in
* the control.
*/
updateMap: function() {
// set the newly selected base layer
for(var i=0, len=this.baseLayers.length; i<len; i++) {
var layerEntry = this.baseLayers[i];
if (layerEntry.inputElem.checked) {
this.map.setBaseLayer(layerEntry.layer, false);
}
}
// set the correct visibilities for the overlays
for(var i=0, len=this.dataLayers.length; i<len; i++) {
var layerEntry = this.dataLayers[i];
layerEntry.layer.setVisibility(layerEntry.inputElem.checked);
}
},
/**
* Method: maximizeControl
* Set up the labels and divs for the control
*
* Parameters:
* e - {Event}
*/
maximizeControl: function(e) {
// set the div's width and height to empty values, so
// the div dimensions can be controlled by CSS
this.div.style.width = "";
this.div.style.height = "";
this.showControls(false);
if (e != null) {
OpenLayers.Event.stop(e);
}
},
/**
* Method: minimizeControl
* Hide all the contents of the control, shrink the size,
* add the maximize icon
*
* Parameters:
* e - {Event}
*/
minimizeControl: function(e) {
// to minimize the control we set its div's width
// and height to 0px, we cannot just set "display"
// to "none" because it would hide the maximize
// div
this.div.style.width = "0px";
this.div.style.height = "0px";
this.showControls(true);
if (e != null) {
OpenLayers.Event.stop(e);
}
},
/**
* Method: showControls
* Hide/Show all LayerSwitcher controls depending on whether we are
* minimized or not
*
* Parameters:
* minimize - {Boolean}
*/
showControls: function(minimize) {
this.maximizeDiv.style.display = minimize ? "" : "none";
this.minimizeDiv.style.display = minimize ? "none" : "";
this.layersDiv.style.display = minimize ? "none" : "";
},
/**
* Method: loadContents
* Set up the labels and divs for the control
*/
    loadContents: function() {
        // Layers list div: holds both group labels and group containers.
        this.layersDiv = document.createElement("div");
        this.layersDiv.id = this.id + "_layersDiv";
        OpenLayers.Element.addClass(this.layersDiv, "layersDiv");
        this.baseLbl = document.createElement("div");
        this.baseLbl.innerHTML = OpenLayers.i18n("Base Layer");
        OpenLayers.Element.addClass(this.baseLbl, "baseLbl");
        this.baseLayersDiv = document.createElement("div");
        OpenLayers.Element.addClass(this.baseLayersDiv, "baseLayersDiv");
        this.dataLbl = document.createElement("div");
        this.dataLbl.innerHTML = OpenLayers.i18n("Overlays");
        OpenLayers.Element.addClass(this.dataLbl, "dataLbl");
        this.dataLayersDiv = document.createElement("div");
        OpenLayers.Element.addClass(this.dataLayersDiv, "dataLayersDiv");
        // Group order depends on the 'ascending' option.
        if (this.ascending) {
            this.layersDiv.appendChild(this.baseLbl);
            this.layersDiv.appendChild(this.baseLayersDiv);
            this.layersDiv.appendChild(this.dataLbl);
            this.layersDiv.appendChild(this.dataLayersDiv);
        } else {
            this.layersDiv.appendChild(this.dataLbl);
            this.layersDiv.appendChild(this.dataLayersDiv);
            this.layersDiv.appendChild(this.baseLbl);
            this.layersDiv.appendChild(this.baseLayersDiv);
        }
        this.div.appendChild(this.layersDiv);
        // Maximize button div.
        var img = OpenLayers.Util.getImageLocation('layer-switcher-maximize.png');
        this.maximizeDiv = OpenLayers.Util.createAlphaImageDiv(
                                    "OpenLayers_Control_MaximizeDiv",
                                    null,
                                    null,
                                    img,
                                    "absolute");
        OpenLayers.Element.addClass(this.maximizeDiv, "maximizeDiv olButton");
        this.maximizeDiv.style.display = "none";
        this.div.appendChild(this.maximizeDiv);
        // Minimize button div (note: 'var img' is re-declared; harmless in
        // JS because var declarations are hoisted to function scope).
        var img = OpenLayers.Util.getImageLocation('layer-switcher-minimize.png');
        this.minimizeDiv = OpenLayers.Util.createAlphaImageDiv(
                                    "OpenLayers_Control_MinimizeDiv",
                                    null,
                                    null,
                                    img,
                                    "absolute");
        OpenLayers.Element.addClass(this.minimizeDiv, "minimizeDiv olButton");
        this.minimizeDiv.style.display = "none";
        this.div.appendChild(this.minimizeDiv);
    },
CLASS_NAME: "OpenLayers.Control.LayerSwitcher"
}); | PypiClean |
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojo/nls/de/colors.js | ({"lightsteelblue":"Helles Stahlblau","orangered":"Orangerot","midnightblue":"Mitternachtblau","cadetblue":"Kadettenblau","seashell":"Muschelweiß","slategrey":"Schiefergrau","coral":"Koralle","darkturquoise":"Dunkeltürkis","antiquewhite":"Antikweiß","mediumspringgreen":"Mittelfrühlingsgrün","salmon":"Lachs","darkgrey":"Dunkelgrau","ivory":"Elfenbein","greenyellow":"Grüngelb","mistyrose":"Blassrose","lightsalmon":"Helllachs","silver":"Silbergrau","dimgrey":"Blassgrau","orange":"Orange","white":"Weiß","navajowhite":"Navajo-weiß","royalblue":"Königsblau","deeppink":"Tiefrosa","lime":"Limone","oldlace":"Alte Spitze","chartreuse":"Helles Gelbgrün","darkcyan":"Dunkelzyan","yellow":"Gelb","linen":"Leinen","olive":"Oliv","gold":"Gold","lawngreen":"Grasgrün","lightyellow":"Hellgelb","tan":"Hautfarben","darkviolet":"Dunkelviolett","lightslategrey":"Helles Schiefergrau","grey":"Grau","darkkhaki":"Dunkelkhaki","green":"Grün","deepskyblue":"Dunkles Himmelblau","aqua":"Wasserblau","sienna":"Sienna","mintcream":"Mintcreme","rosybrown":"Rosigbraun","mediumslateblue":"Mittelschieferblau ","magenta":"Magenta","lightseagreen":"Helles Meergrün","cyan":"Zyan","olivedrab":"Olivgrau","darkgoldenrod":"Dunkelgoldgelb","slateblue":"Schieferblau","mediumaquamarine":"Mittelaquamarin","lavender":"Lavendelblau","mediumseagreen":"Mittelmeeresgrün","maroon":"Kastanienbraun","darkslategray":"Dunkelschiefergrau","mediumturquoise":"Mitteltürkis ","ghostwhite":"Geisterweiß","darkblue":"Dunkelblau","mediumvioletred":"Mittelviolettrot 
","brown":"Braun","lightgray":"Hellgrau","sandybrown":"Sandbraun","pink":"Rosa","firebrick":"Schamottestein","indigo":"Indigoblau","snow":"Schneeweiß","darkorchid":"Dunkelorchidee","turquoise":"Türkis","chocolate":"Schokoladenbraun","springgreen":"Frühlingsgrün","moccasin":"Mokassin","navy":"Marineblau","lemonchiffon":"Zitronenchiffon","teal":"Smaragdgrün","floralwhite":"Blütenweiß","cornflowerblue":"Kornblumenblau","paleturquoise":"Blasstürkis","purple":"Purpurrot","gainsboro":"Gainsboro","plum":"Pflaume","red":"Rot","blue":"Blau","forestgreen":"Forstgrün","darkgreen":"Dunkelgrün","honeydew":"Honigtau","darkseagreen":"Dunkles Meergrün","lightcoral":"Hellkoralle","palevioletred":"Blassviolettrot ","mediumpurple":"Mittelpurpur","saddlebrown":"Sattelbraun","darkmagenta":"Dunkelmagenta","thistle":"Distel","whitesmoke":"Rauchweiß","wheat":"Weizen","violet":"Violett","lightskyblue":"Helles Himmelblau","goldenrod":"Goldgelb","mediumblue":"Mittelblau","skyblue":"Himmelblau","crimson":"Karmesinrot","darksalmon":"Dunkellachs","darkred":"Dunkelrot","darkslategrey":"Dunkelschiefergrau","peru":"Peru","lightgrey":"Hellgrau","lightgoldenrodyellow":"Hellgoldgelb","blanchedalmond":"Mandelweiß","aliceblue":"Alice-blau","bisque":"Bisquit","slategray":"Schiefergrau","palegoldenrod":"Blassgoldgelb","darkorange":"Dunkelorange","aquamarine":"Aquamarin","lightgreen":"Hellgrün","burlywood":"Burlywood","dodgerblue":"Dodger-blau","darkgray":"Dunkelgrau","lightcyan":"Hellzyan","powderblue":"Pulverblau","blueviolet":"Blauviolett","orchid":"Orchidee","dimgray":"Blassgrau","beige":"Beige","fuchsia":"Fuchsia","lavenderblush":"Lavendelhauch","hotpink":"Knallrosa","steelblue":"Stahlblau","tomato":"Tomatenrot","lightpink":"Hellrosa","limegreen":"Limonengrün","indianred":"Indischrot","papayawhip":"Papayacreme","lightslategray":"Helles 
Schiefergrau","gray":"Grau","mediumorchid":"Mittelorchidee","cornsilk":"Kornseide","black":"Schwarz","seagreen":"Meeresgrün","darkslateblue":"Dunkelschieferblau","khaki":"Khaki","lightblue":"Hellblau","palegreen":"Blassgrün","azure":"Azur","peachpuff":"Pfirsich","darkolivegreen":"Dunkelolivgrün","yellowgreen":"Gelbgrün"}) | PypiClean |
/Cython-3.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/pyximport/_pyximport3.py | import glob
import importlib
import os
import sys
from importlib.abc import MetaPathFinder
from importlib.machinery import ExtensionFileLoader, SourceFileLoader
from importlib.util import spec_from_file_location
# Name used when referring to this import hook.
mod_name = "pyximport"
# Recognised source file suffixes.
PY_EXT = ".py"
PYX_EXT = ".pyx"
# Optional per-module companion files: dependency lists and build scripts.
PYXDEP_EXT = ".pyxdep"
PYXBLD_EXT = ".pyxbld"
# Set to True to enable tracing of the import machinery via _debug().
DEBUG_IMPORT = False
def _print(message, args):
if args:
message = message % args
print(message)
def _debug(message, *args):
    """Forward to _print() only when DEBUG_IMPORT tracing is enabled."""
    if not DEBUG_IMPORT:
        return
    _print(message, args)
def _info(message, *args):
    """Always print *message* (optionally %-formatted with *args*) via _print()."""
    _print(message, args)
def load_source(file_path):
    """Import and execute the Python source file at *file_path*.

    Returns the resulting module object.  The module is not registered in
    sys.modules; the placeholder name "XXXX" is never used for lookup.
    """
    import importlib.util
    from importlib.machinery import SourceFileLoader
    loader = SourceFileLoader("XXXX", file_path)
    spec = importlib.util.spec_from_file_location("XXXX", file_path, loader=loader)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    return mod
def get_distutils_extension(modname, pyxfilename, language_level=None):
    """Build a distutils Extension (plus extra setup() args) for *pyxfilename*.

    A companion ``.pyxbld`` file, when present, may supply both via
    handle_special_build(); otherwise a minimal Extension is created.
    *language_level*, when given, is forwarded as the Cython
    ``language_level`` directive.
    Returns a ``(extension, setup_args_dict)`` tuple.
    """
    extension_mod, setup_args = handle_special_build(modname, pyxfilename)
    if not extension_mod:
        if not isinstance(pyxfilename, str):
            # BUG FIX: the original Py2 relic called .encode() on the value,
            # which fails for bytes / os.PathLike inputs under Python 3.
            # os.fsdecode() converts str, bytes and path-like objects to the
            # str that distutils expects.
            pyxfilename = os.fsdecode(pyxfilename)
        from distutils.extension import Extension
        extension_mod = Extension(name=modname, sources=[pyxfilename])
        if language_level is not None:
            extension_mod.cython_directives = {'language_level': language_level}
    return extension_mod, setup_args
def handle_special_build(modname, pyxfilename):
    """Look for a ``.pyxbld`` file next to *pyxfilename* and apply it.

    The .pyxbld file may define ``make_ext(modname, pyxfilename)`` returning
    a distutils Extension, and/or ``make_setup_args()`` returning a dict of
    extra setup() keyword arguments.  Returns ``(extension_or_None,
    setup_args_dict)``.
    """
    special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT
    ext = None
    setup_args = {}
    if os.path.exists(special_build):
        mod = load_source(special_build)
        make_ext = getattr(mod, 'make_ext', None)
        if make_ext:
            ext = make_ext(modname, pyxfilename)
            assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build
        make_setup_args = getattr(mod, 'make_setup_args', None)
        if make_setup_args:
            setup_args = make_setup_args()
            assert isinstance(setup_args, dict), ("make_setup_args in %s did not return a dict"
                                                  % special_build)
        # BUG FIX: the original asserted `set or setup_args`, which is always
        # true because `set` is the builtin type.  The intent is that the
        # .pyxbld file must provide at least one of the two hooks.
        assert ext or setup_args, ("neither make_ext nor make_setup_args %s"
                                   % special_build)
        if ext:
            # BUG FIX: guard against ext being None (only make_setup_args
            # defined); the original unconditionally dereferenced ext.sources.
            # Source paths in the .pyxbld file are relative to its directory.
            ext.sources = [os.path.join(os.path.dirname(special_build), source)
                           for source in ext.sources]
    return ext, setup_args
def handle_dependencies(pyxfilename):
    """Force a rebuild of *pyxfilename* when any listed dependency is newer.

    A companion ``.pyxdep`` file lists glob patterns (one per line, relative
    to the .pyxdep file).  If any matching file is newer than the .pyx file,
    the .pyx file's mtime is bumped so distutils rebuilds it; distutils is
    otherwise left to decide on its own (it knows the output file better).
    """
    testing = '_test_files' in globals()
    dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT
    if os.path.exists(dependfile):
        with open(dependfile) as fid:
            depends = [line.strip() for line in fid]
        # Gather dependencies; the dependency file is itself a dependency.
        files = [dependfile]
        for depend in depends:
            fullpath = os.path.join(os.path.dirname(dependfile), depend)
            files.extend(glob.glob(fullpath))
        # Only for unit testing, to see that we did the right thing.
        if testing:
            _test_files[:] = []
        # Import hoisted out of the loop (the original re-imported `newer`
        # on every iteration).
        from distutils.dep_util import newer
        # If any file that the pyx file depends upon is newer than the pyx
        # file, 'touch' the pyx file so that distutils rebuilds it.
        for file in files:
            if newer(file, pyxfilename):
                _debug("Rebuilding %s because of %s", pyxfilename, file)
                filetime = os.path.getmtime(file)
                os.utime(pyxfilename, (filetime, filetime))
                if testing:
                    _test_files.append(file)
def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None):
    """Compile *pyxfilename* into an extension module named *name* and return
    the path of the built shared library.

    Raises AssertionError when the source is missing or no library was
    produced.  Stale "<name>_*" leftovers next to the result are removed.
    """
    assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename
    handle_dependencies(pyxfilename)
    extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level)
    build_in_temp = pyxargs.build_in_temp
    sargs = pyxargs.setup_args.copy()
    sargs.update(setup_args)
    # A .pyxbld file may override build_in_temp via its setup args.
    build_in_temp = sargs.pop('build_in_temp',build_in_temp)
    from . import pyxbuild
    olddir = os.getcwd()
    common = ''
    if pyxbuild_dir:
        # Windows concantenates the pyxbuild_dir to the pyxfilename when
        # compiling, and then complains that the filename is too long
        common = os.path.commonprefix([pyxbuild_dir, pyxfilename])
        if len(common) > 30:
            # Work from the common prefix with relative paths to keep the
            # combined path length under the Windows limit.
            pyxfilename = os.path.relpath(pyxfilename)
            pyxbuild_dir = os.path.relpath(pyxbuild_dir)
            os.chdir(common)
    try:
        so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod,
                                      build_in_temp=build_in_temp,
                                      pyxbuild_dir=pyxbuild_dir,
                                      setup_args=sargs,
                                      inplace=inplace,
                                      reload_support=pyxargs.reload_support)
    finally:
        # Always restore the working directory, even when the build fails.
        os.chdir(olddir)
    so_path = os.path.join(common, so_path)
    assert os.path.exists(so_path), "Cannot find: %s" % so_path
    junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;)
    junkstuff = glob.glob(junkpath)
    for path in junkstuff:
        if path != so_path:
            try:
                os.remove(path)
            except IOError:
                _info("Couldn't remove %s", path)
    return so_path
# import hooks
class PyxImportMetaFinder(MetaPathFinder):
    """Meta-path finder that locates .pyx sources and compiles them on import."""
    def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, language_level=None):
        # Where build artefacts go; None lets pyxbuild choose its default.
        self.pyxbuild_dir = pyxbuild_dir
        self.inplace = inplace
        self.language_level = language_level
        self.extension = extension
    def find_spec(self, fullname, path, target=None):
        """Return a ModuleSpec for *fullname* if a matching .pyx file exists
        on *path*, else None so the next finder gets a chance."""
        if not path:
            path = [os.getcwd()] # top level import --
        if "." in fullname:
            # Only the last component names the file; parents are packages.
            *parents, name = fullname.split(".")
        else:
            name = fullname
        for entry in path:
            if os.path.isdir(os.path.join(entry, name)):
                # this module has child modules
                filename = os.path.join(entry, name, "__init__" + self.extension)
                submodule_locations = [os.path.join(entry, name)]
            else:
                filename = os.path.join(entry, name + self.extension)
                submodule_locations = None
            if not os.path.exists(filename):
                continue
            return spec_from_file_location(
                fullname, filename,
                loader=PyxImportLoader(filename, self.pyxbuild_dir, self.inplace, self.language_level),
                submodule_search_locations=submodule_locations)
        return None # we don't know how to import this
class PyImportMetaFinder(MetaPathFinder):
    """Meta-path finder that compiles plain .py modules with Cython on import.

    Experimental.  Keeps a block list so Cython/distutils themselves are
    never compiled, and to break recursion while a module is in flight.
    """
    def __init__(self, extension=PY_EXT, pyxbuild_dir=None, inplace=False, language_level=None):
        self.pyxbuild_dir = pyxbuild_dir
        self.inplace = inplace
        self.language_level = language_level
        self.extension = extension
        self.uncompilable_modules = {}
        self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild',
                                'distutils', 'cython']
        self.blocked_packages = ['Cython.', 'distutils.']
    def find_spec(self, fullname, path, target=None):
        """Return a ModuleSpec for *fullname* if a matching .py file exists
        on *path*, else None so the next finder gets a chance."""
        if fullname in sys.modules:
            return None
        # Idiom fix: generator expression instead of materialising a list
        # inside any().
        if any(fullname.startswith(pkg) for pkg in self.blocked_packages):
            return None
        if fullname in self.blocked_modules:
            # prevent infinite recursion
            return None
        # Block this name for the duration of the search (popped in finally).
        self.blocked_modules.append(fullname)
        name = fullname
        if not path:
            path = [os.getcwd()] # top level import --
        try:
            for entry in path:
                if os.path.isdir(os.path.join(entry, name)):
                    # this module has child modules
                    filename = os.path.join(entry, name, "__init__" + self.extension)
                    submodule_locations = [os.path.join(entry, name)]
                else:
                    filename = os.path.join(entry, name + self.extension)
                    submodule_locations = None
                if not os.path.exists(filename):
                    continue
                return spec_from_file_location(
                    fullname, filename,
                    loader=PyxImportLoader(filename, self.pyxbuild_dir, self.inplace, self.language_level),
                    submodule_search_locations=submodule_locations)
        finally:
            self.blocked_modules.pop()
        return None # we don't know how to import this
class PyxImportLoader(ExtensionFileLoader):
    """Loader that builds the .pyx/.py source on demand and then loads the
    resulting extension module; optionally falls back to the plain .py
    module when the compiled one cannot be created."""
    def __init__(self, filename, pyxbuild_dir, inplace, language_level):
        module_name = os.path.splitext(os.path.basename(filename))[0]
        super().__init__(module_name, filename)
        self._pyxbuild_dir = pyxbuild_dir
        self._inplace = inplace
        self._language_level = language_level
    def create_module(self, spec):
        """Build the extension module, then create it from the built .so."""
        try:
            so_path = build_module(spec.name, pyxfilename=spec.origin, pyxbuild_dir=self._pyxbuild_dir,
                                   inplace=self._inplace, language_level=self._language_level)
            # Point both the loader and the spec at the compiled library.
            self.path = so_path
            spec.origin = so_path
            return super().create_module(spec)
        except Exception as failure_exc:
            _debug("Failed to load extension module: %r" % failure_exc)
            if pyxargs.load_py_module_on_import_failure and spec.origin.endswith(PY_EXT):
                # Fall back to importing the plain .py source instead.
                spec = importlib.util.spec_from_file_location(spec.name, spec.origin,
                                                              loader=SourceFileLoader(spec.name, spec.origin))
                mod = importlib.util.module_from_spec(spec)
                assert mod.__file__ in (spec.origin, spec.origin + 'c', spec.origin + 'o'), (mod.__file__, spec.origin)
                return mod
            else:
                # Re-raise as ImportError but keep the original traceback.
                tb = sys.exc_info()[2]
                import traceback
                exc = ImportError("Building module %s failed: %s" % (
                    spec.name, traceback.format_exception_only(*sys.exc_info()[:2])))
                raise exc.with_traceback(tb)
    def exec_module(self, module):
        """Execute the module, converting any failure into ImportError."""
        try:
            return super().exec_module(module)
        except Exception as failure_exc:
            import traceback
            _debug("Failed to load extension module: %r" % failure_exc)
            raise ImportError("Executing module %s failed %s" % (
                module.__file__, traceback.format_exception_only(*sys.exc_info()[:2])))
#install args
class PyxArgs(object):
    """Holder for the options passed to install(); a single instance is
    stored in the module-global ``pyxargs``.  The class-level values below
    are defaults that install() overwrites with instance attributes."""
    build_dir=True
    build_in_temp=True
    setup_args={} #None
def _have_importers():
    """Report which of our meta-path finders are already installed.

    Returns a tuple (has_py_importer, has_pyx_importer).
    """
    has_py_importer = False
    has_pyx_importer = False
    for importer in sys.meta_path:
        # BUG FIX: PyImportMetaFinder is not a subclass of
        # PyxImportMetaFinder (both derive from MetaPathFinder), so the
        # original nested isinstance check could never detect an installed
        # .py importer and install() could stack duplicates.  Test each
        # class directly instead.
        if isinstance(importer, PyImportMetaFinder):
            has_py_importer = True
        elif isinstance(importer, PyxImportMetaFinder):
            has_pyx_importer = True
    return has_py_importer, has_pyx_importer
def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True,
            setup_args=None, reload_support=False,
            load_py_module_on_import_failure=False, inplace=False,
            language_level=None):
    """ Main entry point for pyxinstall.

    Call this to install the ``.pyx`` import hook in
    your meta-path for a single Python process.  If you want it to be
    installed whenever you use Python, add it to your ``sitecustomize``
    (as described above).

    :param pyximport: If set to False, does not try to import ``.pyx`` files.

    :param pyimport: You can pass ``pyimport=True`` to also
        install the ``.py`` import hook
        in your meta-path.  Note, however, that it is rather experimental,
        will not work at all for some ``.py`` files and packages, and will
        heavily slow down your imports due to search and compilation.
        Use at your own risk.

    :param build_dir: By default, compiled modules will end up in a ``.pyxbld``
        directory in the user's home directory.  Passing a different path
        as ``build_dir`` will override this.

    :param build_in_temp: If ``False``, will produce the C files locally.  This
        makes working with complex dependencies and debugging easier, but
        can in principle interfere with existing files of the same name.

    :param setup_args: Dict of arguments for Distribution.
        See ``distutils.core.setup()``.

    :param reload_support: Enables support for dynamic
        ``reload(my_module)``, e.g. after a change in the Cython code.
        Additional files ``<so_path>.reloadNN`` may arise on that account, when
        the previously loaded module file cannot be overwritten.

    :param load_py_module_on_import_failure: If the compilation of a ``.py``
        file succeeds, but the subsequent import fails for some reason,
        retry the import with the normal ``.py`` module instead of the
        compiled module.  Note that this may lead to unpredictable results
        for modules that change the system state during their import, as
        the second import will rerun these modifications in whatever state
        the system was left after the import of the compiled module
        failed.

    :param inplace: Install the compiled module
        (``.so`` for Linux and Mac / ``.pyd`` for Windows)
        next to the source file.

    :param language_level: The source language level to use: 2 or 3.
        The default is to use the language level of the current Python
        runtime for .py files and Py2 for ``.pyx`` files.

    :return: A tuple ``(py_importer, pyx_importer)`` of the newly installed
        finders (``None`` for any hook that was not installed).
    """
    if setup_args is None:
        setup_args = {}
    if not build_dir:
        build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld')

    global pyxargs
    # Record the effective configuration on the module-global singleton.
    pyxargs = PyxArgs()  #$pycheck_no
    pyxargs.build_dir = build_dir
    pyxargs.build_in_temp = build_in_temp
    pyxargs.setup_args = (setup_args or {}).copy()
    pyxargs.reload_support = reload_support
    pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure

    has_py_importer, has_pyx_importer = _have_importers()
    py_importer, pyx_importer = None, None

    if pyimport and not has_py_importer:
        py_importer = PyImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace,
                                         language_level=language_level)
        # make sure we import Cython before we install the import hook
        import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize
        sys.meta_path.insert(0, py_importer)

    if pyximport and not has_pyx_importer:
        pyx_importer = PyxImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace,
                                           language_level=language_level)
        sys.meta_path.append(pyx_importer)

    return py_importer, pyx_importer
def uninstall(py_importer, pyx_importer):
    """Remove the given import hooks from ``sys.meta_path``.

    Finders that are not installed (e.g. ``None``) are silently ignored, so
    the tuple returned by :func:`install` can be passed back unconditionally.
    """
    for finder in (py_importer, pyx_importer):
        try:
            sys.meta_path.remove(finder)
        except ValueError:
            pass
# MAIN
def show_docs():
    """Render this module's documentation via help(), presenting every
    attribute as belonging to ``mod_name``."""
    import __main__
    __main__.__name__ = mod_name
    for attr_name in dir(__main__):
        attr = getattr(__main__, attr_name)
        try:
            attr.__module__ = mod_name
        except (AttributeError, TypeError):
            # Builtins and immutable objects reject __module__; skip them.
            pass
    help(__main__)


if __name__ == '__main__':
    show_docs()
/Fict-1.0.1.tar.gz/Fict-1.0.1/lib/core.py | import json
import logging
import os
import re
import sys
import threading
from alive_progress import alive_bar, alive_it
from joblib import Parallel, delayed
from lib.fileobj import FileObj
# Path fragments that mark files fict must never track (its own metadata,
# the database itself, Synology index directories).
FILE_IGNORE_LIST = ['.fict', 'fict_db', '@eaDir']
LOGGER = logging.getLogger('fict')
# Countdown used by compute_runner() to flush the DB roughly every 1000 hashes.
counter = 1000
# COUNTER_LOCK guards the shared countdown; FILE_LOCK serialises DB writes.
COUNTER_LOCK = threading.Lock()
FILE_LOCK = threading.Lock()
def write_db(args):
    """Serialise every tracked FileObj to the on-disk JSON database."""
    payload = json.dumps(
        [obj.dump() for _, obj in FileObj.instances.items()],
        sort_keys=False, indent=4)
    db_file = os.path.abspath(
        '{}/{}'.format(args['--fict-dir'], args['--fict-db-name']))
    LOGGER.debug("writing out db @ %s", db_file)
    try:
        with open(db_file, 'w') as json_db:
            json_db.write(payload)
    except FileNotFoundError:
        # The fict directory vanished; report rather than crash the run.
        LOGGER.error('Could not write to: %s', db_file)
def read_db(args):
    """Read the JSON database from disk.

    Returns the decoded list, or an empty list when the database file is
    missing or corrupt (idiom fix: the original built empty results via the
    roundabout ``json.loads('[]')``).
    """
    db_file = os.path.abspath(
        '{}/{}'.format(args['--fict-dir'], args['--fict-db-name']))
    LOGGER.debug("reading db: %s", db_file)
    if not os.path.isfile(db_file):
        # No database yet -- behave as if it were empty.
        return []
    with open(db_file, 'r') as json_db:
        try:
            return json.load(json_db)
        except ValueError:
            # Corrupt or empty file; json.JSONDecodeError subclasses
            # ValueError, so this matches the original behaviour.
            return []
def init(args):
    """Create the fict project directory, refusing to clobber an existing one."""
    path = args['--fict-dir']
    if os.path.isdir(path):
        # Abort with a non-zero exit status instead of overwriting state.
        sys.exit("FICT DB already exists at: {}".format(path))
    os.makedirs(path, exist_ok=True)
    LOGGER.info("FICT DB created at: %s", path)
def walkfs(path):
    """Yield ('directory'|'file', absolute_path) pairs for everything under *path*."""
    for root, dirnames, filenames in os.walk(path):
        abs_root = os.path.abspath(root)
        for entry in dirnames:
            yield ('directory', os.path.join(abs_root, entry))
        for entry in filenames:
            yield ('file', os.path.join(abs_root, entry))
def file_already_exist(path):
    """Return True when *path* is already tracked by a FileObj instance.

    FileObj.instances is a dict keyed by path (every other call site iterates
    ``FileObj.instances.items()`` as (path, obj) pairs), so a direct
    membership test replaces the original O(n) scan.
    """
    return path in FileObj.instances
def ignorable_file(path):
    """Return True when *path* contains any pattern from FILE_IGNORE_LIST.

    Idiom fix: use ``pattern in path`` instead of calling the
    ``__contains__`` dunder directly; ``any()`` already returns a bool, so
    the original ``bool(...)`` wrapper was redundant.
    """
    return any(pattern in path for pattern in FILE_IGNORE_LIST)
def add(args):
    """Create new instances of FileObjs for the file or tree at args['<path>'].

    Directories are walked recursively; ignorable and already-tracked entries
    are skipped.  Exits the process when the path is neither file nor dir.
    NOTE(review): the single-file branch does not check file_already_exist()
    the way the directory branch does -- confirm whether re-adding a file is
    intended to create a duplicate FileObj.
    """
    LOGGER.debug("Adding path: %s", args['<path>'])
    if os.path.isfile(args['<path>']) and not ignorable_file(args['<path>']):
        FileObj('file', args['<path>'], args['--hash-tool'], args['--default-hash-tool'])
    elif os.path.isdir(args['<path>']):
        for filetype, path in walkfs(args['<path>']):
            if not (ignorable_file(path) or file_already_exist(path)):
                FileObj(filetype, path, args['--hash-tool'], args['--default-hash-tool'])
                LOGGER.debug("Adding: %s (%s)", path, filetype)
            else:
                LOGGER.debug("Ignored/AlreadyAdded file: %s", path)
    else:
        sys.exit('Not a valid path for add')
def compute_runner(obj, args):
    """ The computation that happens per thread as dished out by the compute function.

    Hashes *obj* when its status is 'pending' (or when --recompute forces a
    reset) and periodically flushes the database: every 1000th invocation
    (tracked via the global countdown) sets update_file and triggers a
    write_db() under FILE_LOCK.
    """
    global counter
    update_file = False
    # Decrement the shared countdown under the lock; the thread that hits
    # zero resets it and becomes responsible for this flush cycle.
    with COUNTER_LOCK:
        counter -= 1
        if counter == 0:
            counter = 1000
            update_file = True
    if args['--recompute']:
        # Force this object back to 'pending' so its hash is redone.
        obj.set_status('pending')
    if obj.get_status() == 'pending':
        obj.set_hash()
        LOGGER.debug("\t - blake2: %s \n\t - %s: %s", obj.get_default_hash(), obj.get_hash_bin(), obj.get_hash())
        if update_file:
            with FILE_LOCK:
                write_db(args)
    else:
        # NOTE(review): when this branch is taken the update_file flag is
        # dropped, so a periodic flush can be skipped -- confirm intended.
        LOGGER.debug("Checksum already set for file %s", obj.get_path())
def compute(args):
    """ Compute hashes of all instances in FileObj.instances.

    Work is fanned out to threads via joblib; compute_runner() flushes the
    database periodically, and the caller (main) writes it once at the end.
    """
    # The alive_it helper here automatically gets a count and gives us a progress bar.
    # The progress bar is not always perfect.
    #
    bar_instances = alive_it(FileObj.instances.items(), enrich_print=False)
    # It's important to use prefer="threads" here as not using it uses processes and there's no ipc.
    # Here we use n_jobs=-2 as to ask the system for an acceptable number based on cpu. Using a higher number just
    # creates high cpu time for iowait and software interrupts.
    #
    Parallel(n_jobs=-2, prefer="threads")(delayed(compute_runner)(obj, args) for _, obj in bar_instances)
def get_list():
    """Log the (path, status, hash) tuple of every file managed by Fict."""
    # Idiom fix: a plain loop -- the original abused a list comprehension
    # purely for its logging side effects.
    for _, obj in FileObj.instances.items():
        LOGGER.info(obj.get_tuple())
def searched_instances(args):
    """Search instances in FileObj.instances.items() and return the ones that don't match args['<path>']

    NOTE(review): args['<path>'] is used as an *unescaped* regular
    expression (special characters in a path will change the match), and the
    '^' prefix is redundant because re.match already anchors at the start.
    When nothing matches, ALL instances are returned -- presumably "no
    filter" fallback behaviour; confirm with callers (check()).
    """
    re_pattern = re.compile('^{}'.format(args['<path>']))
    filtered_objects = [(path, obj) for path, obj in FileObj.instances.items() if re_pattern.match(obj.path)]
    LOGGER.debug("%s of %s total instances match inputted pattern '%s'", len(filtered_objects), len(FileObj.instances.items()), args['<path>'])
    if len(filtered_objects) > 0:
        return filtered_objects
    return FileObj.instances.items()
def check(args):
    """Verify stored checksums for every computed file matching args['<path>'].

    Files failing the standard (blake2) check are re-verified with the
    secondary hash tool; both failing is logged as an error.
    """
    instances = searched_instances(args)
    with alive_bar(len(instances), enrich_print=False) as bar:
        for _, obj in instances:
            # BUG FIX: the original used `obj.status not in "computed"`, a
            # substring test that would also accept statuses like "om" or
            # "put".  Compare for equality instead.
            if obj.status != "computed":
                continue
            if not obj.check_integrity(mode='standard'):
                LOGGER.error('std_FAIL[%s]: %s', obj.default_hash_bin, obj.path)
                if not obj.check_integrity(mode='secondary'):
                    LOGGER.error('2nd_FAIL[%s]: %s', obj.hash_bin, obj.path)
                else:
                    LOGGER.info('%s: \n\tPassed secondary integrity check (%s) but failed first (%s)', obj.path, obj.hash_bin, obj.default_hash_bin)
            else:
                LOGGER.debug('PASS[%s]: %s', obj.default_hash_bin, obj.path)
            bar()  # call bar function to increment progress bar
def status():
    """Print a summary of how many tracked files are pending vs. computed."""
    pending, computed, percent, bad = 0, 0, 0, 0
    for path, obj in FileObj.instances.items():
        _, o_status, _ = obj.get_tuple()
        # BUG FIX: the original used substring tests (`o_status in 'pending'`)
        # which also match fragments such as 'pen' or the empty string.
        if o_status == 'pending':
            pending += 1
        elif o_status == 'computed':
            computed += 1
        else:
            LOGGER.error("Bad Data found, please check file: %s, %s", path, o_status)
            bad += 1
    LOGGER.info("Pending Files: %s", pending)
    LOGGER.info("Computed Files: %s", computed)
    try:
        percent = round(computed/(computed + pending) * 100, 2)
    except ZeroDivisionError:
        # Nothing tracked yet -- report 0% rather than crash.
        LOGGER.info("Computed %%: %s%%", 0)
    else:
        LOGGER.info("Computed %%: %s%%", percent)
    if bad > 0:
        LOGGER.error("Bad Data: %s", bad)
def construct(args):
    """Reinitialize instances of FileObj via read_db.

    Exits with a helpful message on schema mismatches (KeyError); any other
    exception is logged and re-raised.
    """
    try:
        for obj in read_db(args):
            FileObj.load(obj)
    except KeyError as error:
        sys.exit('JSON Key {} expected/unexpected in your fict_db. Check FileObJ schema'.format(error))
    except:
        # Bare except is acceptable here only because it re-raises after
        # logging; it never swallows the error.
        LOGGER.error('fict_db reading exception: %s', sys.exc_info()[0])
        raise
def setup_logging(args):
    """Configure Logging to console.

    Defaults to INFO; --verbose switches the fict logger to DEBUG.
    """
    LOGGER.addHandler(logging.StreamHandler())
    LOGGER.setLevel(logging.INFO)
    if args['--verbose']:
        LOGGER.setLevel(logging.DEBUG)
    LOGGER.debug("Logging Level set to %s", logging.getLevelName(LOGGER.getEffectiveLevel()))
    LOGGER.debug(args)
def main(args):
    """ Main Function: dispatch the docopt-style *args* dict to a subcommand.

    add/compute persist the database afterwards; list/check/status are
    read-only and exit directly.
    """
    #Initialization
    setup_logging(args)
    if args['init']:
        init(args)
    elif not os.path.isdir(args['--fict-dir']):
        # Every other subcommand needs an existing project directory.
        sys.exit("You must initialize a fict project first, 'fict init'")
    # Construct FileObj instances
    construct(args)
    # Conditional operations after initialization and construction.
    if args['add']:
        add(args)
        write_db(args)
    elif args['compute']:
        compute(args)
        write_db(args)
    elif args['list']:
        get_list()
        sys.exit()
    elif args['check']:
        check(args)
        sys.exit()
    elif args['status']:
        status()
        sys.exit()
/DXC-RL-1.0.3.5.tar.gz/DXC-RL-1.0.3.5/dxc/ai/clean_data/clean_data.py | import pandas as pd
import janitor #data cleaning
from ftfy import fix_text #data cleaning
import nltk #data cleaning
nltk.download('punkt') #data cleaning
import scrubadub #data cleaning
import arrow #normalizing dates
import numpy as np
from sklearn.base import TransformerMixin
from sklearn.impute import KNNImputer ##using KNN as imputer for categorical fields
from sklearn.preprocessing import OrdinalEncoder ##Ordinal encoder is being used for encoding categorical objects
from dxc.ai.global_variables import globals_file
from datetime import datetime
def encode(data):
    '''Ordinal-encode the non-null values of a pandas Series *in place*.

    Returns (data, encoder) so the fitted encoder can later invert the
    encoding.  NOTE(review): mutates the caller's Series via .loc.
    '''
    encoder = OrdinalEncoder()
    #retains only non-null values
    nonulls = np.array(data.dropna())
    #reshapes the data into the (n_samples, 1) column expected by sklearn
    impute_reshape = nonulls.reshape(-1,1)
    #encode the non-null data
    impute_ordinal = encoder.fit_transform(impute_reshape)
    #encoders_store[column_name]=encoder
    #Assign back encoded values to non-null values
    data.loc[data.notnull()] = np.squeeze(impute_ordinal)
    return (data,encoder)
def impute_df(df):
    """Impute missing values: skew-aware mean/median for numeric columns,
    KNN over ordinal-encoded values for object columns.

    Returns a new DataFrame; categorical codes are decoded back to their
    original labels afterwards.
    """
    # imputer = KNN()
    imputer = KNNImputer(n_neighbors=2)
    object_types = list(df.select_dtypes(include=['object']).columns)
    num_types = list(set(df.columns) - set(object_types))
    encoders_store={}
    for column in num_types:
        skew=df[column].skew()
        # Roughly symmetric columns get the mean; skewed ones the median.
        if (-1 < skew < 1):
            df[column]=df[column].fillna(df[column].mean())
        else :
            df[column]=df[column].fillna(df[column].median())
    #create a for loop to iterate through each column in the data
    for columns in object_types:
        new=encode(df[columns])
        encoders_store[columns]=new[1]
    # NOTE(review): np.round assumes KNN-averaged values map back onto valid
    # ordinal codes -- confirm this holds for all categorical columns.
    imputed_data = pd.DataFrame(np.round(imputer.fit_transform(df)),columns = df.columns)
    for columns in object_types:
        imputed_data[columns]=encoders_store[columns].inverse_transform(np.array(imputed_data[columns]).reshape(-1,1))
    return imputed_data
#CLEANING FILE
def clean_dataframe(df, impute=False, text_fields=None, date_fields=None,
                    numeric_fields=None, categorical_fields=None):
    """Clean a raw DataFrame: normalise column names, drop empty rows and
    columns, scrub text fields, optionally impute missing values, and coerce
    date/numeric/categorical columns to proper dtypes.

    Field names are matched after lower-casing and joining on underscores,
    mirroring janitor's clean_names().  Returns the cleaned DataFrame.
    """
    # Fresh lists instead of the original mutable default arguments.
    text_fields = list(text_fields) if text_fields is not None else []
    date_fields = list(date_fields) if date_fields is not None else []
    numeric_fields = list(numeric_fields) if numeric_fields is not None else []
    categorical_fields = list(categorical_fields) if categorical_fields is not None else []
    clean_df = (
        df
        #make the column names lower case and remove spaces
        .clean_names()
        #remove empty columns
        .remove_empty()
        #remove rows that are entirely empty
        .dropna(how='all')
    )
    #remove harmful characters. remove personal identifiers. make lowercase
    for field in text_fields:
        field = '_'.join(field.split()).lower()
        clean_df[field] = clean_df[field].fillna(' ').apply(fix_text)
        clean_df[field] = clean_df[field].apply(scrubadub.clean, replace_with='identifier')
        clean_df[field] = clean_df[field].str.lower()
    #impute missing values
    if impute:
        clean_df = impute_df(clean_df)
    #standardize the format of all date fields
    for field in date_fields:
        field = '_'.join(field.split()).lower()
        try:
            clean_df[field] = clean_df[field].apply(arrow.get)
        # BUG FIX: the bare name `ParserError` was never imported, so any
        # parse failure raised NameError instead of reaching the fallback
        # strptime formats below.
        except arrow.parser.ParserError:
            try:
                clean_df[field] = clean_df[field].apply(lambda x: arrow.get(datetime.strptime(x,'%m/%d/%Y')))
            except ValueError:
                clean_df[field] = clean_df[field].apply(lambda x: arrow.get(datetime.strptime(x,'%m/%d/%y')))
    #make sure all numeric fields have the proper data type
    for field in numeric_fields:
        field = '_'.join(field.split()).lower()
        clean_df[field] = pd.to_numeric(clean_df[field])
    #make sure all categorical variables have the proper data type
    for field in categorical_fields:
        field = '_'.join(field.split()).lower()
        clean_df[field] = clean_df[field].astype('category')
    clean_df = clean_df.clean_names()
    # Record (for the wider dxc.ai pipeline) that cleaning has run.
    globals_file.clean_data_used = True
    return(clean_df)
/GxSphinx-1.0.0.tar.gz/GxSphinx-1.0.0/doc/usage/extensions/math.rst | .. highlight:: rest
.. _math-support:
Math support for HTML outputs in Sphinx
=======================================
.. module:: sphinx.ext.mathbase
:synopsis: Common math support for imgmath and mathjax / jsmath.
.. versionadded:: 0.5
.. versionchanged:: 1.8
Math support for non-HTML builders is integrated to sphinx-core.
So mathbase extension is no longer needed.
Since mathematical notation isn't natively supported by HTML in any way, Sphinx
gives a math support to HTML document with several extensions. These use the
reStructuredText math :rst:dir:`directive <math>` and :rst:role:`role <math>`.
:mod:`sphinx.ext.imgmath` -- Render math as images
--------------------------------------------------
.. module:: sphinx.ext.imgmath
:synopsis: Render math as PNG or SVG images.
.. versionadded:: 1.4
This extension renders math via LaTeX and dvipng_ or dvisvgm_ into PNG or SVG
images. This of course means that the computer where the docs are built must
have both programs available.
There are various configuration values you can set to influence how the images
are built:
.. confval:: imgmath_image_format
The output image format. The default is ``'png'``. It should be either
``'png'`` or ``'svg'``. The image is produced by first executing ``latex``
on the TeX mathematical mark-up then (depending on the requested format)
either `dvipng`_ or `dvisvgm`_.
.. confval:: imgmath_use_preview
``dvipng`` and ``dvisvgm`` both have the ability to collect from LaTeX the
"depth" of the rendered math: an inline image should use this "depth" in a
``vertical-align`` style to get correctly aligned with surrounding text.
This mechanism requires the `LaTeX preview package`_ (available as
``preview-latex-style`` on Ubuntu xenial). Therefore, the default for this
option is ``False`` but it is strongly recommended to set it to ``True``.
.. versionchanged:: 2.2
This option can be used with the ``'svg'`` :confval:`imgmath_image_format`.
.. confval:: imgmath_add_tooltips
Default: ``True``. If false, do not add the LaTeX code as an "alt" attribute
for math images.
.. confval:: imgmath_font_size
The font size (in ``pt``) of the displayed math. The default value is
``12``. It must be a positive integer.
.. confval:: imgmath_latex
The command name with which to invoke LaTeX. The default is ``'latex'``; you
may need to set this to a full path if ``latex`` is not in the executable
search path.
Since this setting is not portable from system to system, it is normally not
useful to set it in ``conf.py``; rather, giving it on the
:program:`sphinx-build` command line via the :option:`-D <sphinx-build -D>`
option should be preferable, like this::
sphinx-build -b html -D imgmath_latex=C:\tex\latex.exe . _build/html
This value should only contain the path to the latex executable, not further
arguments; use :confval:`imgmath_latex_args` for that purpose.
.. hint::
Some fancy LaTeX mark-up (an example was reported which used TikZ to add
various decorations to the equation) require multiple runs of the LaTeX
executable. To handle this, set this configuration setting to
``'latexmk'`` (or a full path to it) as this Perl script reliably
chooses dynamically how many latex runs are needed.
.. confval:: imgmath_latex_args
Additional arguments to give to latex, as a list. The default is an empty
list.
.. confval:: imgmath_latex_preamble
Additional LaTeX code to put into the preamble of the LaTeX files used to
translate the math snippets. This is left empty by default. Use it
e.g. to add packages which modify the fonts used for math, such as
``'\\usepackage{newtxsf}'`` for sans-serif fonts, or
``'\\usepackage{fouriernc}'`` for serif fonts. Indeed, the default LaTeX
math fonts have rather thin glyphs which (in HTML output) often do not
match well with the font for text.
.. confval:: imgmath_dvipng
The command name to invoke ``dvipng``. The default is
``'dvipng'``; you may need to set this to a full path if ``dvipng`` is not in
the executable search path. This option is only used when
``imgmath_image_format`` is set to ``'png'``.
.. confval:: imgmath_dvipng_args
Additional arguments to give to dvipng, as a list. The default value is
``['-gamma', '1.5', '-D', '110', '-bg', 'Transparent']`` which makes the
   image a bit darker and larger than it is by default (this compensates
somewhat for the thinness of default LaTeX math fonts), and produces PNGs with a
transparent background. This option is used only when
``imgmath_image_format`` is ``'png'``.
.. confval:: imgmath_dvisvgm
The command name to invoke ``dvisvgm``. The default is
``'dvisvgm'``; you may need to set this to a full path if ``dvisvgm`` is not
in the executable search path. This option is only used when
``imgmath_image_format`` is ``'svg'``.
.. confval:: imgmath_dvisvgm_args
Additional arguments to give to dvisvgm, as a list. The default value is
``['--no-fonts']``, which means that ``dvisvgm`` will render glyphs as path
elements (cf the `dvisvgm FAQ`_). This option is used only when
``imgmath_image_format`` is ``'svg'``.
:mod:`sphinx.ext.mathjax` -- Render math via JavaScript
-------------------------------------------------------
.. module:: sphinx.ext.mathjax
:synopsis: Render math using JavaScript via MathJax.
.. versionadded:: 1.1
This extension puts math as-is into the HTML files. The JavaScript package
MathJax_ is then loaded and transforms the LaTeX markup to readable math live in
the browser.
Because MathJax (and the necessary fonts) is very large, it is not included in
Sphinx but is set to automatically include it from a third-party site.
.. attention::
You should use the math :rst:dir:`directive <math>` and
:rst:role:`role <math>`, not the native MathJax ``$$``, ``\(``, etc.
.. confval:: mathjax_path
The path to the JavaScript file to include in the HTML files in order to load
MathJax.
The default is the ``https://`` URL that loads the JS files from the
`cdnjs`__ Content Delivery Network. See the `MathJax Getting Started
page`__ for details. If you want MathJax to be available offline or
without including resources from a third-party site, you have to
download it and set this value to a different path.
__ https://cdnjs.com
__ https://docs.mathjax.org/en/latest/start.html
The path can be absolute or relative; if it is relative, it is relative to
the ``_static`` directory of the built docs.
For example, if you put MathJax into the static path of the Sphinx docs, this
value would be ``MathJax/MathJax.js``. If you host more than one Sphinx
documentation set on one server, it is advisable to install MathJax in a
shared location.
You can also give a full ``https://`` URL different from the CDN URL.
.. confval:: mathjax_options
The options to script tag for mathjax. For example, you can set integrity
option with following setting::
mathjax_options = {
'integrity': 'sha384-......',
}
The default is empty (``{}``).
.. versionadded:: 1.8
.. confval:: mathjax_config
The inline configuration options for mathjax. The value is used as a
parameter of ``MathJax.Hub.Config()``. For more information, please
read `Using in-line configuration options`_.
For example::
mathjax_config = {
'extensions': ['tex2jax.js'],
'jax': ['input/TeX', 'output/HTML-CSS'],
}
The default is empty (not configured).
.. versionadded:: 1.8
.. _Using in-line configuration options: https://docs.mathjax.org/en/latest/configuration.html#using-in-line-configuration-options
:mod:`sphinx.ext.jsmath` -- Render math via JavaScript
------------------------------------------------------
.. module:: sphinx.ext.jsmath
:synopsis: Render math using JavaScript via JSMath.
This extension works just as the MathJax extension does, but uses the older
package jsMath_. It provides this config value:
.. confval:: jsmath_path
The path to the JavaScript file to include in the HTML files in order to load
JSMath. There is no default.
The path can be absolute or relative; if it is relative, it is relative to
the ``_static`` directory of the built docs.
For example, if you put JSMath into the static path of the Sphinx docs, this
value would be ``jsMath/easy/load.js``. If you host more than one
Sphinx documentation set on one server, it is advisable to install jsMath in
a shared location.
.. _dvipng: https://savannah.nongnu.org/projects/dvipng/
.. _dvisvgm: https://dvisvgm.de/
.. _dvisvgm FAQ: https://dvisvgm.de/FAQ
.. _MathJax: https://www.mathjax.org/
.. _jsMath: http://www.math.union.edu/~dpvc/jsmath/
.. _LaTeX preview package: https://www.gnu.org/software/auctex/preview-latex.html
| PypiClean |
/MetaCalls-0.0.5-cp310-cp310-manylinux2014_x86_64.whl/metacalls/node_modules/make-dir/node_modules/semver/bin/semver.js | // Standalone semver comparison program.
// Exits successfully and prints matching version(s) if
// any supplied version is valid and passes all tests.
// CLI state shared between main() and the helper functions below.
var argv = process.argv.slice(2)
var versions = [] // candidate versions collected from the command line
var range = [] // -r/--range filters to apply
var inc = null // -i/--increment level, if requested
var version = require('../package.json').version
var loose = false
var includePrerelease = false
var coerce = false
var rtl = false
var identifier // --preid prerelease identifier
var semver = require('../semver')
var reverse = false
var options = {} // semver comparison options
main()
// Parse argv, validate/filter the supplied versions, and hand the survivors
// to success() (or exit via fail()/failInc()/help()).
function main () {
  if (!argv.length) return help()
  while (argv.length) {
    var a = argv.shift()
    var indexOfEqualSign = a.indexOf('=')
    if (indexOfEqualSign !== -1) {
      // BUG FIX: capture the value *before* truncating `a`; the original
      // sliced the already-truncated string, pushing '' back onto argv and
      // losing the value of every --flag=value argument.
      var value = a.slice(indexOfEqualSign + 1)
      a = a.slice(0, indexOfEqualSign)
      argv.unshift(value)
    }
    switch (a) {
      case '-rv': case '-rev': case '--rev': case '--reverse':
        reverse = true
        break
      case '-l': case '--loose':
        loose = true
        break
      case '-p': case '--include-prerelease':
        includePrerelease = true
        break
      case '-v': case '--version':
        versions.push(argv.shift())
        break
      case '-i': case '--inc': case '--increment':
        switch (argv[0]) {
          case 'major': case 'minor': case 'patch': case 'prerelease':
          case 'premajor': case 'preminor': case 'prepatch':
            inc = argv.shift()
            break
          default:
            inc = 'patch'
            break
        }
        break
      case '--preid':
        identifier = argv.shift()
        break
      case '-r': case '--range':
        range.push(argv.shift())
        break
      case '-c': case '--coerce':
        coerce = true
        break
      case '--rtl':
        rtl = true
        break
      case '--ltr':
        rtl = false
        break
      case '-h': case '--help': case '-?':
        return help()
      default:
        versions.push(a)
        break
    }
  }

  // BUG FIX: assign to the module-level `options` (read by success() when
  // sorting) instead of shadowing it with a local `var` declaration, which
  // left the global as {} so --loose/--include-prerelease were ignored there.
  options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl }

  versions = versions.map(function (v) {
    return coerce ? (semver.coerce(v, options) || { version: v }).version : v
  }).filter(function (v) {
    return semver.valid(v)
  })
  if (!versions.length) return fail()
  if (inc && (versions.length !== 1 || range.length)) { return failInc() }

  for (var i = 0, l = range.length; i < l; i++) {
    versions = versions.filter(function (v) {
      return semver.satisfies(v, range[i], options)
    })
    if (!versions.length) return fail()
  }
  return success(versions)
}
// Abort: --inc only makes sense with exactly one version and no --range.
function failInc () {
  console.error('--inc can only be used on a single version with no range')
  fail()
}
function fail () { process.exit(1) }
// Print the surviving versions in SemVer precedence order (reversed with
// --reverse), cleaned, and optionally incremented via --inc.
// NOTE(review): reads the module-level `versions`, `options`, `inc` and
// `identifier`; the `versions` argument passed by main() is ignored, and the
// forEach callback's `i`/`_` parameters are unused.
function success () {
  var compare = reverse ? 'rcompare' : 'compare'
  versions.sort(function (a, b) {
    return semver[compare](a, b, options)
  }).map(function (v) {
    return semver.clean(v, options)
  }).map(function (v) {
    return inc ? semver.inc(v, inc, options, identifier) : v
  }).forEach(function (v, i, _) { console.log(v) })
}
// Print the command-line usage text for the semver utility and return.
// (Called for -h/--help/-? and when no valid arguments are supplied.)
function help () {
  console.log(['SemVer ' + version,
    '',
    'A JavaScript implementation of the https://semver.org/ specification',
    'Copyright Isaac Z. Schlueter',
    '',
    'Usage: semver [options] <version> [<version> [...]]',
    'Prints valid versions sorted by SemVer precedence',
    '',
    'Options:',
    '-r --range <range>',
    ' Print versions that match the specified range.',
    '',
    '-i --increment [<level>]',
    ' Increment a version by the specified level. Level can',
    ' be one of: major, minor, patch, premajor, preminor,',
    " prepatch, or prerelease. Default level is 'patch'.",
    ' Only one version may be specified.',
    '',
    '--preid <identifier>',
    ' Identifier to be used to prefix premajor, preminor,',
    ' prepatch or prerelease version increments.',
    '',
    '-l --loose',
    ' Interpret versions and ranges loosely',
    '',
    '-p --include-prerelease',
    ' Always include prerelease versions in range matching',
    '',
    '-c --coerce',
    ' Coerce a string into SemVer if possible',
    ' (does not imply --loose)',
    '',
    '--rtl',
    ' Coerce version strings right to left',
    '',
    '--ltr',
    ' Coerce version strings left to right (default)',
    '',
    'Program exits successfully if any valid version satisfies',
    'all supplied ranges, and prints all satisfying versions.',
    '',
    'If no satisfying versions are found, then exits failure.',
    '',
    'Versions are printed in ascending order, so supplying',
    'multiple versions to the utility will just sort them.'
  ].join('\n'))
}
/Graph_RL-0.1.2.tar.gz/Graph_RL-0.1.2/graph_rl/envs/obstacle_env.py | import numpy as np
import gym
from gym import spaces
from gym.utils import seeding
from .graphics_utils import ArrowConfig, get_default_subgoal_colors
class ObstacleEnv(gym.GoalEnv):
    """2D goal-reaching task with a circular obstacle at the stage center.

    The agent moves on a square stage and has to reach a randomly drawn
    goal position.  All positions exposed through observations are
    normalized to [-1, 1] by dividing by half the stage dimension.
    """

    metadata = {'render.modes': ['human']}

    def __init__(self, obstacle_radius=0.5, stage_dimension=6.,
                 agent_speed=5e-2, max_episode_length=300, subgoal_radius=0.033):
        # obstacle_radius: radius of the central circular obstacle (stage units)
        # stage_dimension: side length of the square stage
        # agent_speed: displacement magnitude per step along the action direction
        # max_episode_length: episode is terminated after this many steps
        # subgoal_radius: rendering radius of subgoals, given as a fraction
        #     of half the stage dimension
        super().__init__()
        self.stage_dimension = stage_dimension
        self.obstacle_radius = obstacle_radius
        self.agent_speed = agent_speed
        self.max_episode_length = max_episode_length
        # Goal-style observation space: all components are 2D positions
        # normalized to [-1, 1].
        desired_goal_space = spaces.Box(
            low = -1.,
            high = 1.,
            shape = (2,),
            dtype = np.float32)
        achieved_goal_space = desired_goal_space
        obs_space = desired_goal_space
        self.observation_space = spaces.Dict({
            "observation": obs_space,
            "desired_goal": desired_goal_space,
            "achieved_goal": achieved_goal_space
        })
        # Actions are 2D direction vectors with components in [-1, 1].
        self.action_space = spaces.Box(
            low = -1.,
            high = 1.,
            shape = (2,),
            dtype = np.float32)
        # Rendering state (the pyglet window is created lazily in render()).
        self.window = None
        self.window_width = 800
        self.window_height = 800
        self.background_color = (1.0, 1.0, 1.0, 1.0)
        self.obstacle_color = (0.4, 0.4, 0.4)
        self.agent_position = np.array((0., -1.0))
        self.agent_radius = 0.1
        self.agent_color = (0.0, 0.0, 0.0)
        self._draw_goal()
        self.goal_radius = 0.1
        self.goal_color = (0.0, 0.0, 0.0)
        self.current_step = 0
        # Subgoal visualization data, set from outside via update_subgoals()
        # and update_timed_subgoals().
        self._subgoals = []
        self._timed_subgoals = []
        self._tolerances = []
        self._subgoal_colors = get_default_subgoal_colors()
        self.subgoal_radius = float(subgoal_radius)*self.stage_dimension*0.5
        # Optional scalar grid (e.g. a value function) rendered as a color
        # map behind the scene; see update_function_grid().
        self.function_grid = None
        self.color_low = np.array((0., 0., 1.))
        self.color_high = np.array((1., 0., 0.))
        self.value_low = -10.
        self.value_high = 0.

    def _draw_goal(self):
        """Sample a new goal position by rejection sampling.

        Candidates are drawn from a normal distribution around (0, 1) with
        std 0.3 and rejected while they lie within agent_radius +
        obstacle_radius of the stage center (presumably to keep the goal
        clear of the central obstacle -- TODO confirm intent).
        """
        while True:
            candidate = np.random.normal((0., 1.), 0.3, size = (2,))
            if np.linalg.norm(candidate) > self.agent_radius + self.obstacle_radius:
                self.goal = candidate
                break

    def update_function_grid(self, values, low, high):
        # Store a grid of scalar values to be rendered as a color map,
        # together with the value range mapped onto the color scale.
        self.function_grid = values
        self.value_low = low
        self.value_high = high

    def compute_reward(self, achieved_goal, desired_goal, info):
        """Sparse reward: 0. within goal tolerance, -1. otherwise.

        The tolerance is the goal radius expressed in normalized
        coordinates (goal_radius divided by half the stage dimension).
        """
        if np.linalg.norm(achieved_goal - desired_goal) <= \
                self.goal_radius/self.stage_dimension/0.5:
            return 0.
        else:
            return -1.

    def _get_obs(self):
        # Positions are divided by half the stage dimension so that every
        # observation component lies in [-1, 1].
        obs = {
            "observation": self.agent_position/self.stage_dimension*2.,
            "desired_goal" : self.goal/self.stage_dimension*2.,
            "achieved_goal": self.agent_position/self.stage_dimension*2.
        }
        return obs

    @classmethod
    def map_to_env_goal(self, partial_obs):
        # Identity mapping: a partial observation already is an environment
        # goal here.  (NOTE(review): declared as a classmethod but the
        # first parameter is named ``self``.)
        return partial_obs

    def step(self, action):
        self.agent_position += self.agent_speed*np.array(action)
        # Clamp the agent to the stage boundaries.
        self.agent_position = np.clip(self.agent_position,
            -0.5*self.stage_dimension, 0.5*self.stage_dimension)
        # Project the agent back out of the central obstacle if the move
        # made it penetrate the obstacle disc.
        distance_to_center = np.linalg.norm(self.agent_position)
        if distance_to_center < self.obstacle_radius + self.agent_radius:
            self.agent_position *= (self.obstacle_radius + self.agent_radius)/distance_to_center
        info = {}
        obs = self._get_obs()
        reward = self.compute_reward(self.agent_position/self.stage_dimension*2.,
            self.goal/self.stage_dimension*2., info)
        self.current_step += 1
        # Episode ends on success (reward 0.) or when the step limit is hit.
        done = reward == 0. or self.current_step >= self.max_episode_length
        return obs, reward, done, info

    def reset(self):
        # Put the agent back at its start position and draw a fresh goal.
        self.agent_position = np.array((0., -1.0))
        self.current_step = 0
        self._draw_goal()
        return self._get_obs()

    def update_subgoals(self, subgoals):
        # Subgoals arrive in normalized ([-1, 1]) coordinates; convert them
        # to stage units for rendering.
        self._subgoals = [np.array(sg)*self.stage_dimension*0.5 for sg in subgoals]

    def update_timed_subgoals(self, timed_subgoals, tolerances):
        self._timed_subgoals = timed_subgoals
        self._tolerances = tolerances
        # Convert the goal of each timed subgoal from normalized
        # coordinates to stage units (note: mutates the passed objects).
        for ts in self._timed_subgoals:
            if ts is not None:
                ts.goal = self.stage_dimension*0.5*ts.goal

    def render(self, mode='human', close=False):
        """Render the scene with pyglet.

        pyglet is imported lazily so that it is only required when
        rendering is actually used.
        """
        import pyglet
        import pyglet.gl as gl
        from .pyglet_utils import (draw_circle_sector, draw_box, draw_line, draw_vector, draw_vector_with_outline,
            draw_circular_subgoal)
        if self.window is None:
            # Create the window on first use and install a resize handler
            # that keeps the projection matched to the stage dimensions and
            # the window aspect ratio.
            self.window = pyglet.window.Window(width = self.window_width,
                height = self.window_height,
                vsync = True,
                resizable = True)
            gl.glClearColor(*self.background_color)

            @self.window.event
            def on_resize(width, height):
                gl.glViewport(0, 0, width, height)
                gl.glMatrixMode(gl.GL_PROJECTION)
                gl.glLoadIdentity()
                gl.glOrtho(-0.5*self.stage_dimension,
                    0.5*self.stage_dimension,
                    -0.5*float(height)/width*self.stage_dimension,
                    0.5*float(height)/width*self.stage_dimension,
                    -1.,
                    1.)
                gl.glMatrixMode(gl.GL_MODELVIEW)
                return pyglet.event.EVENT_HANDLED

        def draw_function_grid(values, a, b, color_low, color_high, value_low, value_high):
            # Draw a 2D grid of scalar values between corners a and b as
            # triangle strips, mapping each (clipped) value linearly from
            # color_low to color_high.
            diff = np.array(b) - np.array(a)
            diff[0] /= values.shape[0]
            diff[1] /= values.shape[1]
            v_diff = value_high - value_low
            for j in range(values.shape[1]):
                gl.glBegin(gl.GL_TRIANGLE_STRIP)
                for i in range(values.shape[0]):
                    value = np.clip(values[i, j], value_low, value_high)
                    v = (value - value_low)/v_diff
                    gl.glColor3f(*list(v*color_high + (1. - v)*color_low))
                    gl.glVertex2f(a[0] + diff[0]*i, a[1] + diff[1]*j)
                    value = np.clip(values[i, min(j + 1, values.shape[1] - 1)], value_low, value_high)
                    v = (value - value_low)/v_diff
                    gl.glColor3f(*list(v*color_high + (1. - v)*color_low))
                    gl.glVertex2f(a[0] + diff[0]*i, a[1] + diff[1]*(j + 1))
                # Close the strip at the right edge of the grid.
                gl.glVertex2f(a[0] + diff[0]*values.shape[0], a[1] + diff[1]*j)
                gl.glVertex2f(a[0] + diff[0]*values.shape[0], a[1] + diff[1]*(j + 1))
                gl.glEnd()

        def draw_timed_circular_subgoal(position, delta_t_ach, delta_t_comm, radius, color):
            draw_circular_subgoal(position, None, radius, color, None)
            # desired time until achievement
            draw_box(position + (0., radius + 0.05), delta_t_ach/100., 0.03, 0., color)
            # remaining commitment time
            draw_box(position + (0., radius + 0.02), delta_t_comm/100., 0.03, 0., (0., 0., 0.))

        self.window.clear()
        self.window.switch_to()
        self.window.dispatch_events()
        gl.glLoadIdentity()
        n_triangles = 32
        # Optional value-function color map in the background.
        if self.function_grid is not None:
            draw_function_grid(self.function_grid, -0.5*self.stage_dimension*np.ones(2),
                0.5*self.stage_dimension*np.ones(2), self.color_low, self.color_high,
                self.value_low, self.value_high)
        # obstacle
        draw_circle_sector([0., 0.],
            0.,
            self.obstacle_radius,
            n_triangles,
            self.obstacle_color,
            n_triangles)
        # subgoals (black outline disc with a smaller colored disc on top)
        for subgoal, color in zip(self._subgoals, self._subgoal_colors):
            draw_circle_sector(subgoal,
                0.,
                self.subgoal_radius,
                n_triangles,
                (0., 0., 0.),
                n_triangles)
            draw_circle_sector(subgoal,
                0.,
                0.8*self.subgoal_radius,
                n_triangles,
                color,
                n_triangles)
        # timed subgoals (with time bars above them)
        for ts, color, tol in zip(self._timed_subgoals, self._subgoal_colors, self._tolerances):
            if ts is not None:
                r = tol if tol is not None else self.subgoal_radius
                draw_timed_circular_subgoal(ts.goal, ts.delta_t_ach,
                    ts.delta_t_comm, r, color)
        # goal (drawn as a ring: filled disc plus background-colored inner disc)
        draw_circle_sector(self.goal,
            0.,
            self.goal_radius,
            n_triangles,
            self.goal_color,
            n_triangles)
        draw_circle_sector(self.goal,
            0.,
            self.goal_radius*0.8,
            n_triangles,
            self.background_color[:3],
            n_triangles)
        # agent
        draw_circle_sector(self.agent_position,
            0.,
            self.agent_radius,
            n_triangles,
            self.agent_color,
            n_triangles)
        self.window.flip()
class ObstacleEnvHAC(ObstacleEnv):
    """Version of the obstacle environment that exposes the interface
    required by Andrew Levy's implementation of HAC."""

    def __init__(self):
        super().__init__()
        self.name = "ObstacleEnvHAC"
        self.max_actions = int(self.max_episode_length)
        # Visualization control (see set_visualization()).
        self.visualize = False
        self.visualize_every_nth_episode = 10
        self.episode_counter = 0
        # Projection functions: states already are (achieved) goals here,
        # so both projections are the identity on the state.
        self.project_state_to_end_goal = lambda s, s2: s
        self.project_state_to_subgoal = lambda s, s2: s
        # Variables needed for the HAC implementation.
        self.state_dim = self.observation_space["observation"].low.shape[0]
        self.action_dim = self.action_space.low.shape[0]  # low-level action dim
        self.action_bounds = np.ones(2)  # low-level action bounds
        self.action_offset = np.zeros((len(self.action_bounds)))  # Assumes symmetric low-level action ranges
        self.end_goal_dim = self.observation_space["desired_goal"].low.shape[0]
        self.subgoal_dim = 2
        self.subgoal_bounds = np.array([
            [-1., 1.],
            [-1., 1.],
        ])
        self.subgoal_bounds_symmetric = np.array(
            [1., 1.])
        self.subgoal_bounds_offset = np.zeros((2))
        # End goal/subgoal thresholds (in normalized coordinates).
        self.subgoal_thresholds = np.ones(2)*(self.goal_radius/self.stage_dimension*2.)
        self.end_goal_thresholds = np.ones(2)*(self.goal_radius/self.stage_dimension*2.)

    def get_state(self):
        """Return the current state (normalized agent position)."""
        return self._get_obs()["observation"]

    def reset_sim(self):
        # Save the old goal because reset() draws a new one.
        old_goal = self.goal
        self.reset()
        # Restore the old goal in order not to mess up the HAC
        # implementation, which manages goals itself via get_next_goal().
        # (Note: If this step is omitted, the goal in the HAC
        # implementation and in the environment diverge and rendering and
        # reward calculation in the environment are off.)
        self.goal = old_goal
        self.episode_counter += 1
        # Return state
        return self.get_state()

    def render(self):
        # BUG FIX: the previous code called ``super().render(self)``,
        # which forwarded the instance as the ``mode`` argument of
        # ObstacleEnv.render(mode='human', close=False); the bound
        # super() call already supplies ``self`` implicitly.
        super().render()

    def execute_action(self, action):
        obs, reward, done, info = self.step(action)
        self.done = done
        if self.visualize:
            if self.episode_counter % self.visualize_every_nth_episode == 0:
                self.render()
        # call the current state "sim" in order to trick the HAC implementation
        # which expects an underlying Mujoco simulation
        self.sim = self.get_state()
        return self.sim

    def display_end_goal(self, end_goal):
        # The end goal is already drawn by ObstacleEnv.render().
        pass

    def get_next_goal(self, test):
        """Draw a new goal and return it in normalized coordinates."""
        self._draw_goal()
        return self.goal/self.stage_dimension*2.0

    def display_subgoals(self, subgoals):
        self.update_subgoals(subgoals)

    def set_visualization(self, visualize, visualize_every_nth_episode):
        self.visualize = visualize
        self.visualize_every_nth_episode = visualize_every_nth_episode
/Comicsru-1.0.0.tar.gz/Comicsru-1.0.0/src/Comicsru.py | import requests
from bs4 import BeautifulSoup
import os
import sys
from urllib.request import Request, urlopen
import shutil
from shutil import make_archive
import img2pdf
import glob
# Root URL of the scraped site; all request URLs below are built from it.
base_url = 'https://readcomicsonline.ru/'
def get_summary(comic_name):
    """Print the summary paragraph of a comic from readcomicsonline.ru.

    Exits the process with a non-zero status if the page cannot be
    fetched or contains no summary paragraph.
    """
    try:
        url = f'{base_url}comic/{comic_name}'
        response = requests.get(url)
        response.raise_for_status()  # treat HTTP errors (404, 5xx) as failures
        soup = BeautifulSoup(response.text, 'lxml')
        summary = soup.find('p')
        if summary is None:
            # Previously this raised AttributeError and was masked by a
            # bare except; make the failure explicit instead.
            raise ValueError(f'no summary found for {comic_name!r}')
        print(summary.text)
    except (requests.RequestException, ValueError) as exc:
        # A bare `except:` used to swallow SystemExit/KeyboardInterrupt
        # too, and `sys.exit()` exited with status 0 on this error path.
        print(f'Error: {exc}')
        sys.exit(1)
def suggest_random():
    """Fetch the site's /random page and print the random comic's title.

    Exits the process with a non-zero status if the page cannot be
    fetched or has no <title> element.
    """
    try:
        random_url = f'{base_url}random'
        response = requests.get(random_url)
        response.raise_for_status()  # treat HTTP errors as failures
        soup = BeautifulSoup(response.text, 'html.parser')
        random_title = soup.title
        if random_title is None:
            raise ValueError('random comic page has no <title> element')
        random_comic = random_title.text
        print(random_comic)
    except (requests.RequestException, ValueError) as exc:
        # Narrowed from a bare `except:`; also exit with status 1 instead
        # of the previous `sys.exit()` (which reported success).
        print(f'Error: {exc}')
        sys.exit(1)
def download_comic(get_url, format):
    """Download all page images of a comic chapter and package them.

    Parameters:
        get_url: chapter URL on readcomicsonline.ru.
        format: output format -- 'jpg' keeps the image folder as-is,
            'pdf'/'cbz'/'zip' package the folder and then delete it.
            Any other value prints an error message.
    """
    try:
        html_text = requests.get(get_url).text
        soup = BeautifulSoup(html_text, 'lxml')
        divs = soup.find("div", id="all")
        img = divs.find_all('img')
        # Flatten the chapter URL into a directory name,
        # e.g. ".../comic/foo/12" -> "foo-ch-12".
        directory = get_url.replace('https://readcomicsonline.ru/comic/', '').replace('/', '-ch-')
    except (requests.RequestException, AttributeError) as exc:
        # AttributeError covers the page having no <div id="all">.
        # Exit with status 1 (the old `sys.exit()` reported success).
        print(f'Error: {exc}')
        sys.exit(1)
    # Replaces the old try/os.mkdir/except-pass idiom.
    os.makedirs(directory, exist_ok=True)
    for l in img:
        link = l.get('data-src')
        filename = link[-7:]  # assumes fixed-width names like "001.jpg" -- TODO confirm
        full_path = '{}/{}'.format(directory, filename)
        req = Request(link, headers={'User-Agent': 'Mozilla/5.0'})
        web = urlopen(req)
        with open(full_path, 'wb') as f:
            f.write(web.read())
    if format == 'cbz':
        make_archive(directory, 'zip', root_dir=f'./{directory}')
        my_file = f'{directory}.zip'
        base = os.path.splitext(my_file)[0]
        os.rename(my_file, base + '.cbz')
        shutil.rmtree(directory)
        print('completed!')
    elif format == 'zip':
        make_archive(directory, 'zip', root_dir=f'./{directory}')
        shutil.rmtree(directory)
        print('completed!')
    elif format == 'pdf':
        with open(f"{directory}.pdf", "wb") as f:
            f.write(img2pdf.convert(glob.glob(f"{directory}/*.jpg")))
        shutil.rmtree(directory)
        print('completed!')
    elif format == 'jpg':
        # Keep the downloaded images as-is.  (The original had a bare
        # `exit` name here, which referenced the builtin without calling
        # it -- a silent no-op.)
        pass
    else:
        print(f'{format} format not allowed. Try jpg, pdf, cbz or zip')
/BlitzChain-0.8.2.tar.gz/BlitzChain-0.8.2/README.md | # Blitzchain
Retrieval-Augmented Generation For Powerful Results
## Installation
```
pip install blitzchain
```
Once you install, you can get your API key from https://app.twilix.io/
If you would like to then use this for your solutions, we recommend the following:
## QuickStart
```python
import blitzchain
# Get API key from https://app.twilix.io
client = blitzchain.Client(api_key="XYZ")
collection = client.Collection()
# sample documents
handbook_example_1 = {
"section": "Introduction",
"content": "Welcome to ABC Corporation! This employee handbook provides you with important information about our company policies and procedures."
}
handbook_example_2 = {
"section": "Employment",
"content": "At ABC Corporation, we believe in equal opportunity employment. We hire based on qualifications, skills, and experience, without discrimination on the basis of race, gender, religion, or any other protected status."
}
handbook_example_3 = {
"section": "Code of Conduct",
"content": "We expect all employees to conduct themselves professionally and ethically at all times. Treat colleagues, customers, and partners with respect and courtesy."
}
handbook_example_4 = {
"section": "Work Hours",
"content": "Our regular work hours are from 9:00 AM to 5:00 PM, Monday to Friday. Be punctual and adhere to your assigned schedule. Notify your supervisor in advance for any planned time off."
}
handbook_example_5 = {
"section": "Dress Code",
"content": "We maintain a business casual dress code. Dress appropriately for your role, maintaining a clean and professional appearance."
}
handbook_example_6 = {
"section": "Confidentiality",
"content": "As an employee of ABC Corporation, you may come across confidential information. Safeguard and maintain the confidentiality of such information, both during and after your employment."
}
handbook_example_7 = {
"section": "Performance Reviews",
"content": "We conduct regular performance reviews to provide feedback and evaluate your work. This process helps identify areas of improvement and recognize outstanding performance."
}
handbook_example_8 = {
"section": "Leave and Time Off",
"content": "We offer various types of leave, including vacation, sick leave, and parental leave. Familiarize yourself with the procedures and guidelines outlined in our leave policy."
}
handbook_example_9 = {
"section": "Technology Usage",
"content": "Use company-provided technology resources responsibly and solely for work-related purposes. Follow our IT policies and guidelines to ensure the security and integrity of our systems."
}
handbook_example_10 = {
"section": "Termination",
"content": "In the event of termination, return all company property and ensure a smooth transition of responsibilities. Follow the procedures outlined in our termination policy."
}
objects = [
    handbook_example_1,
    handbook_example_2,
    handbook_example_3,
    handbook_example_4,
    handbook_example_5,
    handbook_example_6,
    handbook_example_7,
    handbook_example_8,
    handbook_example_9,
    handbook_example_10
]

collection.insert_objects(objects)
```
## Retrieving RAG Results
```python
collection.generative_qa(
user_input="Why?",
prompt_fields=["content"],
)
```
# Documentation
If you would like to read more about how to use this -
we recommend visiting [https://docs.twilix.io](https://docs.twilix.io)
| PypiClean |
/DLRN-0.26.1.tar.gz/DLRN-0.26.1/dlrn/migrations/versions/2a0313a8a7d6_change_user_usernames_column_length.py | from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2a0313a8a7d6'        # this migration's id
down_revision = 'ade85b2396bc'   # migration applied immediately before this one
branch_labels = None
depends_on = None
def upgrade():
    """Narrow ``users.username`` and the columns referencing it from
    String(256) to String(255).

    The foreign keys pointing at ``users.username`` have to be dropped
    before the column types can be altered, and are recreated afterwards.
    batch_alter_table is used so the migration also works on backends
    with limited ALTER support (e.g. SQLite, via table rebuild).
    """
    # Drop the foreign keys referencing users.username first.
    with op.batch_alter_table("civotes") as batch_op:
        batch_op.drop_constraint('civ_user_fk', type_='foreignkey')
    with op.batch_alter_table("promotions") as batch_op:
        batch_op.drop_constraint('prom_user_fk', type_='foreignkey')
    # Shrink the referenced column ...
    with op.batch_alter_table("users") as batch_op:
        batch_op.alter_column('username', existing_type=sa.String(256),
                              type_=sa.String(255))
    # ... then the referencing columns, recreating each foreign key.
    with op.batch_alter_table("civotes") as batch_op:
        batch_op.alter_column('user', existing_type=sa.String(256),
                              type_=sa.String(255))
        batch_op.create_foreign_key(
            constraint_name="civ_user_fk",
            referent_table="users",
            local_cols=["user"],
            remote_cols=["username"])
    with op.batch_alter_table("promotions") as batch_op:
        batch_op.alter_column('user', existing_type=sa.String(256),
                              type_=sa.String(255))
        batch_op.create_foreign_key(
            constraint_name="prom_user_fk",
            referent_table="users",
            local_cols=["user"],
            remote_cols=["username"])
def downgrade():
    """Revert the username columns from String(255) back to String(256).

    Mirror image of upgrade(): drop the foreign keys, widen the columns,
    then recreate the foreign keys.
    """
    with op.batch_alter_table("civotes") as batch_op:
        batch_op.drop_constraint('civ_user_fk', type_='foreignkey')
    with op.batch_alter_table("promotions") as batch_op:
        batch_op.drop_constraint('prom_user_fk', type_='foreignkey')
    with op.batch_alter_table("users") as batch_op:
        batch_op.alter_column('username', existing_type=sa.String(255),
                              type_=sa.String(256))
    with op.batch_alter_table("civotes") as batch_op:
        batch_op.alter_column('user', existing_type=sa.String(255),
                              type_=sa.String(256))
        batch_op.create_foreign_key(
            constraint_name="civ_user_fk",
            referent_table="users",
            local_cols=["user"],
            remote_cols=["username"])
    with op.batch_alter_table("promotions") as batch_op:
        batch_op.alter_column('user', existing_type=sa.String(255),
                              type_=sa.String(256))
        batch_op.create_foreign_key(
            constraint_name="prom_user_fk",
            referent_table="users",
            local_cols=["user"],
            remote_cols=["username"])
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/lib/scons-4.4.0/SCons/Tool/cc.py |
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import SCons.Tool
import SCons.Defaults
import SCons.Util
# Source suffixes handled by the C compiler ('.m' covers Objective-C).
CSuffixes = ['.c', '.m']
# When suffix comparison is NOT case sensitive (e.g. on Windows), '.C'
# cannot be distinguished from '.c', so treat it as plain C here; on
# case-sensitive systems '.C' is left to the C++ tool.
if not SCons.Util.case_sensitive_suffixes('.c', '.C'):
    CSuffixes.append('.C')
def add_common_cc_variables(env):
    """
    Add underlying common "C compiler" variables that
    are used by multiple tools (specifically, c++).

    Existing settings in *env* are left untouched.
    """
    if '_CCCOMCOM' not in env:
        env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS'
        # It's a hack to test for darwin here, but the alternative
        # of creating an applecc.py to contain this seems overkill.
        # Maybe someday the Apple platform will require more setup and
        # this logic will be moved.
        env['FRAMEWORKS'] = SCons.Util.CLVar('')
        env['FRAMEWORKPATH'] = SCons.Util.CLVar('')
        if env['PLATFORM'] == 'darwin':
            env['_CCCOMCOM'] = env['_CCCOMCOM'] + ' $_FRAMEWORKPATH'

    # Provide empty defaults for the flag variables that are still unset.
    for variable, default in (('CCFLAGS', ''), ('SHCCFLAGS', '$CCFLAGS')):
        if variable not in env:
            env[variable] = SCons.Util.CLVar(default)
# Candidate compiler executable names probed by generate() and exists().
compilers = ['cc']
def generate(env):
    """
    Add Builders and construction variables for C compilers to an Environment.
    """
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Register compile actions and object-file emitters for every
    # C source suffix, for both static and shared object builders.
    for suffix in CSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.CAction)
        shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)

    add_common_cc_variables(env)

    # Detect a compiler only if the user has not already chosen one.
    if 'CC' not in env:
        env['CC'] = env.Detect(compilers) or compilers[0]
    env['CFLAGS'] = SCons.Util.CLVar('')
    env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
    # Shared-object compilation defaults to the same compiler and flags.
    env['SHCC'] = '$CC'
    env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
    env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'

    # Preprocessor flag prefixes/suffixes used to expand CPPDEFINES/CPPPATH.
    env['CPPDEFPREFIX'] = '-D'
    env['CPPDEFSUFFIX'] = ''
    env['INCPREFIX'] = '-I'
    env['INCSUFFIX'] = ''
    env['SHOBJSUFFIX'] = '.os'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0

    env['CFILESUFFIX'] = '.c'
def exists(env):
    """Tool availability check: detect a usable C compiler for *env*."""
    candidates = env.get('CC', compilers)
    return env.Detect(candidates)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | PypiClean |
/MicroPyDD-restplus-0.0.7.tar.gz/MicroPyDD-restplus-0.0.7/README.md | [](https://gitlab.com/micropydd/micropydd-restplus/commits/master) [](https://gitlab.com/micropydd/micropydd-restplus/commits/master) [](https://badge.fury.io/py/MicroPyDD-restplus)
# MicroPyDD Flask-Restplus
This module is a simple wrapper that simplifies the setup of a restplus project. Furthermore, it adds some helper methods to the project:
```
GET /misc/config/
GET /misc/postman/
GET /misc/version/
GET /loggers/{id}/
GET /loggers/
POST /loggers/
``` | PypiClean |
/GBT_parser-1.0.3-py3-none-any.whl/cantools/database/can/formats/arxml/ecu_extract_loader.py | import logging
from decimal import Decimal
from typing import Any, List
from ....utils import sort_signals_by_start_bit, type_sort_signals
from ...bus import Bus
from ...internal_database import InternalDatabase
from ...message import Message
from ...signal import Decimal as SignalDecimal
from ...signal import Signal
def make_xpath(location: List[str]) -> str:
    """Convenience function to traverse the XML element tree more easily
    (This function is only used by the EcuExtractLoader.)"""
    # Prefix the first tag with './ns:' and join the remaining tags with
    # the namespaced separator, e.g. ['A', 'B'] -> './ns:A/ns:B'.
    path = './ns:'
    path += '/ns:'.join(location)
    return path
LOGGER = logging.getLogger(__name__)

# The ARXML XML namespace for the EcuExtractLoader
NAMESPACE = 'http://autosar.org/schema/r4.0'
NAMESPACES = {'ns': NAMESPACE}

# Pre-built namespaced XPaths used while walking the ECU extract.
ECUC_VALUE_COLLECTION_XPATH = make_xpath([
    'AR-PACKAGES',
    'AR-PACKAGE',
    'ELEMENTS',
    'ECUC-VALUE-COLLECTION'
])
ECUC_MODULE_CONFIGURATION_VALUES_REF_XPATH = make_xpath([
    'ECUC-VALUES',
    'ECUC-MODULE-CONFIGURATION-VALUES-REF-CONDITIONAL',
    'ECUC-MODULE-CONFIGURATION-VALUES-REF'
])
ECUC_REFERENCE_VALUE_XPATH = make_xpath([
    'REFERENCE-VALUES',
    'ECUC-REFERENCE-VALUE'
])
DEFINITION_REF_XPATH = make_xpath(['DEFINITION-REF'])
VALUE_XPATH = make_xpath(['VALUE'])
VALUE_REF_XPATH = make_xpath(['VALUE-REF'])
SHORT_NAME_XPATH = make_xpath(['SHORT-NAME'])
PARAMETER_VALUES_XPATH = make_xpath(['PARAMETER-VALUES'])
REFERENCE_VALUES_XPATH = make_xpath([
    'REFERENCE-VALUES'
])
class EcuExtractLoader:
    """Load a CAN database from an AUTOSAR 4 "ECU extract" ARXML tree.

    The extract is traversed starting from its ECUC-VALUE-COLLECTION,
    following the Com module configuration for messages/signals and the
    CanIf module configuration for frame ids, lengths and frame types.
    """

    def __init__(self,
                 root:Any,
                 strict:bool,
                 sort_signals:type_sort_signals=sort_signals_by_start_bit):
        # root: ElementTree root of the parsed ARXML document
        # strict: forwarded to Message() (enables layout sanity checks)
        # sort_signals: ordering applied to each message's signal list
        self.root = root
        self.strict = strict
        self.sort_signals = sort_signals

    def load(self) -> InternalDatabase:
        """Build and return the InternalDatabase described by the extract.

        Raises ValueError if the extract does not reference exactly one
        /Com module configuration.
        """
        buses:List[Bus] = []
        messages = []
        version = None

        ecuc_value_collection = self.root.find(ECUC_VALUE_COLLECTION_XPATH,
                                               NAMESPACES)
        values_refs = ecuc_value_collection.iterfind(
            ECUC_MODULE_CONFIGURATION_VALUES_REF_XPATH,
            NAMESPACES)
        com_xpaths = [
            value_ref.text
            for value_ref in values_refs
            if value_ref.text.endswith('/Com')
        ]

        if len(com_xpaths) != 1:
            raise ValueError(
                f'Expected 1 /Com, but got {len(com_xpaths)}.')

        com_config = self.find_com_config(com_xpaths[0] + '/ComConfig')

        for ecuc_container_value in com_config:
            definition_ref = ecuc_container_value.find(DEFINITION_REF_XPATH,
                                                       NAMESPACES).text
            # Only ComIPdu containers describe messages.
            if not definition_ref.endswith('ComIPdu'):
                continue

            message = self.load_message(ecuc_container_value)

            if message is not None:
                messages.append(message)

        return InternalDatabase(messages,
                                [],
                                buses,
                                version)

    def load_message(self, com_i_pdu):
        """Create a Message from a ComIPdu container.

        Returns None (with a warning) when frame id, frame type or length
        cannot be determined from the CanIf configuration.
        """
        # Default values.
        interval = None
        senders = []
        comments = None

        # Name, frame id, length and is_extended_frame.
        name = com_i_pdu.find(SHORT_NAME_XPATH, NAMESPACES).text
        direction = None

        for parameter, value in self.iter_parameter_values(com_i_pdu):
            if parameter == 'ComIPduDirection':
                direction = value
                break

        com_pdu_id_ref = None

        for reference, value in self.iter_reference_values(com_i_pdu):
            if reference == 'ComPduIdRef':
                com_pdu_id_ref = value
                break

        if com_pdu_id_ref is None:
            raise ValueError('No ComPduIdRef reference found.')

        # Frame id, length and frame type come from the CanIf PDU
        # configuration referenced by ComPduIdRef.
        if direction == 'SEND':
            frame_id, length, is_extended_frame = self.load_message_tx(
                com_pdu_id_ref)
        elif direction == 'RECEIVE':
            frame_id, length, is_extended_frame = self.load_message_rx(
                com_pdu_id_ref)
        else:
            raise NotImplementedError(
                f'Direction {direction} not supported.')

        if frame_id is None:
            LOGGER.warning('No frame id found for message %s.', name)
            return None

        if is_extended_frame is None:
            LOGGER.warning('No frame type found for message %s.', name)
            return None

        if length is None:
            LOGGER.warning('No length found for message %s.', name)
            return None

        # ToDo: interval, senders, comments

        # Find all signals in this message.
        signals = []
        values = com_i_pdu.iterfind(ECUC_REFERENCE_VALUE_XPATH,
                                    NAMESPACES)

        for value in values:
            definition_ref = value.find(DEFINITION_REF_XPATH,
                                        NAMESPACES).text
            if not definition_ref.endswith('ComIPduSignalRef'):
                continue

            value_ref = value.find(VALUE_REF_XPATH, NAMESPACES)
            signal = self.load_signal(value_ref.text)

            if signal is not None:
                signals.append(signal)

        return Message(frame_id=frame_id,
                       is_extended_frame=is_extended_frame,
                       name=name,
                       length=length,
                       senders=senders,
                       send_type=None,
                       cycle_time=interval,
                       signals=signals,
                       comment=comments,
                       bus_name=None,
                       strict=self.strict,
                       sort_signals=self.sort_signals)

    def load_message_tx(self, com_pdu_id_ref):
        # Transmitted PDUs are described by CanIfTxPduCfg parameters.
        return self.load_message_rx_tx(com_pdu_id_ref,
                                       'CanIfTxPduCanId',
                                       'CanIfTxPduDlc',
                                       'CanIfTxPduCanIdType')

    def load_message_rx(self, com_pdu_id_ref):
        # Received PDUs are described by CanIfRxPduCfg parameters.
        return self.load_message_rx_tx(com_pdu_id_ref,
                                       'CanIfRxPduCanId',
                                       'CanIfRxPduDlc',
                                       'CanIfRxPduCanIdType')

    def load_message_rx_tx(self,
                           com_pdu_id_ref,
                           parameter_can_id,
                           parameter_dlc,
                           parameter_can_id_type):
        """Extract (frame_id, length, is_extended_frame) from the CanIf
        PDU configuration; each element is None if not found."""
        can_if_tx_pdu_cfg = self.find_can_if_rx_tx_pdu_cfg(com_pdu_id_ref)
        frame_id = None
        length = None
        is_extended_frame = None

        if can_if_tx_pdu_cfg is not None:
            for parameter, value in self.iter_parameter_values(can_if_tx_pdu_cfg):
                if parameter == parameter_can_id:
                    frame_id = int(value)
                elif parameter == parameter_dlc:
                    length = int(value)
                elif parameter == parameter_can_id_type:
                    is_extended_frame = (value == 'EXTENDED_CAN')

        return frame_id, length, is_extended_frame

    def load_signal(self, xpath):
        """Create a Signal from the ComSignal container at *xpath*.

        Returns None (with a warning) when the container does not exist or
        bit position, bit size or endianness is missing.
        """
        ecuc_container_value = self.find_value(xpath)
        if ecuc_container_value is None:
            return None

        name = ecuc_container_value.find(SHORT_NAME_XPATH, NAMESPACES).text

        # Default values.
        is_signed = False
        is_float = False
        minimum = None
        maximum = None
        factor = 1.0
        offset = 0.0
        unit = None
        choices = None
        comments = None
        receivers = []
        decimal = SignalDecimal(Decimal(factor), Decimal(offset))

        # Bit position, length, byte order, is_signed and is_float.
        bit_position = None
        length = None
        byte_order = None

        for parameter, value in self.iter_parameter_values(ecuc_container_value):
            if parameter == 'ComBitPosition':
                bit_position = int(value)
            elif parameter == 'ComBitSize':
                length = int(value)
            elif parameter == 'ComSignalEndianness':
                byte_order = value.lower()
            elif parameter == 'ComSignalType':
                if value in ['SINT8', 'SINT16', 'SINT32']:
                    is_signed = True
                elif value in ['FLOAT32', 'FLOAT64']:
                    is_float = True

        if bit_position is None:
            LOGGER.warning('No bit position found for signal %s.',name)
            return None

        if length is None:
            LOGGER.warning('No bit size found for signal %s.', name)
            return None

        if byte_order is None:
            LOGGER.warning('No endianness found for signal %s.', name)
            return None

        # ToDo: minimum, maximum, factor, offset, unit, choices,
        # comments and receivers.

        return Signal(name=name,
                      start=bit_position,
                      length=length,
                      receivers=receivers,
                      byte_order=byte_order,
                      is_signed=is_signed,
                      scale=factor,
                      offset=offset,
                      minimum=minimum,
                      maximum=maximum,
                      unit=unit,
                      choices=choices,
                      comment=comments,
                      is_float=is_float,
                      decimal=decimal)

    def find_com_config(self, xpath):
        """Return the SUB-CONTAINERS element of the Com module's ComConfig
        container located via the package name in *xpath*."""
        return self.root.find(make_xpath([
            "AR-PACKAGES",
            "AR-PACKAGE/[ns:SHORT-NAME='{}']".format(xpath.split('/')[1]),
            "ELEMENTS",
            "ECUC-MODULE-CONFIGURATION-VALUES/[ns:SHORT-NAME='Com']",
            "CONTAINERS",
            "ECUC-CONTAINER-VALUE/[ns:SHORT-NAME='ComConfig']",
            "SUB-CONTAINERS"
        ]),
            NAMESPACES)

    def find_value(self, xpath):
        """Return the ECUC-CONTAINER-VALUE (e.g. a ComSignal) named by the
        last component of *xpath* inside the Com module's ComConfig."""
        return self.root.find(make_xpath([
            "AR-PACKAGES",
            "AR-PACKAGE/[ns:SHORT-NAME='{}']".format(xpath.split('/')[1]),
            "ELEMENTS",
            "ECUC-MODULE-CONFIGURATION-VALUES/[ns:SHORT-NAME='Com']",
            "CONTAINERS",
            "ECUC-CONTAINER-VALUE/[ns:SHORT-NAME='ComConfig']",
            "SUB-CONTAINERS",
            "ECUC-CONTAINER-VALUE/[ns:SHORT-NAME='{}']".format(xpath.split('/')[-1])
        ]),
            NAMESPACES)

    def find_can_if_rx_tx_pdu_cfg(self, com_pdu_id_ref):
        """Find the CanIfTxPduCfg/CanIfRxPduCfg container whose
        CanIfTxPduRef/CanIfRxPduRef points at *com_pdu_id_ref*.

        Returns None (implicitly) when no matching container exists.
        """
        messages = self.root.iterfind(
            make_xpath([
                "AR-PACKAGES",
                "AR-PACKAGE/[ns:SHORT-NAME='{}']".format(
                    com_pdu_id_ref.split('/')[1]),
                "ELEMENTS",
                "ECUC-MODULE-CONFIGURATION-VALUES/[ns:SHORT-NAME='CanIf']",
                'CONTAINERS',
                "ECUC-CONTAINER-VALUE/[ns:SHORT-NAME='CanIfInitCfg']",
                'SUB-CONTAINERS',
                'ECUC-CONTAINER-VALUE'
            ]),
            NAMESPACES)

        for message in messages:
            definition_ref = message.find(DEFINITION_REF_XPATH,
                                          NAMESPACES).text

            if definition_ref.endswith('CanIfTxPduCfg'):
                expected_reference = 'CanIfTxPduRef'
            elif definition_ref.endswith('CanIfRxPduCfg'):
                expected_reference = 'CanIfRxPduRef'
            else:
                continue

            for reference, value in self.iter_reference_values(message):
                if reference == expected_reference:
                    if value == com_pdu_id_ref:
                        return message

    def iter_parameter_values(self, param_conf_container):
        """Yield (name, value) for each entry in PARAMETER-VALUES, where
        name is the last component of the entry's DEFINITION-REF."""
        parameters = param_conf_container.find(PARAMETER_VALUES_XPATH,
                                               NAMESPACES)

        if parameters is None:
            raise ValueError('PARAMETER-VALUES does not exist.')

        for parameter in parameters:
            definition_ref = parameter.find(DEFINITION_REF_XPATH,
                                            NAMESPACES).text
            value = parameter.find(VALUE_XPATH, NAMESPACES).text
            name = definition_ref.split('/')[-1]

            yield name, value

    def iter_reference_values(self, param_conf_container):
        """Yield (name, value) for each entry in REFERENCE-VALUES, where
        name is the last component of the entry's DEFINITION-REF."""
        references = param_conf_container.find(REFERENCE_VALUES_XPATH,
                                               NAMESPACES)

        if references is None:
            raise ValueError('REFERENCE-VALUES does not exist.')

        for reference in references:
            definition_ref = reference.find(DEFINITION_REF_XPATH,
                                            NAMESPACES).text
            value = reference.find(VALUE_REF_XPATH, NAMESPACES).text
            name = definition_ref.split('/')[-1]

            yield name, value
/FFGo-1.12.7-py3-none-any.whl/ffgo/gui/tooltip.py | from tkinter import *
from .. import constants
from ..constants import TOOLTIP_BG_COL, TOOLTIP_DELAY
class ToolTipBase(Toplevel):
    """Base class for tooltips: a borderless Toplevel shown near the mouse
    pointer after a delay while the pointer rests over the master widget.

    Subclasses are expected to override __init__(), createLabel() and
    prepareText(), and to call postInit() once their own initialization
    is done.
    """

    def __init__(self, master, bgColor=TOOLTIP_BG_COL,
                 offsetx=10, offsety=10, delay=TOOLTIP_DELAY,
                 wraplength=0, autowrap=False):
        # master: widget the tooltip is attached to
        # bgColor: tooltip background color
        # offsetx, offsety: offsets of the tooltip from the pointer position
        # delay: delay before showing the tooltip, in milliseconds
        # wraplength: width for automatic wrapping of the label text
        #     (0 disables wrapping)
        # autowrap: if True, use the standard AUTOWRAP_TOOLTIP_WIDTH
        #     instead of 'wraplength'
        Toplevel.__init__(self, master)
        self.offsetx = offsetx
        self.offsety = offsety
        self.delay = delay
        self.id = None        # pending after() callback id, if any
        self.lastPos = None   # last pointer position relative to master
        self.bgColor = bgColor
        if autowrap:
            self.wraplength = constants.AUTOWRAP_TOOLTIP_WIDTH
        else:
            self.wraplength = wraplength
        # With some widgets as the master (e.g., Menu under Tk 8.6), the Motion
        # event may occur even if the mouse pointer is outside the widget area.
        # Therefore, we use a boolean to keep track of whether the pointer is
        # inside the widget or outside, and thus whether the tooltip can be
        # shown or not.
        self.canBeShown = False

    def postInit(self):
        """Finish initialization; call at the end of subclass __init__()."""
        self.createWindow()
        self.bindToMaster()

    def bindToMaster(self):
        self.master.bind('<Enter>', self.onEnter)
        self.master.bind('<Motion>', self.onMotion)
        self.master.bind('<Leave>', self.onLeave)
        self.master.bind('<Button>', self.hide)
        # Without this, there would be a _tkinter.TclError during the
        # deiconify() call in self.show() if the window containing self.master
        # were closed and the tooltip tried to appear afterwards. This also
        # hides the tooltip when the user switches to another workspace.
        self.master.bind('<Unmap>', self.hide)

    def scheduleTooltip(self, event):
        # Arm the timer that shows the tooltip after 'delay' milliseconds.
        self.id = self.master.after(self.delay, self.prepareAndShow, event)

    def createWindow(self):
        # Undecorated window that starts hidden.
        self.overrideredirect(True)
        self.createLabel().pack()
        self.withdraw()

    def prepareText(self, event):
        """Prepare the tooltip text.

        Return True to allow the tooltip to be shown, False otherwise.
        This is one of methods subclasses are likely to need to
        override, along with __init__() and createLabel().
        """
        # This means: don't show the tooltip this time
        return False

    def prepareAndShow(self, event):
        if self.prepareText(event):
            # The tooltip text is ready and we are “authorized” to show it
            self.show(event)

    def show(self, event):
        self.update()
        self.deiconify()

    def adjustPosition(self, event):
        # Last known position of the mouse pointer, relative to the
        # top-left corner of the widget.
        self.lastPos = (event.x, event.y)
        # BUG FIX: the vertical offset previously reused self.offsetx,
        # making the 'offsety' parameter ineffective.
        self.geometry('+{0}+{1}'.format(event.x_root + self.offsetx,
                                        event.y_root + self.offsety))

    # Used as an event handler (requires the 'event' parameter) as well as from
    # other parts of the program (not necessarily with an event to pass as
    # argument).
    def hide(self, event=None):
        self.withdraw()
        self.cancelId()

    def cancelId(self):
        # Cancel a pending "show tooltip" timer, if any.
        if self.id is not None:
            self.master.after_cancel(self.id)
            self.id = None

    def onEnter(self, event):
        self.canBeShown = True
        self.adjustPosition(event)
        self.scheduleTooltip(event)

    def onMotion(self, event):
        self.hide()
        if self.canBeShown:
            self.adjustPosition(event)
            self.scheduleTooltip(event)

    def onLeave(self, event):
        self.canBeShown = False
        self.hide()
class ToolTip(ToolTipBase):
    """Tooltip widget displaying a fixed text (or a StringVar's value).

    The tooltip appears at the mouse position when the pointer hovers
    over the master widget.

    Arguments are:
      master:       parent widget
      text:         message to display, or None if using 'textvariable'.
                    This is for static tooltips.
      textvariable: StringVar corresponding to a message to display, or
                    None if using 'text'. This allows to easily change
                    the tooltip text without creating a new tooltip.
      wraplength:   width for automatic wrapping of the label text (no
                    automatic wrapping by default)
      autowrap:     if True, set 'wraplength' to
                    constants.AUTOWRAP_TOOLTIP_WIDTH to provide a
                    standard width for automatically-wrapped tooltips
      bgColor:      background color
      offsetx, offsety: offset from cursor position
      delay:        delay in milliseconds

    Old note: ToolTip might not work properly with the Frame widget. It
    seems that '<Motion>' events have some problems with getting updated
    cursor position there.

    Update: this is probably worked around now since the addition of
    ToolTipBase.canBeShown in Oct 2015 for the Menu widget, which
    received Motion events even after the mouse pointer left the widget.
    """

    def __init__(self, master, text=None, textvariable=None, **kwargs):
        ToolTipBase.__init__(self, master, **kwargs)
        self.text = text
        self.textvariable = textvariable
        self.postInit()

    def createLabel(self):
        # Prefer the static text when given; otherwise track the StringVar.
        if self.text is None:
            contents = {"textvariable": self.textvariable}
        else:
            contents = {"text": self.text}
        return Label(self, bg=self.bgColor, justify=LEFT,
                     wraplength=self.wraplength, **contents)

    def prepareText(self, event):
        # The text was fixed at construction time, so there is nothing
        # to prepare: always allow the tooltip to be shown.
        return True
class MapBasedToolTip(ToolTipBase):
    """Abstract base class for map-based tooltips.

    A map-based tooltip obtains its text from a callback that maps
    parts of the underlying widget (rows, columns, or more general item
    identifiers) to particular tooltip texts.
    """

    def __init__(self, master, itemTextFunc, **kwargs):
        """Constructor for MapBasedToolTip instances.

        master       -- a widget
        itemTextFunc -- a callback whose signature may vary among
                        concrete subclasses of this class. Its
                        argument(s) should allow to determine an
                        appropriate tooltip text. A return value of
                        None means "show no tooltip"; otherwise, the
                        returned string is used as the tooltip text.

        Additional keyword arguments are passed to ToolTipBase's
        constructor.
        """
        ToolTipBase.__init__(self, master, **kwargs)
        self.itemTextFunc = itemTextFunc
        self.textVar = StringVar()  # holds the text currently displayed
        self.postInit()

    def createLabel(self):
        return Label(self, textvariable=self.textVar, bg=self.bgColor,
                     justify=LEFT, wraplength=self.wraplength)

    def setItemTextFunc(self, itemTextFunc):
        """Replace the existing 'itemTextFunc' callback function."""
        self.itemTextFunc = itemTextFunc
class ListBoxToolTip(MapBasedToolTip):
    """Map-based tooltip for Listbox widgets (one text per item index)."""

    def __init__(self, master, itemTextFunc=lambda i: None, **kwargs):
        """Constructor for ListBoxToolTip instances.

        master       -- a ListBox instance
        itemTextFunc -- a callback taking one argument: the index of an
                        item in the ListBox (starting from 0). A return
                        value of None means "show no tooltip for this
                        item"; otherwise, the returned string is used
                        as the tooltip text for the item.

        Additional keyword arguments are passed to ToolTipBase's
        constructor.
        """
        MapBasedToolTip.__init__(self, master, itemTextFunc, **kwargs)

    def prepareText(self, event):
        if self.lastPos is None:
            return False

        y = self.lastPos[1]
        index = self.master.nearest(y)
        box = self.master.bbox(index)
        if box is None:
            return False

        # box is (xOffset, yOffset, width, height); only show the
        # tooltip when the pointer is on the row of the nearest item.
        rowTop, rowHeight = box[1], box[3]
        if not (rowTop <= y < rowTop + rowHeight):
            return False

        text = self.itemTextFunc(index)
        if text is None:
            return False
        self.textVar.set(text)
        return True
class MenuToolTip(MapBasedToolTip):
    """Map-based tooltip for Menu widgets (one text per entry index)."""

    def __init__(self, master, itemTextFunc=lambda i: None, **kwargs):
        """Constructor for MenuToolTip instances.

        master       -- the master widget; should be a Menu instance or
                        a compatible object
        itemTextFunc -- a callback taking one argument: the index of an
                        item in the Menu (starting from 0). A return
                        value of None means "show no tooltip for this
                        item"; otherwise, the returned string is used
                        as the tooltip text for the item.

        Additional keyword arguments are passed to ToolTipBase's
        constructor.
        """
        MapBasedToolTip.__init__(self, master, itemTextFunc, **kwargs)
        self.highlightedItemIndex = None  # index of the active menu entry

    def bindToMaster(self):
        ToolTipBase.bindToMaster(self)
        self.master.bind('<<MenuSelect>>', self.onMenuSelect)

    def onMenuSelect(self, event):
        # Track the currently highlighted entry; this becomes None when
        # the pointer leaves the Menu widget.
        self.highlightedItemIndex = event.widget.index('active')

    def prepareText(self, event):
        if self.highlightedItemIndex is None:
            return False
        text = self.itemTextFunc(self.highlightedItemIndex)
        if text is None:
            # There is no tooltip to show for this item.
            return False
        self.textVar.set(text)   # set the tooltip text
        return True              # tell the caller the tooltip must be shown
class TreeviewToolTip(MapBasedToolTip):
    """Map-based tooltip for Treeview widgets (one text per cell/area)."""

    def __init__(self, master,
                 itemTextFunc=lambda region, itemID, column: None, **kwargs):
        """Constructor for TreeviewToolTip instances.

        master       -- a Treeview instance
        itemTextFunc -- a callback taking three arguments:
                          - the region of the Treeview widget, as
                            returned by Treeview.identify_region();
                          - the item identifier, as returned by
                            Treeview.identify_row();
                          - the data column identifier of the cell, as
                            returned by Treeview.identify_column().
                        A return value of None means "show no tooltip
                        for this cell/area"; otherwise, the returned
                        string is used as the tooltip text.

        Additional keyword arguments are passed to ToolTipBase's
        constructor.
        """
        MapBasedToolTip.__init__(self, master, itemTextFunc, **kwargs)

    def prepareText(self, event):
        if self.lastPos is None:
            return False
        # Identify the cell/area under the pointer and ask the callback
        # for the corresponding tooltip text.
        text = self.itemTextFunc(
            self.master.identify_region(event.x, event.y),
            self.master.identify_row(event.y),
            self.master.identify_column(event.x))
        if text is None:
            return False
        self.textVar.set(text)
        return True
/Mopidy-Yap-0.1.3.tar.gz/Mopidy-Yap-0.1.3/mopidy_yap/frontend.py | import json
import time
import urllib.parse
import urllib.error
import pykka
import logging
from tornado.httpclient import HTTPClient, HTTPRequest
from mopidy.core import CoreListener, Core
# Module-level logger for this frontend.
logger = logging.getLogger(__name__)
class YapFrontend(pykka.ThreadingActor, CoreListener):
    """Mopidy frontend actor for Yap.

    When the tracklist is about to run dry and autoplay is enabled,
    fetch a Spotify recommendation seeded by the track that just ended
    and append it to the tracklist.
    """

    def __init__(self, config, core: Core):
        super().__init__()
        self.config = config
        self.core = core
        # Cached client-credentials token (dict returned by the auth
        # endpoint, augmented with an 'expires_at' timestamp), or None.
        self.spotify_token = None

    def on_start(self):
        pass

    def on_stop(self):
        pass

    def track_playback_ended(self, tl_track, time_position):
        """CoreListener hook: queue a recommendation when only one track
        remains after playback of 'tl_track' ended."""
        if (self.config["spotify"]
                and self.config["spotify"]["enabled"]
                and self.config["yap"]["autoplay"]):
            tl_length = self.core.tracklist.get_length().get()
            if tl_length == 1:
                uris = self.load_more_tracks([tl_track.track.uri])
                if uris:
                    self.core.tracklist.add(uris=uris)

    @staticmethod
    def _http_error_message(e):
        """Best-effort human-readable detail for a tornado HTTPError."""
        try:
            body = json.loads(e.response.body)
            return body["error_description"]
        except Exception:
            return str(e)

    def refresh_spotify_token(self):
        """Fetch a fresh client-credentials token from auth.mopidy.com.

        Returns {'spotify_token': token} on success (the token is also
        cached on self), or {'message': ...} on failure.
        """
        try:
            url = "https://auth.mopidy.com/spotify/token"
            data = {
                "client_id": self.config["spotify"]["client_id"],
                "client_secret": self.config["spotify"]["client_secret"],
                "grant_type": "client_credentials",
            }
        except Exception:
            return {
                "message": "Could not refresh Spotify token: invalid configuration"
            }

        http_client = HTTPClient()
        try:
            request = HTTPRequest(
                url, method="POST", body=urllib.parse.urlencode(data)
            )
            response = http_client.fetch(request)
            token = json.loads(response.body)
            token["expires_at"] = time.time() + token["expires_in"]
            self.spotify_token = token
            return {"spotify_token": token}
        except HTTPError as e:
            # Bug fix: tornado's HTTPClient raises tornado.httpclient.HTTPError;
            # the original code caught urllib.error.* exceptions, which this
            # client never raises, and called e.read(), which tornado errors
            # do not provide.
            message = ("Could not refresh Spotify token: "
                       + self._http_error_message(e))
            logger.error(message)
            return {"message": message}
        finally:
            http_client.close()  # avoid leaking the client between calls

    def get_spotify_token(self):
        """Return the cached token, refreshing it first if expired/missing."""
        if (
            not self.spotify_token
            or self.spotify_token["expires_at"] <= time.time()
        ):
            self.refresh_spotify_token()
        response = {"spotify_token": self.spotify_token}
        return response

    def load_more_tracks(self, seed_tracks: list):
        """Return a list of recommended Spotify track URIs seeded by
        'seed_tracks' (a list of 'spotify:track:...' URIs), or False on
        any failure."""
        logger.debug("Loading a new track from Spotify")
        try:
            logger.debug("getting spotify token")
            self.get_spotify_token()
            access_token = self.spotify_token["access_token"]
        except Exception:
            # Was 'except BaseException', which would also swallow
            # KeyboardInterrupt/SystemExit.
            logger.error("MopidyFrontend: access_token missing or invalid")
            return False

        seeds = (",".join(seed_tracks)).replace("spotify:track:", "")
        url = ("https://api.spotify.com/v1/recommendations/"
               "?seed_tracks=" + seeds + "&limit=1")

        http_client = HTTPClient()
        try:
            http_response = http_client.fetch(
                url, headers={"Authorization": "Bearer " + access_token}
            )
            response_body = json.loads(http_response.body)
            return [track["uri"] for track in response_body["tracks"]]
        except HTTPError as e:
            # Same fix as in refresh_spotify_token(): catch the tornado
            # error actually raised by HTTPClient.fetch().
            message = ("Could not fetch Spotify recommendations: "
                       + self._http_error_message(e))
            logger.error(message)
            return False
        finally:
            http_client.close()
/Flask-CKEditor-0.4.6.tar.gz/Flask-CKEditor-0.4.6/flask_ckeditor/static/standard/plugins/specialchar/dialogs/lang/he.js | /*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang("specialchar","he",{euro:"יורו",lsquo:"סימן ציטוט יחיד שמאלי",rsquo:"סימן ציטוט יחיד ימני",ldquo:"סימן ציטוט כפול שמאלי",rdquo:"סימן ציטוט כפול ימני",ndash:"קו מפריד קצר",mdash:"קו מפריד ארוך",iexcl:"סימן קריאה הפוך",cent:"סנט",pound:"פאונד",curren:"מטבע",yen:"ין",brvbar:"קו שבור",sect:"סימן מקטע",uml:"שתי נקודות אופקיות (Diaeresis)",copy:"סימן זכויות יוצרים (Copyright)",ordf:"סימן אורדינאלי נקבי",laquo:"סימן ציטוט זווית כפולה לשמאל",not:"סימן שלילה מתמטי",reg:"סימן רשום",
macr:"מקרון (הגיה ארוכה)",deg:"מעלות",sup2:"2 בכתיב עילי",sup3:"3 בכתיב עילי",acute:"סימן דגוש (Acute)",micro:"מיקרו",para:"סימון פסקה",middot:"נקודה אמצעית",cedil:"סדיליה",sup1:"1 בכתיב עילי",ordm:"סימן אורדינאלי זכרי",raquo:"סימן ציטוט זווית כפולה לימין",frac14:"רבע בשבר פשוט",frac12:"חצי בשבר פשוט",frac34:"שלושה רבעים בשבר פשוט",iquest:"סימן שאלה הפוך",Agrave:"אות לטינית A עם גרש (Grave)",Aacute:"Latin capital letter A with acute accent",Acirc:"Latin capital letter A with circumflex",Atilde:"Latin capital letter A with tilde",
Auml:"Latin capital letter A with diaeresis",Aring:"Latin capital letter A with ring above",AElig:"אות לטינית Æ גדולה",Ccedil:"Latin capital letter C with cedilla",Egrave:"אות לטינית E עם גרש (Grave)",Eacute:"Latin capital letter E with acute accent",Ecirc:"Latin capital letter E with circumflex",Euml:"Latin capital letter E with diaeresis",Igrave:"אות לטינית I עם גרש (Grave)",Iacute:"Latin capital letter I with acute accent",Icirc:"Latin capital letter I with circumflex",Iuml:"Latin capital letter I with diaeresis",
ETH:"אות לטינית Eth גדולה",Ntilde:"Latin capital letter N with tilde",Ograve:"אות לטינית O עם גרש (Grave)",Oacute:"Latin capital letter O with acute accent",Ocirc:"Latin capital letter O with circumflex",Otilde:"Latin capital letter O with tilde",Ouml:"Latin capital letter O with diaeresis",times:"סימן כפל",Oslash:"Latin capital letter O with stroke",Ugrave:"אות לטינית U עם גרש (Grave)",Uacute:"Latin capital letter U with acute accent",Ucirc:"Latin capital letter U with circumflex",Uuml:"Latin capital letter U with diaeresis",
Yacute:"Latin capital letter Y with acute accent",THORN:"אות לטינית Thorn גדולה",szlig:"אות לטינית s חדה קטנה",agrave:"אות לטינית a עם גרש (Grave)",aacute:"Latin small letter a with acute accent",acirc:"Latin small letter a with circumflex",atilde:"Latin small letter a with tilde",auml:"Latin small letter a with diaeresis",aring:"Latin small letter a with ring above",aelig:"אות לטינית æ קטנה",ccedil:"Latin small letter c with cedilla",egrave:"אות לטינית e עם גרש (Grave)",eacute:"Latin small letter e with acute accent",
ecirc:"Latin small letter e with circumflex",euml:"Latin small letter e with diaeresis",igrave:"אות לטינית i עם גרש (Grave)",iacute:"Latin small letter i with acute accent",icirc:"Latin small letter i with circumflex",iuml:"Latin small letter i with diaeresis",eth:"אות לטינית eth קטנה",ntilde:"Latin small letter n with tilde",ograve:"אות לטינית o עם גרש (Grave)",oacute:"Latin small letter o with acute accent",ocirc:"Latin small letter o with circumflex",otilde:"Latin small letter o with tilde",ouml:"Latin small letter o with diaeresis",
divide:"סימן חלוקה",oslash:"Latin small letter o with stroke",ugrave:"אות לטינית u עם גרש (Grave)",uacute:"Latin small letter u with acute accent",ucirc:"Latin small letter u with circumflex",uuml:"Latin small letter u with diaeresis",yacute:"Latin small letter y with acute accent",thorn:"אות לטינית thorn קטנה",yuml:"Latin small letter y with diaeresis",OElig:"Latin capital ligature OE",oelig:"Latin small ligature oe",372:"Latin capital letter W with circumflex",374:"Latin capital letter Y with circumflex",
373:"Latin small letter w with circumflex",375:"Latin small letter y with circumflex",sbquo:"סימן ציטוט נמוך יחיד",8219:"סימן ציטוט",bdquo:"סימן ציטוט נמוך כפול",hellip:"שלוש נקודות",trade:"סימן טריידמארק",9658:"סמן שחור לצד ימין",bull:"תבליט (רשימה)",rarr:"חץ לימין",rArr:"חץ כפול לימין",hArr:"חץ כפול לימין ושמאל",diams:"יהלום מלא",asymp:"כמעט שווה"}); | PypiClean |
/Larango-0.1.3.tar.gz/Larango-0.1.3/src/larango/__init__.py | from importlib import import_module
from django.core.management import ManagementUtility
from larango.commands import BaseCommand
from larango.utils.colors import Color
import importlib
import inspect
import sys
import os
import pkgutil
import django
# Public version string of the Larango package.
VERSION = '0.1.3'
def execute_from_command_line(argv=None):
    """Entry point for the Larango command-line interface.

    argv -- full argument vector (defaults to sys.argv); argv[1] is the
            command name, the rest are passed to the command's parser.

    Django management commands are forwarded to Django's own
    ManagementUtility; Larango and user commands are loaded via
    load_command() and executed directly.
    """
    argv = argv or sys.argv[:]
    larango_commands = find_larango_commands()
    django_commands = find_django_commands()
    user_commands = find_user_commands()
    all_commands = larango_commands + django_commands + user_commands

    try:
        command = argv[1]
    except IndexError:
        command = 'help'
    # Idiom fix: replace the duplicated equality tests with a membership test.
    if command in ('-h', '--help'):
        command = 'help'

    if command not in all_commands:
        os.system('color')  # enable ANSI colors on Windows consoles
        print('Unknown command: ' + Color.red(command))
        return

    if command in django_commands:
        # Django commands need a project directory and a settings module.
        if not is_larango_directory():
            os.system('color')
            print(Color.red('No Larango Project in current directory'))
            return
        sys.path.append(os.getcwd())
        os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.django.settings')
        django.setup()
        utility = ManagementUtility(argv)
        utility.execute()
        return

    os.system('color')
    command_object = load_command(command)
    # Idiom fix: 'is None' instead of '== None'.
    if command_object is None:
        print('Error in command: ' + Color.red(command))
        return
    command_object.create_parser(command)
    command_object.add_arguments()
    # Commands that need project settings refuse to run outside a project.
    if command_object.need_settings and not command_object.load_settings():
        print(Color.red('No Larango Project in current directory'))
        return
    args = command_object.parser.parse_args(argv[2:])
    command_object.handle(args)
def find_all_commands():
    """Return a sorted, de-duplicated list of every available command name."""
    commands = set(find_larango_commands())
    commands.update(find_django_commands())
    commands.update(find_user_commands())
    return sorted(commands)
def find_django_commands():
    """Names of the Django management commands this CLI forwards to Django."""
    commands = ['runserver', 'showmigrations', 'makemigrations', 'migrate']
    return commands
def find_larango_commands():
    """Names of the command modules bundled in larango/commands.

    Packages and modules whose name starts with '_' are excluded.
    """
    commands_path = os.path.join(__path__[0], 'commands')
    names = []
    for _, name, is_pkg in pkgutil.iter_modules([commands_path]):
        if not is_pkg and not name.startswith('_'):
            names.append(name)
    return names
def find_user_commands():
    """Names of user-defined command modules in <cwd>/commands.

    Returns an empty list when the current directory is not a Larango
    project. Packages and '_'-prefixed modules are excluded.
    """
    if not is_larango_directory():
        return []
    commands_path = os.path.join(os.getcwd(), 'commands')
    return [name for _, name, is_pkg in pkgutil.iter_modules([commands_path])
            if not is_pkg and not name.startswith('_')]
def load_command(command):
    """Load and instantiate the Command class for the given command name.

    Looks first for a bundled module 'larango.commands.<command>', then
    for a user module '<cwd>/commands/<command>.py'.

    Returns a BaseCommand instance, or None when the command cannot be
    loaded or its module does not define a valid Command class.
    """
    module = None
    try:
        module = import_module('larango.commands.%s' % command)
    except Exception:
        # Not a bundled command: fall back to a user-defined command file.
        # (Was a bare 'except:', which would also swallow KeyboardInterrupt.)
        path = os.path.join(os.getcwd(), 'commands', command + '.py')
        spec = importlib.util.spec_from_file_location(command, path)
        # Guard against spec being None, which would previously crash in
        # module_from_spec().
        if spec is not None:
            module = importlib.util.module_from_spec(spec)
            try:
                spec.loader.exec_module(module)
            except Exception:
                # File missing or failed to execute: treat as "no command"
                # rather than inspecting a partially-executed module.
                module = None

    if module is None:
        return None
    command_class = getattr(module, 'Command', None)
    if command_class is None or not inspect.isclass(command_class):
        return None
    command_object = command_class()
    if not isinstance(command_object, BaseCommand):
        return None
    return command_object
def is_larango_directory():
    """Return True when the current working directory looks like a Larango
    project, i.e. <cwd>/config/django/ contains settings.py, asgi.py,
    wsgi.py and urls.py."""
    base = os.path.join(os.getcwd(), 'config', 'django')
    expected = ('settings.py', 'asgi.py', 'wsgi.py', 'urls.py')
    return all(os.path.isfile(os.path.join(base, name)) for name in expected)
/Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/jax/output/SVG/fonts/Latin-Modern/Fraktur/Regular/Main.js | MathJax.OutputJax.SVG.FONTDATA.FONTS.LatinModernMathJax_Fraktur={directory:"Fraktur/Regular",family:"LatinModernMathJax_Fraktur",id:"LATINMODERNFRAKTUR",32:[0,0,332,0,0,""],160:[0,0,332,0,0,""],8460:[667,133,720,-8,645,"645 239c0 -87 -7 -175 -43 -255c-51 -47 -127 -117 -165 -117c-26 0 -51 8 -69 26l48 64c11 -22 33 -37 58 -37c12 0 24 3 32 9c9 6 19 23 26 37c12 26 17 62 21 90c5 45 6 90 6 135c0 38 -2 75 -12 111c-7 26 -16 52 -35 70c-24 24 -58 32 -91 32 c-29 0 -59 -11 -83 -29c-29 -23 -45 -36 -72 -68c21 -52 28 -98 28 -122c0 -18 -2 -38 -10 -51c-32 -41 -106 -134 -125 -150c-15 14 -31 26 -47 36c-12 9 -27 16 -42 16c-29 0 -52 -25 -66 -40l-12 14c29 52 78 106 119 106c32 0 69 -30 89 -54c20 25 22 44 24 69 c0 25 -11 63 -24 87c-23 42 -41 85 -59 130c-11 27 -18 54 -18 83c0 42 26 77 52 109c41 50 92 90 145 127c34 -34 78 -60 128 -60c10 0 21 5 29 11c7 4 35 26 57 43l13 -15c-43 -42 -110 -108 -142 -108c-21 0 -41 5 -60 12c-29 9 -66 27 -92 40c-16 -16 -28 -31 -39 -53 c-8 -16 -10 -40 -10 -58c0 -39 30 -103 52 -149c53 55 115 100 181 138c64 0 129 -14 181 -53c16 -57 27 -116 27 -176"],8488:[729,139,602,11,533,"533 197c0 -88 -31 -172 -80 -245c-69 -57 -155 -91 -245 -91c-32 0 -64 3 -96 11c0 25 0 61 -1 74c-1 10 -2 20 -9 27s-17 11 -27 11c-11 0 -36 -14 -52 -26l-12 15c23 19 48 35 74 48c21 9 42 16 64 16c16 0 35 0 43 -16c5 -10 5 -42 5 -64c0 -17 0 -39 11 -50 s35 -16 51 -16c46 0 79 15 111 47c25 25 44 64 54 97c13 40 18 82 18 124c0 23 -2 46 -8 68c-5 16 -12 32 -24 44c-33 34 -80 48 -127 48c-32 0 -63 -7 -91 -21l-11 11l43 63c44 3 81 28 112 59c30 30 48 70 48 112c0 14 -6 27 -16 37c-16 15 -37 21 -59 21 c-23 0 -46 -3 -69 -5l-80 -5c-21 0 -43 1 -58 16c-7 7 -11 16 -11 26l69 96h16c-4 -7 -8 -16 -8 -24c0 -7 3 -14 8 -19c16 -15 49 -15 75 -15l58 2c16 0 32 3 48 3c18 0 36 -1 53 -5c12 -3 24 -8 32 -16c16 -16 27 -36 27 -59c0 -50 -23 -97 -59 -133 c-34 -34 -74 -65 -113 -89l5 -2c16 3 40 6 55 6c61 0 125 -17 
160 -69c11 -37 16 -74 16 -112"],8493:[686,24,612,59,613,"613 568c-35 -17 -78 -36 -117 -41c-27 16 -57 64 -80 101c-13 -4 -24 -11 -32 -21c-16 -20 -17 -38 -17 -47c0 -18 6 -48 13 -68c5 -16 20 -62 20 -93c0 -35 -7 -71 -32 -96c-38 -37 -89 -56 -142 -64l-12 16c28 7 53 22 74 43c16 16 16 45 16 69c0 23 -11 70 -21 107 c-5 19 -9 40 -9 60c0 10 1 20 3 30c-35 -1 -68 -15 -90 -42c-36 -45 -48 -103 -48 -160c0 -84 20 -169 80 -229c59 -60 142 -87 225 -87c34 0 58 6 72 13l86 47v-20c-42 -35 -96 -71 -144 -102c-27 -6 -54 -8 -82 -8c-88 0 -176 28 -238 89c-30 30 -46 66 -59 106 c-16 48 -20 98 -20 148c0 63 15 125 43 181c51 39 106 75 165 101l32 -16c63 45 135 74 207 101l28 -81c4 -9 5 -12 10 -18c4 -4 11 -5 19 -5c16 0 38 5 50 9v-23"],120068:[697,27,717,22,709,"709 85l-144 -112c-31 49 -47 98 -63 150l-203 -150c-14 23 -30 45 -48 64c-24 23 -64 40 -98 40c-37 0 -69 -17 -105 -40l-16 11l112 85c66 49 141 106 181 158c34 44 43 107 43 161c0 47 -14 94 -48 128c-33 33 -76 53 -123 53c-32 0 -62 -12 -85 -35 c-16 -15 -26 -38 -26 -60c0 -10 5 -19 11 -27c9 -14 20 -26 31 -37c15 -15 29 -31 40 -48c7 -12 13 -24 13 -38c0 -26 -14 -50 -31 -70c-24 -27 -54 -47 -86 -63l-16 11c18 10 34 23 47 39c10 12 17 26 17 41c0 10 -5 19 -11 27c-9 13 -20 25 -31 37c-26 26 -48 58 -48 96 c0 40 29 70 57 98c24 24 113 82 193 82c46 0 90 -13 124 -46c33 -34 52 -77 52 -124c0 -68 -18 -136 -48 -197l-178 -164c40 0 65 -21 87 -43l48 -48c12 1 94 53 144 85v325c0 42 -3 88 -5 127l127 96l16 -11c-15 -13 -36 -32 -42 -42c-7 -9 -10 -21 -11 -32 c-5 -54 -5 -156 -5 -234l5 -181c1 -11 2 -23 5 -32c12 -30 25 -61 43 -85c2 -3 6 -4 9 -4c5 0 10 2 14 3c16 7 31 14 46 22"],120069:[691,27,904,49,815,"815 494c-37 -14 -70 -26 -103 -48l-89 -63c110 0 176 -65 176 -159c0 -43 -7 -63 -22 -101c-118 -99 -179 -150 -239 -150c-56 0 -112 21 -162 41c-45 18 -89 34 -125 34c-51 0 -101 -22 -143 -48l-12 21l124 73c125 74 144 130 147 158c3 32 6 103 7 150 c0 162 -95 226 -158 226c-40 0 -56 -7 -79 -30c-16 -15 -18 -36 -18 -57c0 -14 18 -38 38 -63c25 -29 51 -64 51 -89c0 -72 -90 -118 -123 -134l-15 11c27 15 64 48 64 
81c0 12 -19 39 -44 70c-23 28 -41 52 -41 90c0 40 23 70 51 98c24 24 90 86 183 86 c58 0 117 -56 144 -111c22 23 53 46 79 64c37 24 78 42 123 42c23 0 47 -4 64 -21c10 -10 16 -37 22 -66c7 -34 14 -71 31 -88c6 -5 17 -6 25 -6c15 2 30 6 44 11v-22zM684 455c-11 5 -17 16 -22 26c-8 19 -13 39 -16 59c-4 26 -12 59 -23 70c-13 13 -35 24 -53 24 c-39 0 -60 -13 -80 -35c-18 -20 -34 -51 -42 -76c6 -25 10 -81 10 -124c0 -14 -1 -35 -2 -49l130 49c30 12 56 27 98 56zM714 175c0 42 -16 82 -46 111c-36 37 -84 55 -135 55c-27 0 -54 -5 -79 -16c-4 -41 -14 -81 -33 -118l-117 -90c59 0 115 -21 170 -43 c42 -16 99 -37 128 -37c26 0 51 9 69 27c30 29 43 70 43 111"],120071:[690,27,831,27,746,"746 410c0 -96 -21 -174 -55 -263c-63 -69 -163 -174 -222 -174c-21 0 -79 22 -112 35c-42 17 -74 27 -117 40c-28 8 -57 16 -85 16c-21 0 -40 -10 -56 -22c-21 -15 -37 -36 -51 -58l-16 16c26 51 63 96 107 133c39 0 79 7 113 28c25 15 47 38 47 68c0 28 -14 53 -32 74 c-7 8 -54 67 -54 85c0 16 1 32 9 46c10 19 24 35 39 50c31 31 68 55 107 75l16 -11c-14 -9 -26 -20 -38 -32c-13 -13 -25 -27 -35 -42c-6 -10 -12 -21 -12 -32c0 -14 25 -51 42 -75c20 -28 48 -69 48 -90c0 -30 -17 -56 -37 -77c-29 -31 -76 -54 -111 -69 c43 -9 92 -25 134 -40c44 -16 117 -43 137 -43c21 0 42 1 58 12c19 13 43 49 59 73s25 52 31 80c8 37 11 74 11 112c0 71 -17 145 -69 197c-67 67 -162 90 -256 90c-83 0 -172 -16 -231 -76c-44 -44 -64 -110 -67 -169l-21 11c0 82 26 157 85 215c30 30 69 55 109 68 c49 16 101 29 152 29c61 0 113 -1 179 -20c43 -12 90 -45 124 -79c48 -48 70 -114 70 -181"],120072:[686,24,662,86,641,"641 568c-36 -17 -80 -36 -119 -41c-26 15 -56 63 -79 100c-12 -4 -22 -14 -31 -23c-8 -11 -17 -24 -18 -38c0 -20 3 -41 9 -60c15 -45 23 -69 23 -137l102 62c21 -29 47 -55 77 -75v-9l-67 -38l-59 42c-5 3 -10 5 -15 5c-14 0 -27 -6 -38 -15c-6 -27 -23 -50 -42 -70 c-15 -15 -34 -25 -53 -32c-26 -11 -53 -17 -80 -21l-16 16c29 5 55 20 80 37c7 8 11 18 13 28c3 14 4 31 4 45c0 44 -11 87 -23 130c-5 19 -9 38 -9 58c0 10 1 21 4 32c-35 -1 -69 -15 -90 -42c-36 -45 -49 -103 -49 -160c0 -84 20 -169 80 -229s143 -87 226 -87 c14 0 
27 1 41 2c40 14 88 40 117 58v-20c-45 -37 -94 -70 -144 -102c-27 -5 -54 -8 -82 -8c-88 0 -175 28 -233 84c-30 30 -50 67 -63 107c-16 48 -21 102 -21 152c0 63 15 125 42 181c52 39 106 75 165 101l32 -16c64 45 136 74 208 101l28 -81c4 -9 6 -13 11 -18 c4 -4 12 -5 21 -5c16 0 36 4 48 8v-22"],120073:[686,155,611,11,621,"621 629c-23 -31 -55 -72 -83 -99c-15 -14 -32 -30 -53 -30c-23 0 -50 11 -73 25c-28 -9 -62 -29 -78 -47c-14 -15 -25 -38 -25 -63c0 -18 7 -36 15 -51c13 -24 28 -49 44 -71l122 117c23 -24 43 -49 54 -80l-43 -59c-12 21 -28 43 -53 43c-17 0 -49 -22 -71 -40l87 -112 c16 -23 21 -59 21 -88c0 -52 -22 -101 -59 -138c-55 -55 -129 -91 -207 -91c-33 0 -63 15 -85 38c-27 25 -31 79 -31 109c0 19 2 37 4 56c2 14 3 29 3 44c0 42 -6 54 -14 62c-5 6 -13 8 -21 8c-24 0 -48 -12 -59 -18l-5 21c58 39 105 54 131 54c11 0 21 -3 29 -12 c14 -17 22 -47 22 -73s0 -39 -6 -65c-5 -21 -8 -42 -8 -64c0 -54 17 -78 29 -90c17 -18 39 -35 64 -35c35 0 66 14 90 38c30 30 43 78 43 119c0 24 -10 47 -23 69c-22 36 -43 72 -67 107c-22 30 -43 61 -60 94c-10 19 -20 39 -20 60c0 33 19 61 39 85c31 37 69 66 110 91 c-35 21 -71 40 -109 55c-25 10 -51 19 -78 19c-26 0 -50 -13 -69 -32c-32 -32 -44 -78 -53 -122l-16 11c6 58 22 117 64 159c35 35 83 53 133 53c30 0 58 -10 85 -21c43 -18 83 -40 123 -64c19 -11 41 -21 64 -21c24 0 60 37 84 63"],120074:[692,25,785,66,711,"711 303c0 -79 -37 -152 -93 -207l-154 -104c-31 -11 -51 -17 -83 -17c-86 0 -179 33 -231 85c-68 68 -84 164 -84 259c0 37 4 70 13 106c8 28 19 52 31 77c50 36 102 70 162 99l32 -16l112 75l10 -11c-27 -23 -53 -53 -53 -90c0 -26 16 -96 26 -137l51 27 c33 22 78 57 107 86c-47 16 -94 53 -103 93l68 64c12 -24 27 -45 47 -62c16 -13 34 -23 54 -23c14 0 30 7 45 22l4 -23l-176 -154c16 2 30 3 45 3c57 0 97 -23 114 -37c33 -27 56 -70 56 -115zM624 255c0 37 -23 76 -48 99c-34 31 -76 48 -124 48c-16 0 -33 -2 -50 -6 c0 -44 -5 -93 -37 -125c-36 -35 -82 -50 -130 -58l-16 16c30 7 56 27 74 53c12 17 18 45 18 66c0 32 -4 59 -12 88c-9 33 -18 65 -18 97c0 10 1 20 3 31c-33 -2 -61 -11 -84 -36c-40 -42 -54 -103 -54 -161c0 -88 21 -177 83 
-239c52 -52 113 -89 186 -89c51 0 95 7 140 44 c52 43 69 113 69 172"],120077:[686,139,552,-18,522,"522 591c-52 -27 -129 -65 -160 -75c-30 6 -55 24 -79 43c-27 21 -56 43 -91 43c-24 0 -47 -10 -64 -25c-25 -22 -43 -52 -59 -81l-15 10c19 48 44 93 79 131c24 26 54 49 91 49c18 0 35 -7 51 -16c24 -13 45 -30 66 -47c14 -12 29 -23 45 -32c11 -6 23 -11 35 -11 c36 0 69 16 101 32v-21zM485 522c-22 -22 -42 -44 -57 -72c-11 -19 -12 -45 -12 -67c0 -36 5 -78 12 -117c13 -60 30 -160 30 -181c0 -59 -28 -113 -69 -154c-40 -41 -92 -70 -149 -70c-36 0 -75 6 -101 32c-22 23 -28 55 -32 85c-3 28 -8 66 -16 80s-22 22 -37 22 c-20 0 -50 -9 -67 -20l-5 13c22 18 53 41 78 54c18 9 37 17 58 17c17 0 35 -4 47 -16c16 -16 16 -49 16 -75s2 -52 7 -78c4 -16 9 -35 20 -46c18 -19 44 -32 69 -32c26 0 50 8 68 27c28 27 39 74 39 113c0 53 -13 110 -27 160c-15 52 -28 113 -28 162c0 10 1 20 2 29 c39 54 86 102 138 144"],120078:[681,27,668,16,690,"690 60c-22 -22 -53 -50 -80 -66c-17 -11 -35 -21 -56 -21c-20 0 -40 11 -50 27c-16 25 -21 54 -25 85c-5 44 -16 102 -37 122c-16 17 -40 22 -64 22c-31 0 -62 -9 -90 -22c0 -26 -1 -53 -5 -79c-25 -38 -54 -73 -88 -102c-28 -24 -54 -40 -88 -51c-39 3 -70 27 -91 57 l57 66h10c7 -15 16 -30 28 -42c10 -11 26 -20 41 -20c21 0 36 9 44 17c20 20 23 58 23 91c0 50 -12 100 -27 149c-13 43 -32 106 -32 134c0 13 5 28 6 30c21 53 55 99 95 139c55 55 130 85 208 85c31 0 62 -5 91 -17c24 -10 45 -25 58 -47c0 -6 1 -12 1 -18 c0 -33 -7 -67 -17 -99l-16 -5c0 34 -13 67 -37 91c-36 36 -84 48 -139 48c-58 0 -98 -17 -132 -51c-19 -19 -37 -64 -37 -93c0 -34 7 -83 20 -123c35 37 71 71 114 98c28 19 60 35 94 35c16 0 31 -4 43 -16c17 -16 29 -48 39 -75c6 -16 19 -50 56 -50c5 0 15 2 21 5l6 -16 l-168 -76v-1c15 0 33 -4 46 -16c19 -18 28 -61 36 -103c5 -25 11 -60 29 -80c9 -10 21 -19 36 -19c24 0 54 9 74 24zM508 314c-22 13 -32 46 -39 69c-5 16 -21 35 -33 44c-9 6 -26 9 -33 9c-26 0 -53 -13 -73 -30c-28 -22 -45 -49 -63 -79c11 -28 17 -66 22 -97 c39 18 75 32 119 40c33 13 68 28 100 44"],120079:[686,27,666,32,645,"645 186c-7 -38 -15 -78 -29 -116c-9 -23 -18 -47 -35 -65c-28 
-28 -69 -32 -107 -32c-40 0 -79 9 -118 20c-56 16 -110 37 -164 60c-20 9 -42 16 -64 16c-30 0 -60 -43 -84 -84l-12 15c20 47 38 88 71 127c8 9 27 14 40 16h31c16 5 48 23 61 39c13 15 18 30 18 45 c0 23 -27 76 -45 108c-13 25 -26 51 -36 78c-6 18 -12 36 -12 55c0 23 9 44 19 63c15 26 34 49 56 70c56 57 133 85 213 85c36 0 72 -6 98 -32c20 -19 26 -47 26 -83c0 -11 -1 -22 -2 -33c-2 -19 -2 -47 -2 -55c0 -7 4 -14 8 -19c10 -10 41 -12 59 -12v-18l-117 -30h-3 c-14 0 -40 21 -40 46c0 12 1 32 5 61c1 11 2 21 2 31c0 29 -5 53 -24 72c-22 22 -52 30 -82 30c-40 0 -78 -14 -106 -42c-20 -20 -30 -52 -30 -80c0 -20 4 -37 10 -55c8 -26 21 -54 33 -79c17 -35 42 -90 42 -121c0 -7 -2 -13 -5 -17c-38 -46 -90 -104 -111 -118 c45 -19 101 -40 149 -52c34 -9 70 -16 106 -16c42 0 86 6 117 37c26 26 38 61 48 96"],120080:[692,27,1049,27,1049,"1049 554c-41 -21 -79 -48 -111 -80c-13 -26 -20 -54 -24 -82c-7 -38 -8 -77 -8 -115c0 -47 0 -107 10 -139c7 -21 9 -24 24 -40c6 -8 22 -18 30 -18c6 0 37 10 58 21v-19l-144 -109l-57 126l-1 172c0 43 7 86 16 128c22 34 49 64 80 91c-7 1 -15 9 -22 16 c-5 5 -15 35 -21 53c-7 21 -17 51 -27 58c-9 8 -20 11 -32 11c-25 0 -56 -8 -75 -25c-22 -19 -34 -31 -50 -57c6 -31 14 -79 14 -110c0 -106 -23 -211 -64 -308l-123 -150c-18 17 -50 43 -69 43c-9 0 -19 0 -26 -5c-13 -7 -21 -16 -33 -30l-13 14c25 36 67 90 88 101 c5 3 10 3 16 3c7 0 14 -1 21 -3c9 -2 42 -24 58 -35c24 24 31 53 38 83c16 64 16 145 16 218c0 43 -1 85 -8 127c-4 27 -9 53 -24 75c-15 23 -43 32 -69 32c-21 0 -44 -9 -58 -23c-15 -13 -26 -31 -35 -49c7 -28 10 -75 10 -82c0 -113 -13 -227 -72 -325l-170 -149 c-14 15 -30 28 -48 39c-11 8 -23 14 -37 14c-29 0 -52 -20 -69 -42l-11 16c17 28 37 55 61 78c16 14 34 28 56 28c15 0 29 -6 42 -14c15 -9 41 -31 55 -44c17 13 34 28 52 46c26 26 36 69 43 104c13 56 17 112 17 169v41c-1 52 -13 111 -40 148c-27 38 -71 74 -118 74 c-39 0 -59 -8 -78 -23c-13 -11 -21 -34 -21 -43c0 -33 23 -66 38 -83c11 -14 30 -37 40 -54c6 -11 13 -28 13 -38c0 -29 -14 -52 -32 -72c-22 -25 -59 -46 -91 -62l-15 11c18 10 36 26 49 42c10 12 15 24 15 39c0 10 -7 19 -13 27l-31 43c-23 28 
-41 52 -41 90 c0 40 23 70 51 98c24 24 97 86 183 86c46 0 89 -34 119 -71c13 -16 24 -37 33 -59c3 2 5 5 7 8c14 18 29 36 48 48c35 24 74 43 117 43c23 0 47 -5 64 -21c17 -18 28 -39 36 -62c22 26 48 48 76 67c35 25 73 48 117 48c18 0 36 -8 48 -21c11 -13 20 -38 27 -59 c5 -15 10 -32 21 -43c8 -7 19 -9 29 -9c16 0 31 4 45 9v-15"],120081:[686,29,832,29,830,"830 76l-144 -105l-64 133l1 167c0 40 0 96 6 117c2 8 6 15 10 22c21 28 40 48 65 74c-18 4 -26 15 -32 23c-8 11 -15 33 -22 57c-5 18 -14 35 -27 48c-10 10 -23 16 -37 16c-31 0 -65 -18 -87 -39c-31 -32 -45 -51 -66 -88c7 -42 15 -107 15 -150 c0 -78 -7 -159 -43 -229c-5 -9 -13 -24 -16 -26c-42 -37 -83 -68 -131 -100c-12 -7 -35 -18 -48 -18c-35 0 -68 30 -86 58l56 65h12c5 -13 14 -29 27 -43c11 -11 29 -18 44 -18c28 0 49 20 68 39c18 18 25 44 31 69c9 39 11 78 11 118c0 121 -28 241 -59 284 c-27 38 -71 72 -118 72c-40 0 -61 -4 -83 -25c-12 -11 -17 -25 -17 -46c0 -20 24 -59 39 -76c11 -14 30 -37 40 -54c6 -11 13 -28 13 -38c0 -29 -14 -52 -32 -72c-22 -25 -59 -46 -91 -62l-15 11c18 10 36 26 49 42c10 12 15 24 15 39c0 10 -7 19 -13 27l-31 43 c-23 28 -41 52 -41 90c0 40 23 70 51 98c24 24 90 86 183 86c46 0 89 -34 119 -71c16 -19 30 -50 40 -81c13 15 46 61 52 71c30 48 93 82 149 82c17 0 33 -1 49 -5c12 -3 23 -7 31 -16c13 -13 21 -34 27 -53c4 -15 13 -40 16 -43c9 -8 19 -15 32 -15c15 0 32 4 45 11v-14 c-30 -21 -61 -44 -88 -67c-10 -8 -19 -19 -21 -32c-12 -57 -16 -116 -16 -175c0 -42 3 -84 14 -125c7 -28 15 -46 36 -67c4 -4 13 -9 19 -9c22 0 46 13 63 22v-22"],120082:[729,27,828,11,746,"746 399c0 -59 -6 -118 -24 -174c-13 -42 -33 -84 -67 -113c-44 -40 -91 -76 -144 -103c-38 -20 -79 -36 -122 -36c-55 0 -109 12 -160 32c-17 7 -35 15 -53 21c-17 6 -35 11 -53 11c-39 0 -74 -21 -96 -53l-16 16c28 37 58 73 91 106c44 15 85 51 85 101 c0 15 -5 29 -11 43c-9 19 -20 36 -32 53c-17 25 -45 63 -52 74c-6 10 -12 21 -12 33c0 35 15 67 36 95c26 35 60 63 98 86l21 -11c-19 -14 -37 -30 -51 -50c-10 -13 -19 -29 -19 -46s6 -33 13 -49c9 -24 22 -46 36 -68c20 -34 47 -77 47 -106c0 -26 -11 -49 -25 -70 c-19 -30 -49 -56 -76 -79c12 0 
28 -4 39 -8c32 -10 63 -23 94 -35c51 -19 111 -33 167 -33c74 0 132 37 162 105c26 57 33 127 33 189c0 44 -4 88 -16 131c-10 32 -24 63 -48 87c-17 18 -40 28 -64 36c-31 9 -63 12 -95 12l-86 -3c-19 0 -39 3 -56 13c-12 7 -23 18 -23 33 c0 40 45 59 69 90h16c-5 -7 -10 -17 -10 -26c0 -16 20 -23 36 -27c19 -5 85 -10 128 -14c35 -3 69 -9 102 -20c25 -9 50 -22 68 -41c53 -53 70 -128 70 -202"],120083:[692,219,823,6,804,"700 138c0 16 -8 37 -14 52c-10 21 -22 41 -36 60c-21 30 -48 67 -48 96c0 20 11 38 22 54c18 24 43 45 66 64c-13 3 -29 15 -40 26c-13 12 -15 41 -16 64c-1 22 -3 51 -16 63c-12 13 -31 16 -48 16c-24 0 -45 -5 -64 -18c-44 -30 -67 -61 -96 -106v-387h1l145 -48 c19 -5 48 -12 63 -12c24 0 47 8 63 25c14 14 18 31 18 51zM804 522c-31 -16 -60 -34 -85 -59c-11 -12 -22 -24 -31 -37c-6 -8 -12 -17 -12 -27c0 -29 20 -59 38 -85c20 -31 37 -64 37 -101c0 -69 -33 -132 -85 -176c-31 -25 -66 -48 -107 -48c-10 0 -21 3 -31 6l-118 42 c0 -62 3 -124 11 -186l-96 -70l-21 11c5 12 12 56 16 85c9 62 11 123 11 186c-33 10 -63 17 -80 17c-27 0 -51 -14 -70 -32c-19 -20 -36 -42 -53 -64l-10 21c19 38 39 76 69 107c25 25 59 42 96 42c13 0 30 -3 48 -8v250c0 54 -12 115 -40 154c-27 38 -71 72 -118 72 c-39 0 -64 -7 -84 -25c-12 -12 -18 -25 -18 -46c0 -25 26 -59 41 -76c11 -14 30 -37 40 -54c6 -11 13 -28 13 -38c0 -29 -14 -52 -32 -72c-22 -25 -59 -46 -91 -62l-15 11c18 10 36 26 49 42c10 12 15 24 15 39c0 10 -7 19 -13 27l-31 43c-23 28 -41 52 -41 90 c0 40 23 70 51 98c24 24 89 86 183 86c46 0 89 -34 119 -71c17 -20 30 -47 39 -74c2 3 5 6 7 8c18 23 37 45 59 64c50 43 108 80 175 80c20 0 40 -7 54 -21c17 -18 23 -59 26 -91c1 -13 4 -30 11 -37c9 -9 20 -13 32 -13s25 4 37 8"],120084:[729,69,828,11,783,"783 -6l-117 -63c-33 10 -65 23 -96 40c-20 11 -40 23 -57 39l-2 -1c-38 -20 -79 -36 -122 -36c-55 0 -109 12 -160 32c-17 7 -35 15 -53 21c-17 6 -35 11 -53 11c-39 0 -74 -21 -96 -53l-16 16c28 37 58 73 91 106c44 15 85 51 85 101c0 15 -5 29 -11 43 c-9 19 -20 36 -32 53c-17 25 -45 63 -52 74c-6 10 -12 21 -12 33c0 35 15 67 36 95c26 35 60 63 98 86l21 -11c-19 -14 -37 -30 -51 -50c-10 
-13 -19 -29 -19 -46s6 -33 13 -49c9 -24 22 -46 36 -68c20 -34 47 -77 47 -106c0 -26 -11 -49 -25 -70c-19 -30 -52 -59 -77 -78 c12 0 29 -5 40 -9c32 -10 63 -23 94 -35c51 -19 108 -32 163 -32c74 0 139 41 169 109c26 57 30 122 30 184c0 44 -4 88 -16 131c-10 32 -24 63 -48 87c-17 18 -40 28 -64 36c-31 9 -63 12 -95 12l-86 -3c-19 0 -39 3 -56 13c-12 7 -23 18 -23 33c0 8 0 21 11 32l58 58h16 c-5 -7 -10 -17 -10 -26c0 -4 1 -7 4 -11c8 -9 21 -13 32 -16c19 -5 85 -10 128 -14c35 -3 69 -9 102 -20c25 -9 50 -22 68 -41c53 -53 70 -128 70 -202c0 -103 -22 -225 -91 -287c-17 -15 -34 -30 -52 -44c18 -20 35 -39 57 -54c15 -10 31 -20 49 -20c27 0 52 12 74 27v-27"],120086:[689,27,828,56,756,"756 655c-8 -32 -24 -62 -47 -86c-28 -28 -65 -44 -102 -58c-31 3 -86 35 -127 58c-28 17 -57 32 -87 44c-18 8 -37 15 -57 15c-50 0 -93 -29 -128 -64c-49 -49 -69 -118 -69 -186c0 -98 31 -196 101 -266c56 -57 136 -86 215 -86c64 0 129 5 175 51c32 33 47 84 47 130 c0 37 -11 76 -38 102c-22 22 -53 32 -85 32s-75 -9 -112 -16c-38 -8 -79 -16 -117 -16c-25 0 -51 3 -69 21c-16 16 -27 36 -27 58c0 29 13 55 32 75c22 21 49 37 78 45l13 -13c-13 -4 -23 -12 -32 -21c-12 -13 -16 -31 -16 -48c0 -16 5 -32 16 -43c12 -12 30 -16 48 -16 c22 0 78 13 117 21c42 10 95 22 128 22c39 0 78 -15 106 -43c24 -24 32 -57 32 -90c0 -74 -28 -145 -80 -197c-78 -78 -189 -107 -298 -107c-76 0 -155 20 -210 75c-68 68 -107 164 -107 261s39 186 108 255c67 68 168 106 261 125c53 -28 119 -62 156 -78 c24 -10 48 -20 74 -20c22 0 39 6 55 21c15 15 22 38 27 54"],120087:[703,27,669,24,676,"676 681l-69 -90c-21 -13 -45 -16 -69 -16c-38 0 -91 5 -128 14c-23 -18 -30 -25 -47 -51c-12 -18 -27 -52 -27 -75c0 -13 5 -25 11 -36c8 -16 19 -30 31 -44c21 -23 40 -48 56 -74c10 -17 19 -34 19 -54c0 -25 -14 -47 -29 -66c-25 -33 -56 -60 -87 -86 c43 -20 98 -45 137 -45c22 0 43 9 62 20c26 14 50 34 71 55l11 -16c-28 -32 -57 -62 -89 -89c-25 -22 -52 -41 -81 -55c-44 6 -84 27 -123 48c-28 15 -58 27 -90 27c-19 0 -37 -7 -54 -15c-25 -12 -47 -27 -69 -44l-10 16c38 40 79 77 122 112c10 1 20 1 30 1 c19 0 37 -1 55 -6c21 12 40 28 55 47c11 14 
20 30 20 48c0 14 -6 26 -13 37c-10 18 -22 34 -35 49c-30 34 -69 79 -75 111c8 34 23 64 41 93c24 36 51 69 82 99c-52 18 -106 27 -160 27c-45 0 -92 -8 -124 -40c-17 -17 -26 -43 -26 -67c0 -39 29 -71 65 -85l-64 -64 c-32 25 -51 58 -51 98c0 55 41 118 82 156c49 46 110 65 177 65c57 0 114 -9 170 -21c39 -7 80 -16 117 -16c42 0 79 23 106 54v-22"],120088:[697,27,645,-26,666,"666 80l-144 -107l-67 147l-114 -126c-17 -13 -37 -21 -58 -21c-45 0 -87 16 -128 32c-38 15 -80 32 -117 32c-9 0 -17 -6 -23 -11c-12 -9 -22 -20 -31 -32l-10 17l80 90c24 2 50 28 69 48c26 27 37 64 37 101c0 19 -6 37 -12 54c-10 26 -22 50 -36 74 c-23 40 -53 93 -53 128c0 20 10 39 21 56c16 26 37 49 59 71c24 24 50 47 80 64l10 -16c-15 -11 -29 -24 -42 -37c-25 -24 -43 -56 -43 -90c0 -32 20 -73 37 -107c14 -28 27 -57 36 -86c7 -23 12 -45 12 -68c0 -26 -4 -52 -15 -75l-101 -112c29 0 69 -14 101 -26 c33 -14 80 -32 101 -32c31 0 57 20 79 42c23 23 51 51 59 75c5 16 5 56 5 85c0 116 -2 231 -10 346l127 90l16 -10c-18 -18 -34 -39 -42 -64c-16 -48 -16 -99 -16 -149v-202c0 -22 1 -43 5 -64c8 -38 17 -77 43 -107c6 -7 15 -11 25 -11c24 0 50 15 60 22v-21"],120089:[686,27,831,29,826,"735 548c-29 0 -55 16 -80 32c-27 18 -56 37 -90 37c-29 0 -66 -36 -66 -36c-28 -27 -50 -59 -66 -95c0 -100 -14 -182 -55 -262l-93 -73c39 -3 69 -22 131 -61c66 -41 114 -58 143 -58c26 0 51 7 70 25c14 15 19 35 23 54c7 32 9 64 9 96c0 24 -3 57 -6 86 c-2 33 -5 67 -5 101c0 23 4 47 11 69zM826 607c-16 -11 -30 -24 -43 -38c-36 -35 -58 -82 -58 -133c0 -34 5 -67 10 -101c7 -39 16 -96 16 -117c0 -31 -7 -62 -21 -90c-11 -22 -23 -42 -43 -57l-117 -85c-14 -11 -35 -13 -53 -13c-30 0 -76 9 -149 53 c-20 12 -110 64 -154 64c-49 0 -92 -27 -123 -64l-11 16c38 43 80 82 128 112c46 0 87 26 112 64c14 22 20 47 24 73c6 37 10 74 10 112c-1 52 -13 110 -40 148s-71 72 -118 72c-40 0 -65 -9 -85 -27c-10 -8 -15 -23 -15 -44c0 -24 24 -59 39 -76c11 -14 30 -37 40 -54 c6 -11 13 -28 13 -38c0 -29 -14 -52 -32 -72c-22 -25 -59 -46 -91 -62l-15 11c18 10 36 26 49 42c10 12 15 24 15 39c0 10 -7 19 -13 27l-31 43c-23 28 -41 52 -41 90c0 40 23 70 51 
98c24 24 90 86 183 86c46 0 89 -34 119 -71c17 -21 34 -60 43 -89c13 22 28 44 55 75 c22 25 49 46 78 63c20 12 42 22 65 22c36 0 68 -16 96 -37c38 -27 65 -32 81 -32c9 0 18 2 26 6v-16"],120090:[686,28,1046,21,1055,"954 548c-18 0 -51 22 -75 37c-26 17 -54 32 -85 32c-22 0 -42 -10 -58 -23c-26 -21 -43 -38 -61 -64c2 -33 4 -68 4 -103c0 -79 -8 -156 -29 -227c-14 -19 -56 -75 -79 -99c43 0 116 -33 170 -59c16 -7 34 -10 53 -10c32 0 58 15 74 42c11 18 18 45 18 112 c0 39 -3 79 -7 117c-3 29 -5 59 -5 88c0 39 4 63 10 77c3 6 7 12 11 17zM607 362c0 52 -7 104 -29 152c-16 37 -45 75 -82 91c-28 -25 -57 -51 -78 -80c1 -9 7 -38 7 -49c0 -15 -4 -87 -18 -148c-11 -48 -27 -95 -55 -137c-42 -34 -81 -59 -118 -81c45 -35 103 -68 160 -68 c59 0 114 28 155 70c25 24 35 59 43 93c12 51 15 104 15 157zM1055 628l-6 -21c-32 -24 -62 -50 -80 -85c-12 -28 -15 -54 -15 -88c0 -23 2 -45 5 -67c6 -51 10 -103 10 -154c0 -25 -2 -50 -9 -74c-6 -18 -14 -34 -28 -47c-42 -40 -89 -75 -138 -106c-20 -6 -40 -11 -60 -11 c-30 0 -61 7 -89 19c-48 21 -101 43 -149 43c-41 -19 -81 -39 -118 -64c-5 0 -11 -1 -16 -1c-45 0 -90 16 -127 39c-27 17 -62 37 -86 37c-11 0 -34 -8 -46 -16c-21 -14 -39 -26 -57 -43l-14 16c37 43 78 82 123 117h3c47 0 92 20 125 53c25 25 38 58 47 92 c12 43 16 89 16 135c-1 52 -13 111 -40 149s-71 72 -118 72c-40 0 -65 -8 -86 -27c-12 -11 -14 -20 -14 -42c0 -30 24 -61 39 -78c11 -14 30 -37 40 -54c6 -11 13 -28 13 -38c0 -29 -14 -52 -32 -72c-22 -25 -59 -46 -91 -62l-15 11c18 10 36 26 49 42c10 12 15 24 15 39 c0 10 -7 19 -13 27l-31 43c-23 28 -41 52 -41 90c0 40 23 70 51 98c24 24 90 86 183 86c46 0 89 -34 119 -71c14 -17 26 -40 35 -64l12 13c42 46 89 85 138 122c53 -30 92 -96 107 -127c32 34 66 67 104 94c23 17 48 33 77 33c17 0 33 -5 48 -12c23 -9 44 -22 64 -35 c18 -12 38 -19 59 -19c12 0 25 3 37 8"],120091:[689,27,719,27,709,"709 585l-102 -58c-20 9 -37 22 -53 37c-15 15 -37 38 -42 52c-15 -10 -31 -23 -42 -37c-17 -23 -31 -51 -42 -79c-17 -42 -23 -87 -23 -133h176l-37 -42c-46 6 -93 7 -139 8c0 -41 2 -82 11 -115c11 -47 29 -93 64 -128c24 -24 56 -37 90 -37c39 0 74 21 106 
43v-22 l-111 -90c-16 -5 -37 -11 -48 -11c-34 0 -67 14 -91 38c-38 38 -58 90 -69 143l-42 -42c-33 -33 -68 -65 -107 -91c-18 -12 -36 -23 -56 -32c-12 -5 -26 -11 -40 -11c-36 0 -66 24 -85 54l59 69c15 -31 44 -53 79 -53c38 0 73 21 99 47c41 41 55 99 67 155c4 20 5 51 5 83 c-59 0 -117 0 -176 -8l37 42h139c0 36 0 71 -5 107c-5 34 -17 62 -42 87c-17 18 -40 33 -65 33c-32 0 -57 -7 -80 -30c-17 -17 -37 -37 -49 -52l-17 15l148 159c44 -2 89 -16 120 -47c13 -13 22 -30 29 -46c11 -25 18 -51 25 -77c15 31 34 61 58 85c38 38 81 68 130 88 c12 -23 22 -47 34 -69c3 -7 7 -13 12 -19c9 -8 20 -13 32 -13c11 0 29 9 43 16v-19"],120092:[686,219,834,26,741,"741 341c0 -69 -5 -137 -17 -204c-9 -53 -23 -105 -48 -153c-44 -67 -106 -120 -175 -160c-43 -24 -89 -43 -139 -43c-29 0 -58 6 -84 19c-21 11 -40 28 -49 51l67 71h10c10 -50 56 -93 110 -93c61 0 96 21 135 60c25 24 46 64 62 95c20 41 29 86 35 131 c10 66 13 132 13 199c0 72 -4 144 -27 213c-26 29 -54 59 -90 74c-8 -3 -76 -35 -116 -89c3 -14 4 -30 4 -44c-2 -91 -18 -183 -70 -261l-116 -92c24 -7 52 -21 74 -32c26 -13 53 -25 80 -25c34 0 63 23 85 48l9 -16c-23 -30 -46 -59 -73 -85c-14 -12 -33 -26 -51 -26 c-30 0 -76 23 -107 41c-37 21 -76 34 -100 34c-46 0 -90 -32 -126 -58l-11 15c37 39 82 78 122 113c22 0 52 0 68 -2c18 10 45 32 67 53c25 25 40 58 51 92c13 42 19 86 19 130c0 53 -12 114 -40 154c-27 38 -71 72 -118 72c-39 0 -69 -13 -87 -27c-13 -12 -14 -24 -14 -45 s25 -58 40 -75c11 -14 30 -37 40 -54c6 -11 13 -28 13 -38c0 -29 -14 -52 -32 -72c-22 -25 -59 -46 -91 -62l-15 11c18 10 36 26 49 42c10 12 15 24 15 39c0 10 -7 19 -13 27l-31 43c-23 28 -41 52 -41 90c0 40 23 70 51 98c24 24 89 86 183 86c46 0 89 -34 119 -71 c17 -21 31 -49 40 -78l5 5c27 27 56 52 86 75c33 27 68 51 106 69c29 -36 68 -64 107 -90c16 -80 16 -170 16 -255"],120094:[471,36,500,65,497,"497 83l-123 -110l-43 73l-2 45l-175 -127c-30 32 -59 66 -87 101c-2 30 -2 61 -2 91c0 70 5 141 20 209l169 106c28 -19 60 -33 93 -40l67 34l8 -6c-10 -67 -15 -135 -15 -203l1 -137l36 -45c21 7 32 13 46 22zM329 119l-1 246c-47 0 -96 13 -142 27 c-24 -25 -30 -62 -35 
-97c-4 -31 -6 -62 -6 -94c0 -27 1 -53 3 -80c13 -17 39 -44 60 -65c0 0 4 -1 7 -1c15 0 54 23 114 64"],120095:[686,31,513,86,444,"444 313c0 -72 -5 -145 -20 -216c-73 -47 -148 -92 -228 -128c-37 33 -86 77 -110 102c14 73 14 157 14 236l-7 361l17 4l17 -77l8 4l120 87l17 -13l-30 -26c-44 -38 -60 -91 -63 -148l-2 -134l7 -2c45 38 92 77 140 107c38 -22 77 -43 117 -61c2 -32 3 -64 3 -96z M363 227c0 41 0 72 -4 113c-9 12 -32 24 -85 46c-31 -4 -69 -33 -96 -54v-209c32 -33 80 -81 107 -81c16 0 29 11 40 22c16 17 23 39 28 61c8 34 10 68 10 102"],120096:[466,29,389,72,359,"359 92l-172 -121c-40 34 -76 73 -110 113c-3 45 -5 89 -5 134c0 29 0 89 12 145l165 103c39 -3 75 -22 108 -44l-52 -79l-9 -3c-33 29 -83 54 -120 54c-18 -30 -21 -99 -21 -148c0 -35 2 -69 7 -99c18 -29 53 -61 84 -89c32 10 71 31 105 51"],120097:[612,34,498,13,430,"430 328c0 -31 -1 -61 -3 -79c-4 -48 -8 -96 -26 -140l-187 -143c-50 34 -99 69 -138 115c-2 30 -3 57 -3 85l6 169c42 40 88 83 132 112l19 -4c-46 -50 -57 -77 -65 -97c-9 -23 -13 -56 -13 -92c0 -39 4 -83 7 -119c13 -16 29 -29 45 -41c23 -16 47 -30 72 -43 c3 -1 5 -2 8 -2c5 0 11 3 15 10c28 44 42 95 47 146c2 21 3 41 3 62c0 44 -4 88 -12 132c-39 41 -82 79 -131 109c-33 21 -70 38 -110 38c-8 0 -21 0 -24 -1l-46 -18l-13 16c43 24 93 48 138 69c90 -12 192 -88 276 -157"],120098:[467,31,400,70,364,"364 358c-69 -66 -155 -146 -213 -192c0 -9 2 -18 5 -26c3 -14 60 -61 100 -79c4 1 18 1 23 3l74 39l5 -16c-54 -42 -110 -82 -169 -118c-40 37 -80 74 -114 116c-4 34 -5 69 -5 103c0 60 5 119 14 178l162 101c33 -42 74 -78 118 -109zM284 312c-38 24 -76 53 -108 84 l-2 1c-8 0 -14 -22 -16 -28c-9 -32 -10 -123 -10 -176c46 34 98 78 136 119"],120099:[679,238,329,30,324,"324 388l-12 -37h-106l-4 -206c0 -67 0 -151 -13 -198c-10 -34 -25 -67 -43 -97l-50 -88l-19 7l26 90c7 23 12 48 14 72l3 197l-1 223h-89l15 35h73l-14 67c-6 25 -13 61 -13 75c0 44 24 81 55 112c14 14 29 27 46 39c42 0 92 -7 124 -40l-47 -92l-10 -1 c-8 17 -18 33 -31 46c-13 12 -30 23 -47 23c-13 0 -29 -3 -39 -12c-9 -10 -12 -22 -12 -35c0 -17 12 -41 21 -59l50 -95c5 -8 7 -17 9 
-26h114"],120100:[470,209,503,16,455,"455 -17c-51 -64 -104 -127 -163 -184c-12 -4 -28 -8 -36 -8c-32 0 -63 9 -92 21c-37 16 -71 38 -102 63l-35 -19l-11 13c45 31 89 68 134 107l-72 89c-2 34 -4 69 -4 103c0 67 5 134 15 200c53 37 107 70 162 102c34 -23 74 -36 114 -45l72 40l6 -6 c-14 -29 -16 -62 -18 -94c-4 -55 -4 -143 -4 -215c0 -14 2 -28 5 -41zM369 -45c0 22 -16 95 -27 142c-67 -47 -133 -99 -195 -152c23 -23 49 -45 77 -60c22 -12 46 -20 71 -20c20 0 39 10 53 24c15 16 21 45 21 66zM349 351c-58 8 -128 21 -176 43c-16 -40 -18 -90 -18 -134 c0 -45 1 -91 5 -136c15 -29 43 -60 68 -83l112 82c0 76 2 153 9 228"],120101:[689,198,521,76,435,"435 246c0 -54 -2 -109 -7 -162c-4 -39 -9 -78 -20 -115c-45 -66 -102 -119 -164 -167l-16 14l64 55c11 8 19 19 26 31c17 34 23 72 28 109c8 60 10 121 10 182c0 28 -6 107 -10 161c-26 19 -55 36 -88 36c-31 0 -69 -29 -94 -49l-5 -215l54 -77l-82 -75l-55 91 c7 122 11 244 11 366c0 81 -1 163 -6 244l12 2l24 -76l121 88l17 -11c-83 -77 -90 -142 -90 -197l-1 -110c33 26 98 72 148 107c33 -30 74 -48 116 -61c4 -57 7 -114 7 -171"],120102:[675,21,279,14,268,"268 83c-39 -37 -81 -72 -123 -104l-54 76c3 53 3 93 3 139l-4 173l-21 24c-11 -4 -29 -12 -42 -22l-13 13c34 28 71 61 105 93c19 -20 38 -39 60 -55c-7 -60 -8 -120 -8 -181c0 -39 1 -79 5 -111c2 -19 19 -40 32 -53c4 0 10 1 13 3c15 7 33 18 40 24zM187 616l-57 -60 c-21 19 -40 40 -58 61c20 20 42 37 60 58c18 -21 40 -47 55 -59"],120103:[673,202,280,-9,196,"196 422l-6 -396c-1 -25 -5 -51 -22 -70c-45 -55 -98 -107 -161 -158l-16 18c29 22 58 45 83 71c13 13 21 30 26 47c16 56 18 179 18 269c0 50 0 119 -10 149c-6 17 -25 37 -32 37l-8 -3l-29 -19l-14 13l106 95c22 -16 43 -34 65 -53zM183 615l-58 -60 c-18 15 -37 40 -54 61l61 57c16 -21 32 -40 51 -58"],120104:[686,26,389,24,363,"363 95c-43 -37 -111 -82 -168 -121l-107 95c4 19 6 40 6 58v181h-70l9 35h62c0 227 -1 262 -7 326l17 4c8 -24 18 -80 18 -80c41 32 86 68 130 93l16 -10c-26 -24 -56 -52 -67 -73c-19 -32 -26 -69 -29 -105c42 32 83 57 127 87c17 -16 32 -34 44 -54 c9 -15 16 -31 16 -48c0 -18 -10 -36 -23 -49l-86 -91h95l-14 
-35h-159v-180c24 -24 54 -48 82 -68c32 16 66 29 95 50zM288 446c0 13 -6 24 -12 35c-10 14 -22 26 -35 37c-20 -10 -39 -23 -51 -42c-9 -16 -12 -34 -14 -52c-3 -25 -3 -54 -3 -81c35 0 64 13 87 40 c15 18 28 39 28 63"],120105:[686,20,279,97,277,"277 673c-22 -19 -43 -36 -59 -60c-35 -54 -36 -106 -36 -222l2 -269c9 -14 26 -29 42 -41c14 4 29 14 40 23l10 -17l-128 -107l-51 76c6 108 8 217 8 326c0 96 -1 191 -6 286l17 4l17 -78l128 92"],120106:[475,26,766,7,757,"757 88l-126 -107c-10 19 -39 54 -59 79c6 42 11 75 11 214c0 26 -1 50 -3 68c-1 6 -2 12 -8 15c-28 17 -60 26 -92 34c-23 -12 -44 -24 -65 -38v-218c0 -8 0 -17 4 -22c0 0 30 -45 46 -66l-86 -73c-13 26 -36 52 -54 72c9 39 14 81 14 202c0 33 -1 66 -5 99 c-25 22 -71 35 -105 44l-61 -42v-197c0 -16 0 -40 5 -46l44 -58l-83 -70l-49 80c5 40 7 80 7 119l-4 188l-26 26l-43 -24l-12 12l111 93l50 -52v-44l127 99c38 -23 78 -43 120 -57v-39c44 30 88 61 129 95c39 -21 85 -46 121 -56c-5 -57 -3 -114 -4 -171v-121 c11 -16 23 -33 37 -47c20 7 28 10 47 26"],120107:[475,23,526,18,521,"521 93l-127 -112l-55 84c6 35 8 81 8 130c0 54 -2 111 -5 163c-29 15 -68 24 -101 27l-65 -44l-4 -207c0 -10 4 -24 7 -28l40 -54l-87 -75c-14 25 -30 48 -49 69c8 46 11 104 11 165l-4 154l-24 23l-35 -20l-13 12l104 90c12 -13 35 -31 54 -45v-58l144 108 c35 -24 73 -43 113 -58c-4 -43 -8 -102 -8 -164l4 -126c8 -13 16 -27 26 -40c3 -4 8 -5 13 -5c0 0 7 3 10 5l31 22"],120108:[481,28,488,66,413,"413 302c0 -71 -6 -141 -22 -210c-72 -46 -147 -88 -226 -120c-35 30 -71 62 -99 96c0 89 5 198 18 296c61 35 119 75 176 117c45 -30 94 -55 148 -66c3 -38 5 -75 5 -113zM334 356c-45 3 -104 25 -149 45c-13 -22 -19 -35 -27 -72c-8 -30 -11 -75 -11 -118 c0 -33 2 -64 6 -88c17 -21 36 -40 58 -55c18 -12 36 -21 57 -24c6 0 17 13 22 19c23 28 32 67 37 107c7 62 7 89 7 186"],120109:[538,214,500,12,430,"430 282l-2 -66c-2 -43 -6 -87 -21 -127l-134 -107c-32 26 -68 48 -108 55l4 -207l-80 -44l-11 7l9 254c-23 0 -46 -7 -64 -23l-11 14c25 23 62 57 76 67v247l-41 56c-3 4 -5 9 -5 14c0 8 3 15 7 22c21 34 47 66 76 94l14 -9c-10 -12 -18 -26 -18 -42c0 -13 5 -26 12 -38 
c9 -16 21 -30 34 -43l-2 -34c49 33 95 69 142 106c35 -28 76 -48 118 -64c3 -44 5 -88 5 -132zM349 230c0 41 -3 82 -6 123c-25 13 -66 28 -96 38c-29 -11 -54 -27 -81 -44v-241c40 0 108 -30 157 -54c16 30 26 83 26 178"],120110:[480,224,489,59,418,"418 473c-15 -85 -19 -220 -19 -332c0 -107 0 -215 7 -320l-83 -45l-11 10l8 307c-56 -46 -113 -84 -174 -122l-84 94c-2 29 -3 57 -3 86c0 73 6 147 23 218c57 31 115 63 165 104c29 -18 60 -34 93 -41l70 48zM324 367c-51 3 -101 16 -150 31c-14 -25 -21 -54 -26 -82 c-7 -38 -10 -77 -10 -116c0 -19 0 -53 1 -56c0 -7 1 -14 6 -19c22 -22 50 -54 73 -75c24 13 70 48 101 71"],120111:[474,21,389,15,395,"395 413l-1 -13l-59 -55c-9 -8 -21 -10 -33 -10s-24 5 -33 11c-16 10 -36 25 -50 38c-16 -12 -31 -25 -37 -34v-198c0 -16 10 -29 19 -41c16 -19 34 -36 54 -51l89 42l2 -19l-146 -104c-37 29 -70 62 -103 96c6 59 6 109 6 170l-1 77c-1 32 -8 43 -8 43 c-9 8 -19 15 -29 22l-38 -16l-12 10l101 93c21 -21 43 -41 66 -60v-32l109 87c10 -18 22 -36 36 -52c8 -9 18 -19 30 -19c11 0 29 10 38 15"],120112:[479,30,442,-28,407,"407 441l-92 -78c-72 4 -126 27 -152 52c-20 -19 -20 -56 -20 -79c0 -25 2 -50 12 -73c4 -9 9 -21 20 -21c6 0 12 0 17 2l123 56c22 -7 39 -22 56 -38c2 -26 3 -52 3 -77c0 -46 -4 -91 -16 -135c-54 -24 -109 -49 -160 -80c-22 21 -46 41 -73 57c-17 10 -35 20 -55 20 c-30 0 -57 -16 -89 -44l-9 9c15 16 52 55 74 76c18 17 43 23 67 23c26 0 50 -8 72 -22l81 -54l4 -1c6 0 11 5 15 14c9 30 11 61 11 92c0 25 -3 63 -13 74c-14 17 -27 22 -39 22c-14 0 -22 -3 -35 -10l-102 -58c-15 19 -31 45 -43 67c0 49 1 97 6 142l152 102l81 -46 c8 -4 16 -6 26 -6c23 0 46 13 72 30"],120113:[641,21,333,26,349,"349 78l-137 -99l-97 83l1 289h-90l16 35h73v172l113 83l14 -14l-27 -26c-5 -5 -8 -12 -9 -19c-11 -64 -12 -131 -12 -196h108l-17 -35h-90l1 -228c20 -18 44 -39 63 -50c5 -3 11 -5 17 -5c15 0 36 9 70 32"],120114:[474,26,517,8,514,"514 88c-42 -34 -83 -69 -122 -106l-8 2c-19 32 -39 67 -51 103c-68 -45 -153 -113 -153 -113c-26 17 -88 62 -125 74c13 22 21 51 27 79c9 45 15 91 15 137c0 28 0 55 -7 82c-6 18 -14 37 -29 50l-38 -20l-15 10l111 88c16 -19 
32 -38 52 -53c0 -22 1 -43 1 -65l-2 -92 c-2 -54 -9 -108 -24 -160l101 -48l88 59c3 50 4 100 4 149l-3 160l85 44l7 -6c-10 -79 -13 -158 -13 -237c0 -32 2 -64 6 -94c2 -14 8 -28 17 -40c4 -5 10 -8 17 -8c10 0 43 15 53 20"],120115:[533,28,511,51,439,"439 245c0 -37 -15 -119 -32 -159l-215 -114c-31 33 -67 65 -103 94c5 55 7 101 7 165l-4 114c0 8 -4 13 -9 19c-9 12 -17 18 -25 30c-4 8 -7 16 -7 24c0 6 5 20 8 26c21 32 45 62 72 89l14 -4c-9 -14 -16 -29 -16 -46c0 -10 11 -31 19 -45c5 -8 18 -20 27 -30v-34 l143 103c36 -24 74 -45 115 -61c6 -66 6 -100 6 -171zM359 243c0 38 -3 77 -8 115c-23 21 -54 32 -85 38c-28 -2 -63 -27 -91 -48l-3 -193c0 -25 14 -47 34 -62l37 -29c12 -8 27 -15 42 -15c13 0 24 9 33 19c11 11 18 25 24 40c13 40 17 99 17 135"],120116:[533,28,773,44,693,"693 268c0 -93 -19 -133 -41 -196l-187 -100c-44 27 -89 52 -134 74c5 59 8 118 8 176c0 40 -1 80 -5 119c-23 16 -56 37 -70 44c-7 3 -16 7 -24 7c-4 0 -12 -2 -16 -5c-22 -12 -38 -25 -58 -39l-1 -190c0 -24 6 -49 20 -70c8 -14 18 -29 27 -40l-75 -75 c-17 29 -35 57 -59 79c8 51 10 103 10 155c0 46 0 92 -3 138c-1 8 -4 13 -9 19c-9 12 -17 18 -25 30c-4 8 -7 16 -7 24c0 6 5 20 8 26c21 32 45 62 72 89l14 -4c-9 -14 -16 -29 -16 -46c0 -10 11 -31 19 -45c5 -8 18 -20 27 -30v-32c43 30 86 61 123 97 c41 -26 84 -48 128 -68l-1 -37l142 103c41 -24 83 -46 126 -66c5 -28 7 -92 7 -137zM610 264c0 28 -4 53 -12 84c-29 18 -68 33 -100 38c-22 0 -63 -33 -80 -45l-2 -106c0 -26 1 -53 3 -81c1 -23 12 -44 29 -59c27 -25 59 -51 100 -51c8 0 17 7 24 14c17 17 25 45 29 69 c7 45 9 91 9 137"],120117:[473,188,388,10,370,"370 398l-67 -59c-28 14 -47 42 -59 51c-15 -9 -42 -33 -49 -43c-4 -18 -5 -41 -5 -58c-1 -55 1 -115 5 -172c19 -20 40 -38 63 -54c4 -3 9 -4 13 -4c5 1 13 0 22 5l63 34l3 -18l-149 -105c-25 28 -54 52 -84 74c-17 -16 -33 -33 -45 -54c-9 -15 -16 -31 -16 -49 c0 -20 10 -37 24 -51c22 -22 69 -31 98 -34v-9l-64 -40c-33 8 -67 19 -92 44c-13 12 -21 29 -21 46c0 25 13 47 26 68c21 34 41 59 68 89c6 42 7 91 7 133c0 54 1 108 -3 161c-9 10 -21 23 -30 28c-3 2 -10 5 -10 5c-3 0 -8 -2 -12 -4c-9 -5 -29 -14 -29 -14l-13 9 
c33 34 68 65 104 96c25 -22 63 -57 76 -66l-2 -31l109 89c17 -28 42 -48 69 -67"],120118:[524,219,498,45,437,"437 228c0 -80 -8 -174 -30 -251c-13 -46 -117 -145 -185 -196l-38 17c34 17 67 36 94 62c22 22 36 51 46 80c11 32 16 66 19 99c5 50 7 100 7 150c0 56 -5 112 -11 167c-24 15 -53 30 -80 35c-15 0 -59 -28 -89 -51c-1 -64 -3 -81 -3 -188c0 -38 15 -66 45 -104l-75 -75 c-17 29 -39 61 -59 79c10 61 11 126 11 203c0 31 0 35 -3 81c-1 8 -4 13 -9 19c-9 12 -17 19 -25 31c-4 6 -7 14 -7 24c0 4 5 19 8 25c21 32 45 62 72 89l14 -4c-9 -14 -16 -29 -16 -46c0 -10 11 -31 19 -45c5 -8 18 -20 27 -30v-32l142 106c37 -22 74 -44 115 -59 c7 -60 11 -130 11 -186"],120119:[471,215,390,-7,314,"314 64c0 -55 -10 -110 -24 -163l-137 -116c-45 0 -88 15 -124 41c-18 14 -32 34 -36 58c7 27 54 91 87 129c42 49 82 89 127 134c-35 16 -71 30 -110 34v11l73 67c16 15 34 33 34 56s-16 45 -34 57c-12 9 -26 16 -39 16c-7 0 -10 -2 -17 -6c-24 -14 -46 -30 -67 -47 l-11 12c45 45 93 87 146 124c24 -12 49 -25 68 -44c10 -9 19 -20 25 -31c5 -8 9 -17 9 -26c0 -13 -6 -26 -15 -36l-81 -87c37 -13 79 -29 112 -49c11 -43 14 -89 14 -134zM240 20c0 37 -4 74 -15 110c-41 -35 -81 -71 -113 -114c-20 -27 -40 -56 -40 -89 c0 -17 10 -31 22 -42c23 -24 55 -39 89 -39c10 0 16 4 23 12c10 10 18 37 22 57c7 35 12 70 12 105"],120172:[688,31,847,29,827,"827 77c-53 -35 -104 -73 -159 -106l-12 3l-73 150l-232 -155c-27 29 -56 57 -90 80c-21 14 -43 24 -70 24c-34 0 -93 -23 -133 -42l-22 17l200 128c27 17 52 36 76 56c69 59 100 141 100 223c0 49 -27 95 -68 129c-29 25 -70 40 -112 40c-36 0 -58 -7 -78 -24 c-17 -14 -22 -35 -22 -54c0 -12 6 -23 13 -33c11 -15 24 -28 38 -41c16 -14 32 -28 44 -45c8 -10 15 -20 15 -32c0 -30 -22 -56 -46 -77c-32 -27 -70 -50 -110 -69l-19 15c19 11 36 24 51 40c9 11 18 23 18 37c0 16 -30 48 -51 70c-16 16 -30 33 -42 52c-7 13 -14 26 -14 40 c0 28 16 56 46 83c51 47 171 102 244 102c58 0 116 -15 158 -50c38 -33 59 -77 59 -123c0 -69 -25 -136 -60 -198l-193 -158c25 0 50 -9 72 -21c31 -16 58 -37 82 -59c37 9 97 46 142 74v429l174 105l14 -13l-51 -51c-8 -49 -8 -215 -8 -323c0 -40 1 -81 4 
-121l57 -101 l53 21"],120173:[685,31,1043,56,963,"963 491l-204 -103v-5c26 -3 46 -6 75 -14c24 -7 47 -16 65 -31c38 -32 52 -77 52 -122s-16 -79 -45 -116l-172 -111c-25 -13 -62 -20 -97 -20c-63 0 -124 16 -182 35s-118 37 -181 37c-64 0 -122 -20 -177 -50l-13 24l139 77c42 24 83 49 119 79c20 18 39 41 45 64 c9 36 13 81 13 120c0 37 0 76 -7 112c-9 44 -29 90 -67 122c-24 20 -52 38 -87 38c-23 0 -45 -9 -61 -23c-14 -12 -21 -31 -21 -48c0 -13 8 -24 16 -34c13 -18 29 -34 46 -50c14 -13 28 -27 39 -43c7 -10 14 -21 14 -33c0 -10 -3 -21 -10 -30c-9 -13 -23 -26 -35 -36 c-39 -34 -84 -60 -131 -84l-20 13c23 15 44 30 65 48c12 10 19 24 19 39c0 16 -24 40 -42 58c-18 17 -34 36 -46 57c-9 15 -16 31 -16 48c0 29 40 70 70 95c58 50 130 81 206 81c80 0 142 -49 173 -108c42 30 85 59 134 80c34 15 70 28 108 28c22 0 45 -4 61 -18 c15 -12 28 -60 36 -92c5 -18 13 -36 25 -51c8 -9 18 -19 33 -19c21 0 41 4 61 10v-24zM827 144c0 48 -13 99 -53 133c-41 35 -98 50 -154 50c-33 0 -65 -4 -96 -11c-4 -25 -14 -51 -24 -75l-145 -104c83 -2 164 -23 241 -49c50 -17 104 -34 154 -34c19 0 38 5 51 16 c23 19 26 48 26 74zM778 433c-9 5 -21 12 -27 19c-7 8 -13 17 -15 26c-1 2 -8 40 -12 60c-5 24 -11 51 -29 67c-10 8 -23 14 -36 14c-25 0 -47 -9 -66 -21c-27 -18 -51 -45 -72 -78c6 -30 7 -58 7 -101l-2 -73c83 19 174 46 252 87"],120174:[677,32,723,71,729,"729 553c-27 -11 -105 -40 -140 -40c-8 0 -16 1 -23 5c-10 6 -21 13 -28 22c-18 22 -35 47 -49 71c-9 -2 -17 -5 -23 -10c-13 -12 -19 -30 -19 -46c0 -17 11 -49 21 -74c11 -28 19 -61 19 -86c0 -32 -6 -66 -33 -89c-20 -17 -44 -28 -68 -40c-37 -16 -76 -30 -115 -43 l-20 14c25 7 45 22 63 37c21 18 37 41 37 67c0 32 -7 63 -16 94c-9 33 -20 71 -20 98c0 9 1 16 4 22c-24 -2 -47 -11 -64 -26c-21 -17 -33 -40 -42 -64c-11 -30 -14 -62 -14 -94c0 -88 27 -177 101 -240c63 -53 146 -86 234 -86c21 0 45 1 62 9l116 54l4 -27l-166 -103 c-28 -6 -56 -10 -85 -10c-102 0 -206 21 -279 82c-79 68 -115 162 -115 256c0 62 18 123 47 179c21 16 124 77 195 108l25 -25c82 52 173 93 274 109c8 -36 19 -72 50 -98c8 -7 19 -7 30 -7c8 0 26 3 37 
6v-25"],120175:[685,29,981,30,896,"896 372c0 -76 -28 -160 -78 -231c-81 -68 -154 -117 -249 -170c-47 0 -101 18 -149 34c-78 26 -175 58 -239 58c-30 0 -59 -10 -80 -28c-19 -16 -37 -32 -53 -49l-17 19c42 58 98 108 155 156c33 0 65 1 97 7c20 3 40 7 54 19s20 28 20 45c0 13 -7 25 -14 36 c-12 18 -26 34 -40 49s-27 29 -38 46c-7 10 -13 21 -13 33c0 14 3 28 12 40c12 17 25 31 42 45c33 25 69 48 107 68l22 -11c-27 -22 -62 -52 -62 -75c0 -21 22 -47 45 -75c14 -16 37 -42 49 -60c8 -11 13 -25 13 -39c0 -25 -13 -48 -34 -66c-34 -29 -78 -53 -127 -71 c61 -10 179 -42 261 -64c25 -7 52 -13 79 -13c25 0 45 13 62 28c23 18 35 45 43 71c12 34 16 69 16 104c0 86 -25 174 -98 235c-35 30 -80 49 -127 62c-58 16 -119 22 -180 22c-89 0 -181 -17 -246 -71c-51 -44 -71 -100 -74 -161l-25 7c0 85 32 169 104 230 c36 31 83 49 132 61c64 16 131 22 197 22c125 0 253 -25 342 -101c66 -56 91 -134 91 -212"],120176:[687,29,782,73,733,"733 584l-3 -27l-131 -40c-4 -1 -7 -1 -11 -1s-9 0 -12 2c-10 7 -19 14 -27 24l-59 76c-19 -12 -36 -28 -36 -48c0 -28 8 -66 19 -96s19 -66 19 -93l122 80c20 -36 45 -65 81 -89v-12l-89 -55c-12 7 -25 13 -44 26l-36 25c-4 3 -10 6 -15 6c-9 0 -16 -4 -23 -9 c-7 -15 -17 -27 -30 -38c-50 -42 -114 -69 -181 -89l-22 14c26 10 48 23 68 40c17 14 31 32 31 53c0 30 -4 63 -14 94c-9 28 -15 51 -22 79c-2 8 -3 15 -3 23c0 13 2 26 6 38c-20 -4 -37 -14 -52 -26c-52 -45 -74 -107 -74 -170c0 -87 38 -171 111 -233 c58 -49 134 -84 216 -84c71 0 138 23 195 58v-27l-155 -103c-38 -7 -77 -11 -116 -11c-94 0 -191 24 -259 81c-79 67 -114 159 -114 253c0 61 12 122 41 178c60 43 126 81 193 116l43 -18c79 47 168 80 260 106c6 -22 14 -44 24 -65c8 -15 15 -25 28 -36c7 -6 15 -9 25 -9 c13 0 33 4 46 7"],120177:[684,147,721,17,734,"734 626c-46 -45 -117 -114 -151 -140c-40 2 -85 13 -122 24c-29 -9 -52 -19 -74 -37c-15 -13 -23 -32 -23 -50c0 -19 9 -37 19 -55c15 -25 34 -47 55 -69l150 119c31 -25 50 -57 70 -94l-68 -60c-8 12 -20 28 -28 35c-8 8 -25 14 -39 14c-21 0 -48 -17 -67 -33 c33 -34 68 -73 93 -112c16 -24 31 -49 31 -77c0 -51 -21 -101 -64 -137c-66 -57 -151 -101 -247 -101c-38 0 
-79 7 -107 30c-31 27 -39 66 -39 103c0 27 4 49 9 75c3 13 5 39 5 55c0 15 -3 29 -16 40c-9 8 -23 13 -36 13c-17 0 -45 -13 -64 -23l-4 23c32 18 65 35 100 47 c26 9 53 14 80 14c19 0 31 -5 44 -15c11 -10 19 -26 19 -39c0 -22 -2 -45 -8 -69c-11 -43 -17 -87 -17 -123c0 -26 6 -53 28 -72c16 -12 37 -19 58 -19c41 0 80 16 109 41c31 26 38 65 38 101c0 39 -55 101 -95 147c-30 35 -60 70 -85 109c-14 21 -28 44 -28 69 c0 17 7 33 17 48c12 19 29 37 48 53c29 24 64 44 100 61c-69 32 -163 74 -215 74c-22 0 -46 -4 -62 -18c-36 -30 -53 -71 -63 -113l-26 16c14 58 39 115 89 157c37 32 89 46 141 46c29 0 57 -7 83 -15c41 -11 80 -27 118 -43c43 -18 105 -45 131 -45c25 0 70 36 101 61"],120178:[692,27,927,74,844,"844 291c0 -95 -65 -179 -159 -227l-150 -82c-35 -5 -68 -9 -96 -9c-91 0 -181 25 -244 81c-82 73 -121 166 -121 264c0 36 5 73 14 108c7 25 17 46 34 70c62 39 124 72 186 100l35 -12c44 30 93 56 144 78l22 -12c-43 -18 -77 -60 -77 -100c0 -29 17 -97 31 -144l66 34 l114 86c-35 6 -64 16 -90 37c-13 12 -24 33 -29 49l112 80c8 -21 15 -43 33 -58c19 -16 46 -22 72 -22c12 0 42 8 65 19l3 -24c-72 -49 -137 -96 -204 -149c67 0 135 -13 183 -54c36 -30 56 -71 56 -113zM718 216c0 48 -17 96 -57 130c-33 28 -81 43 -127 43 c-23 0 -45 -1 -68 -9c2 -11 3 -23 3 -35c0 -24 -6 -48 -25 -65c-17 -14 -38 -25 -59 -34c-31 -13 -59 -22 -92 -32l-11 18c16 6 28 12 43 24c16 16 24 35 24 55c0 13 -11 70 -21 102c-15 50 -21 80 -21 100c0 19 1 39 13 55c-30 -2 -51 -15 -73 -33 c-42 -36 -56 -95 -56 -144c0 -96 38 -190 119 -258c59 -51 137 -84 222 -84c47 0 96 12 130 39c40 31 56 81 56 128"],120179:[684,127,850,0,753,"753 242c0 -87 -15 -173 -51 -254l-160 -100c-14 -8 -30 -15 -47 -15c-38 0 -72 17 -97 40l59 70l16 1c6 -14 16 -28 29 -38c10 -8 22 -15 36 -15c21 0 37 8 52 20c37 31 37 150 37 227c0 34 -3 138 -45 172c-26 22 -61 38 -98 38c-23 0 -46 -2 -67 -9 c-18 -7 -43 -19 -81 -48c24 -40 46 -94 46 -126c0 -27 -7 -58 -28 -78l-166 -145c-16 15 -34 29 -53 40c-13 7 -27 14 -43 14c-33 0 -67 -30 -76 -39l-16 18c47 47 116 117 160 117c42 0 76 -29 99 -56c17 20 29 40 29 64c0 34 -41 101 -72 149c-32 48 
-61 100 -61 156 c0 25 14 48 29 69c24 33 53 62 84 90c33 30 76 55 116 80c21 -18 44 -35 69 -48c17 -9 36 -17 56 -17c35 0 88 34 111 51l14 -18c-35 -32 -89 -80 -139 -111c-15 -4 -31 -7 -47 -7c-59 0 -111 27 -157 56c-20 -18 -37 -40 -37 -65c0 -23 6 -45 15 -66c14 -35 33 -71 53 -102 c57 43 125 88 187 122c64 0 148 -23 186 -45c10 -5 31 -29 34 -40c20 -58 24 -91 24 -152"],120180:[683,25,654,31,623,"623 655c-61 -48 -133 -98 -205 -133l-110 36c-33 9 -66 18 -101 18c-32 0 -67 -12 -90 -31c-27 -23 -46 -53 -64 -82h-7l-15 14c19 37 39 73 62 108c15 22 31 44 52 61c30 24 70 37 110 37c38 0 89 -19 132 -34c36 -13 92 -33 111 -33s34 5 51 13c23 11 44 26 65 41z M607 568c-24 -17 -39 -31 -56 -53c-12 -16 -23 -40 -23 -60c0 -24 0 -54 3 -72c8 -48 15 -97 15 -145c0 -60 -24 -122 -75 -165c-65 -56 -142 -98 -236 -98c-48 0 -92 21 -128 47l-62 -38l-7 18c47 48 105 96 159 138c23 -42 37 -67 74 -97c13 -11 31 -15 49 -15 c24 0 42 8 59 21c15 13 25 38 31 56c8 25 8 51 8 77c0 10 -8 75 -12 112c-3 26 -5 52 -5 77c0 21 1 42 4 62c51 55 110 104 177 145"],120181:[681,142,652,-8,615,"615 586c-68 -33 -148 -64 -223 -85c-28 12 -69 37 -75 41c-36 19 -74 37 -117 37c-24 0 -52 -7 -68 -21c-22 -18 -36 -40 -50 -62l-18 14c26 46 55 91 98 127c30 26 70 44 113 44c17 0 32 -7 46 -14c24 -11 46 -24 68 -38c21 -14 43 -28 66 -39c14 -6 29 -11 45 -11 c32 0 77 16 115 30v-23zM572 517c-46 -40 -82 -88 -83 -145c0 -44 7 -87 20 -129c15 -52 30 -105 30 -159c0 -56 -21 -111 -68 -151c-52 -44 -120 -75 -194 -75c-37 0 -77 3 -104 26c-30 25 -43 59 -48 94c-5 29 -12 73 -24 83c-11 9 -26 15 -42 15c-20 0 -40 -8 -58 -17 l-9 18c30 20 62 39 97 53c26 11 54 19 83 19c20 0 41 -5 56 -18c10 -8 12 -21 13 -33c1 -6 1 -67 1 -100s11 -65 38 -88c12 -10 28 -17 44 -17c26 0 50 7 68 23c26 22 32 59 32 90c0 47 -18 111 -35 165c-15 46 -25 93 -25 140c0 17 1 34 5 51c50 62 109 117 178 164"],120182:[682,26,789,20,813,"813 69c-31 -26 -64 -51 -101 -71c-23 -12 -48 -24 -75 -24c-17 0 -36 1 -48 11c-33 29 -45 69 -51 108c-2 11 -12 72 -37 108c-8 11 -29 29 -73 29c-34 0 -70 -11 -98 -24c2 -13 3 -25 3 -37c0 -14 -2 -28 
-6 -45c-43 -46 -94 -93 -151 -127c-14 -8 -27 -14 -39 -19 c-38 0 -72 14 -93 30c-3 2 -16 15 -24 24l86 85h14c8 -20 17 -39 35 -54c9 -8 21 -17 35 -17c10 0 22 2 29 8c15 14 18 33 18 53c0 34 -24 91 -41 133c-23 55 -44 111 -44 169c0 30 13 59 30 85c23 35 53 65 86 93c69 59 162 95 260 95c117 0 156 -35 183 -64 c2 -10 3 -19 3 -30c0 -22 -5 -50 -22 -98l-20 -6c-8 39 -25 58 -49 79c-46 39 -112 62 -176 62c-54 0 -108 -15 -147 -48c-27 -24 -38 -57 -38 -90c0 -43 10 -84 23 -125c41 39 85 75 137 104c36 19 76 36 119 36c22 0 44 -4 60 -17c16 -15 28 -44 37 -68 c6 -18 12 -35 22 -44c6 -6 16 -10 26 -10c12 0 24 0 46 6l6 -21l-197 -65c79 -15 95 -46 108 -126c7 -41 27 -67 45 -79c12 -8 29 -11 45 -11c25 0 47 11 67 24zM556 312c-13 9 -29 36 -35 55c-4 17 -12 35 -27 47c-11 9 -24 18 -39 18c-37 0 -72 -15 -97 -37 c-24 -20 -44 -52 -58 -81c8 -26 16 -58 23 -84c31 14 102 46 178 59"],120183:[684,28,786,30,764,"764 200c-10 -40 -22 -81 -39 -119c-11 -24 -22 -48 -43 -66c-30 -25 -69 -43 -112 -43c-93 0 -182 23 -267 53c-49 17 -119 41 -151 41c-19 0 -38 -6 -52 -17c-12 -11 -37 -44 -54 -67l-16 13c28 59 65 115 117 161c11 6 25 9 41 9c7 0 14 0 22 -2c15 4 36 12 47 21 c15 12 20 30 20 47c0 22 -37 78 -61 115c-21 32 -38 67 -38 104c0 25 11 50 26 72c19 30 45 57 74 81c67 57 162 81 256 81c40 0 82 -5 111 -29c13 -12 17 -27 20 -43c4 -23 3 -47 0 -70c-2 -13 -3 -21 -3 -34c0 -17 9 -31 24 -42c6 -5 45 -5 63 -5v-18l-172 -49 c-10 0 -21 1 -29 7c-13 12 -21 27 -21 43c0 14 4 26 6 40c2 17 5 48 5 52c0 23 -8 46 -27 62c-25 21 -60 30 -95 30c-32 0 -65 -8 -88 -27c-24 -21 -35 -49 -35 -78c0 -40 18 -78 37 -115c14 -26 26 -53 34 -82c5 -15 8 -31 8 -47c0 -5 -1 -17 -5 -22 c-24 -29 -68 -72 -116 -104l153 -47c56 -17 116 -24 176 -24c42 0 84 11 114 37c29 24 42 58 51 92"],120184:[686,33,1239,26,1232,"1232 548c-48 -25 -73 -41 -116 -72c-5 -4 -25 -28 -28 -39c-9 -36 -9 -61 -9 -97c0 -55 2 -110 14 -164c4 -19 10 -37 18 -55c6 -13 10 -18 21 -27c7 -6 20 -9 30 -9c9 0 41 2 61 9v-18l-179 -109l-80 111c0 105 0 237 6 289c33 48 63 77 101 103c-12 3 -18 5 -26 17 c-9 14 -22 40 -31 62c-7 18 -16 36 
-29 48c-10 8 -23 15 -37 15c-24 0 -49 -5 -69 -16c-25 -14 -40 -26 -60 -45c5 -28 8 -72 8 -101c0 -107 -14 -216 -69 -313l-170 -165c-13 12 -28 24 -45 33c-11 6 -23 12 -37 12c-18 0 -36 -5 -49 -16l-16 -14l-17 15c25 28 66 71 78 82 c19 17 44 33 73 33c25 0 57 -24 77 -41c27 34 36 72 45 112c13 59 14 124 14 183c0 40 -3 80 -15 119c-9 27 -21 54 -44 74c-14 12 -32 23 -52 23c-15 0 -28 -4 -41 -10c-16 -9 -32 -21 -50 -35c8 -31 14 -62 14 -94c0 -107 -34 -221 -87 -318l-219 -155 c-13 15 -28 29 -45 39c-14 8 -29 14 -45 14c-20 0 -39 -5 -54 -17c-10 -8 -24 -20 -30 -26l-15 15c29 32 66 72 91 92c23 20 54 31 86 31c16 0 30 -7 42 -15c18 -11 35 -28 48 -44c14 10 26 19 35 28c28 25 39 60 50 95c15 51 20 104 20 157c0 46 -4 94 -18 139 c-11 32 -27 66 -55 90c-31 27 -70 46 -116 46c-21 0 -51 -1 -67 -15c-13 -11 -23 -23 -23 -39c0 -17 36 -59 61 -85c43 -43 58 -67 58 -84c0 -29 -21 -55 -45 -76c-35 -29 -74 -51 -116 -72l-18 17c21 10 38 20 53 35c10 11 20 24 20 38c0 20 -28 54 -50 76 c-26 26 -58 63 -58 89c0 18 12 35 23 51c18 24 41 45 65 65c53 45 137 67 188 67c48 0 104 -15 139 -44c24 -20 46 -47 58 -74c30 23 62 44 98 59c26 11 55 20 85 20c35 0 68 -12 93 -32c13 -12 23 -25 33 -38c36 30 76 58 121 78c36 16 74 28 115 28c15 0 31 -2 43 -12 c10 -9 25 -37 34 -55c8 -16 17 -32 31 -44c9 -9 21 -14 34 -14c16 0 31 3 46 8v-18"],120185:[681,33,982,26,968,"968 542c-29 -13 -66 -37 -108 -66c-5 -4 -25 -28 -28 -39c-9 -36 -9 -61 -9 -97c0 -55 2 -110 14 -164c4 -19 10 -37 18 -55c6 -13 10 -18 21 -27c7 -6 20 -9 30 -9c9 0 41 2 61 9v-18l-179 -109l-80 111c0 105 0 237 6 289c33 48 63 79 101 105c-14 0 -26 7 -35 15 c-13 12 -23 27 -29 42c-10 25 -25 57 -41 71c-11 8 -24 14 -38 14c-25 0 -53 -16 -72 -30c-36 -28 -57 -50 -82 -83c16 -48 24 -113 24 -166c0 -72 -13 -144 -44 -210l-205 -143c-9 -4 -23 -10 -29 -10c-51 0 -93 27 -128 57l88 94h19c5 -14 20 -33 38 -49 c14 -11 32 -17 52 -17c25 0 46 14 64 28c15 13 19 31 23 49c7 29 9 58 9 88c0 70 -8 140 -31 208c-17 51 -43 100 -87 138c-31 26 -70 47 -115 47c-21 0 -44 -3 -60 -16c-12 -11 -19 -24 -19 -39s9 -30 17 -43c14 -20 31 -38 49 
-56c14 -13 27 -27 37 -43 c7 -10 10 -21 10 -33c0 -26 -17 -46 -37 -65c-32 -31 -70 -55 -111 -78l-22 16c18 11 35 24 48 39c9 11 17 23 17 37c0 20 -19 39 -36 55c-18 18 -34 36 -46 56c-9 15 -17 30 -17 47c0 20 9 39 20 57c16 24 35 46 59 64c52 39 122 68 195 68c49 0 98 -15 133 -45 c33 -28 57 -63 70 -101c22 22 44 43 68 63c50 43 112 80 185 80c25 0 53 -3 71 -19c11 -9 24 -36 36 -64c5 -13 14 -24 25 -34c6 -7 13 -13 23 -13c18 0 36 3 52 9"],120186:[726,29,976,11,881,"881 378c0 -86 -29 -170 -81 -244c-57 -51 -117 -88 -185 -121c-48 -23 -100 -42 -155 -42c-64 0 -126 18 -187 37c-41 13 -90 28 -125 28c-47 0 -86 -25 -116 -54l-21 22l89 110c35 8 70 17 95 38c15 12 22 29 22 47c0 19 -8 37 -17 55c-14 26 -31 50 -52 76 c-26 32 -50 74 -51 104c0 44 77 117 150 157l14 -14c-26 -18 -56 -42 -56 -77c0 -16 25 -67 47 -98c24 -32 40 -59 59 -93c5 -9 10 -23 10 -33c0 -22 -7 -41 -21 -60c-23 -30 -56 -62 -88 -83c22 0 41 -2 70 -9c58 -14 117 -31 173 -50c49 -17 82 -27 147 -27 c26 0 51 15 70 29c32 25 48 66 60 101c15 49 20 100 20 150c0 40 -5 81 -18 119c-11 30 -26 59 -52 81c-24 20 -56 29 -88 36c-46 10 -74 13 -122 13c-25 0 -47 -1 -68 -3c-15 -1 -30 -1 -44 -1c-46 0 -64 26 -64 36c0 24 4 35 35 60c18 13 52 45 62 58h25 c-6 -9 -11 -19 -11 -30c0 -9 10 -15 20 -17c12 -3 69 -5 103 -7c56 -3 112 -10 165 -24c40 -12 79 -27 109 -53c29 -24 45 -55 57 -87c15 -42 20 -86 20 -130"],120187:[685,223,977,19,944,"944 511c-45 -20 -89 -41 -126 -69c-15 -12 -29 -27 -29 -45c0 -32 18 -61 38 -88c23 -31 54 -74 54 -104c0 -24 -5 -48 -19 -69c-24 -35 -48 -63 -83 -94c-20 -18 -42 -36 -67 -47c-19 -9 -40 -15 -61 -15c-54 0 -109 18 -157 39c0 -54 5 -106 14 -160l-138 -82l-27 14 c12 35 16 70 20 105c5 52 7 106 7 158c-27 11 -60 22 -86 22c-37 0 -67 -18 -92 -40c-18 -15 -36 -30 -51 -48l-15 19c32 47 70 104 107 135c25 21 57 37 93 37c12 0 26 -1 43 -6v196c0 43 -6 87 -21 128c-12 33 -29 64 -57 88c-24 20 -53 34 -87 34c-38 0 -58 -12 -71 -22 c-15 -13 -22 -30 -22 -49c0 -12 10 -26 18 -36c13 -16 26 -32 42 -46c26 -24 52 -59 52 -80c0 -11 -1 -21 -7 -30c-8 -14 -16 -28 -28 -39c-36 -32 -75 -57 
-119 -82l-16 19c19 12 29 21 43 38c10 12 18 26 18 41c0 16 -27 46 -46 67c-14 15 -31 29 -42 46c-7 9 -7 23 -7 35 c0 23 11 44 24 64c18 27 42 51 67 73c45 38 104 67 181 67c54 0 97 -17 134 -48c24 -20 48 -53 60 -80c42 36 88 71 140 94c39 17 81 30 124 30c21 0 43 -2 58 -15c15 -12 21 -42 24 -65c3 -20 11 -39 25 -56c10 -11 23 -22 40 -22c13 0 30 4 41 7zM794 109 c0 15 -8 34 -16 47c-12 21 -26 40 -42 59c-27 32 -57 76 -57 100c0 19 9 37 23 52c25 28 54 53 84 76c-19 3 -39 9 -48 17c-7 6 -23 22 -23 62c0 15 -2 30 -6 44c-3 10 -7 20 -15 27c-14 12 -32 19 -51 19c-37 0 -69 -17 -95 -39c-10 -9 -35 -33 -54 -55v-376l175 -63 c26 -9 50 -17 77 -17c12 0 24 4 32 11c12 10 16 22 16 36"],120188:[726,82,976,11,917,"917 -3c-53 -28 -107 -55 -163 -79c-60 22 -117 48 -169 83c-36 -17 -84 -30 -125 -30c-64 0 -126 18 -187 37c-41 13 -90 28 -125 28c-47 0 -86 -25 -116 -54l-21 22l89 110c35 8 70 17 95 38c15 12 22 29 22 47c0 19 -8 37 -17 55c-14 26 -31 50 -52 76 c-26 32 -50 74 -51 104c0 44 77 117 150 157l14 -14c-26 -18 -56 -42 -56 -77c0 -16 25 -67 47 -98c24 -32 40 -59 59 -93c5 -9 10 -23 10 -33c0 -22 -7 -41 -21 -60c-23 -30 -56 -62 -88 -83c22 0 41 -2 70 -9c58 -14 117 -31 173 -50c49 -17 82 -27 147 -27 c26 0 51 15 70 29c32 25 48 66 60 101c15 49 20 100 20 150c0 40 -5 81 -18 119c-11 30 -26 59 -52 81c-24 20 -56 29 -88 36c-46 10 -74 13 -122 13c-25 0 -47 -1 -68 -3c-15 -1 -30 -1 -44 -1c-46 0 -64 26 -64 36c0 24 4 35 35 60c18 13 52 45 62 58h25 c-6 -9 -11 -19 -11 -30c0 -9 10 -15 20 -17c12 -3 69 -5 103 -7c56 -3 112 -10 165 -24c40 -12 79 -27 109 -53c29 -24 45 -55 57 -87c15 -42 20 -86 20 -130c0 -86 -29 -170 -81 -244c-22 -24 -56 -48 -84 -68c20 -18 39 -33 63 -47c16 -8 33 -17 52 -17c29 0 58 7 86 16 v-21"],120189:[689,29,977,19,977,"977 72l-170 -101c-23 46 -57 92 -89 137c0 46 0 118 -3 139c-2 18 -9 45 -24 59c-20 16 -39 29 -66 34c-15 3 -31 4 -46 4c-20 0 -40 -7 -59 -15c-11 -68 -30 -118 -67 -179l-230 -178l-51 41c-10 8 -23 15 -36 15s-24 -3 -35 -9c-16 -9 -29 -22 -41 -37l-16 16 c45 53 113 126 158 126c21 0 34 -7 44 -14c19 -13 36 -28 52 -43c13 9 26 19 
38 29c29 24 38 59 45 93c13 55 16 111 16 168c0 43 -6 86 -21 128c-13 33 -34 65 -64 91c-28 24 -62 43 -107 43c-29 0 -59 -12 -72 -22c-15 -13 -22 -30 -22 -49c0 -12 10 -26 18 -36 c13 -16 26 -32 42 -46c26 -24 52 -59 52 -80c0 -11 -1 -21 -7 -30c-8 -14 -16 -28 -28 -39c-36 -32 -75 -57 -119 -82l-16 19c19 12 29 21 43 38c10 12 18 26 18 41c0 16 -27 46 -46 67c-14 15 -31 29 -42 46c-7 9 -7 23 -7 35c0 23 11 44 24 64c18 27 42 51 67 73 c45 38 109 71 187 71c63 0 109 -19 150 -54c26 -22 50 -52 61 -81c35 35 77 72 122 98c33 18 70 33 109 33c26 0 50 -7 68 -23c21 -17 25 -48 28 -74c3 -23 6 -53 21 -66c12 -10 29 -12 45 -12c11 0 26 1 37 5l6 -13c-93 -36 -146 -62 -210 -101c34 -4 73 -22 94 -45 c0 -58 1 -127 7 -163c4 -22 12 -45 25 -63c12 -16 24 -30 39 -43c7 -6 22 -6 34 -6c14 0 29 7 43 14zM778 458c-14 0 -30 6 -41 16c-14 13 -20 41 -23 64s-9 55 -21 65c-9 8 -20 15 -33 15c-29 0 -54 -14 -74 -31c-28 -25 -44 -43 -64 -73c3 -17 3 -47 3 -64 c0 -31 -1 -59 -3 -92c43 18 104 43 156 47"],120190:[685,31,978,82,906,"906 261c0 -69 -44 -129 -101 -177c-94 -80 -225 -115 -356 -115c-98 0 -192 32 -261 91c-76 64 -106 151 -106 252c0 100 45 184 126 253c78 65 164 91 287 120c44 -16 85 -37 128 -55c50 -21 110 -46 156 -46c25 0 46 7 64 22c17 14 24 28 33 47l25 -14 c-12 -31 -34 -59 -61 -82c-33 -28 -75 -47 -120 -58c-18 -4 -36 -9 -55 -9c-26 0 -49 12 -71 23l-131 67c-28 14 -58 30 -89 30c-35 0 -68 -14 -92 -34c-58 -49 -81 -118 -81 -187c0 -95 31 -192 112 -260c67 -57 158 -88 253 -88c79 0 159 18 215 66c28 24 41 58 41 91 c0 32 -16 63 -43 85c-36 31 -89 39 -139 39c-44 0 -86 -8 -128 -17c-42 -8 -111 -22 -125 -22c-33 0 -66 7 -89 27c-20 17 -28 40 -28 64c0 37 23 70 54 96c28 24 67 37 103 45l13 -14c-11 -4 -21 -12 -30 -19c-17 -14 -30 -33 -30 -53c0 -16 5 -31 18 -42 c18 -15 46 -15 70 -15c48 0 94 5 140 13c45 7 88 21 133 21c43 0 87 -10 118 -36c33 -29 47 -69 47 -109"],120191:[691,30,789,30,798,"798 659l-121 -118c-29 -9 -60 -13 -92 -13c-42 0 -98 8 -139 15c-32 -28 -48 -49 -48 -85c0 -16 8 -36 19 -49c15 -19 32 -39 50 -56c34 -32 82 -77 82 -106c0 -35 -69 -91 -116 
-127c48 -19 117 -46 155 -46c19 0 38 6 55 14c28 11 52 27 76 43l11 -24 c-55 -50 -117 -95 -181 -137c-53 0 -103 18 -150 38c-39 16 -85 34 -123 34c-18 0 -37 0 -51 -5c-36 -13 -59 -30 -88 -52l-12 18c57 51 119 97 191 132c30 0 62 -1 91 -8c18 15 29 33 29 55c0 24 -42 66 -72 94c-19 18 -37 37 -53 57c-11 14 -20 29 -25 45 c32 63 76 122 134 171c-75 14 -191 37 -224 37c-27 0 -62 -3 -81 -19c-13 -11 -18 -27 -18 -41c0 -39 35 -68 72 -89l2 -10l-83 -68c-32 25 -58 58 -58 96c0 51 31 98 73 135c63 53 144 90 234 90c37 0 124 -14 185 -25c49 -7 99 -14 150 -14c46 0 94 23 126 50v-32"],120192:[689,39,850,16,871,"871 72l-200 -111l-71 123l-127 -105c-19 -11 -41 -13 -55 -13c-45 0 -89 14 -129 30c-52 21 -116 42 -165 42c-18 0 -44 -8 -90 -50l-18 20l106 109c32 5 71 15 87 25c18 11 51 38 61 65c3 11 4 26 4 37c0 20 -6 40 -15 60c-11 27 -27 53 -44 78c-26 39 -59 90 -59 122 c0 7 0 16 2 22c15 25 63 80 104 114c21 18 45 34 71 49l23 -16c-16 -10 -31 -22 -46 -34c-17 -14 -30 -32 -30 -53c0 -37 16 -73 35 -107c23 -40 41 -83 50 -127c4 -17 6 -34 6 -51c0 -18 -2 -35 -8 -52c-38 -42 -95 -103 -125 -121c26 -4 94 -26 136 -41 c36 -13 79 -24 113 -24c31 0 55 18 77 36c14 13 22 30 23 47c4 67 7 134 7 201c0 82 -5 164 -13 246c67 24 128 57 187 92l17 -15l-25 -24c-25 -26 -34 -60 -38 -93c-7 -58 -8 -115 -8 -173c0 -61 1 -121 5 -182c1 -18 2 -22 15 -52c9 -21 22 -44 36 -59c7 -9 19 -13 33 -13 s43 10 64 18"],120193:[687,29,981,25,966,"966 604c-18 -10 -34 -22 -49 -35c-37 -31 -72 -69 -72 -115c0 -27 0 -58 5 -81l17 -95c4 -22 7 -45 7 -68c0 -46 -16 -88 -56 -120c-47 -39 -106 -82 -126 -93c-27 -15 -56 -26 -90 -26c-26 0 -51 8 -76 16c-40 13 -78 29 -116 46c-33 15 -66 29 -101 40 c-22 7 -44 13 -67 13s-45 -7 -65 -16c-28 -12 -54 -28 -77 -46l-13 18c50 42 109 82 166 121c30 7 55 22 77 40c20 17 29 41 35 65c10 36 13 74 13 111c0 42 -5 84 -19 124c-11 31 -27 61 -54 84c-25 21 -54 37 -93 37c-30 0 -60 -17 -73 -27c-15 -13 -22 -30 -22 -49 c0 -12 10 -26 18 -36c13 -16 26 -32 42 -46c26 -24 52 -59 52 -80c0 -11 -1 -21 -7 -30c-8 -14 -16 -28 -28 -39c-36 -32 -75 -57 -119 -82l-16 19c19 12 29 
21 43 38c10 12 18 26 18 41c0 16 -27 46 -46 67c-14 15 -31 29 -42 46c-7 9 -7 23 -7 35c0 23 11 44 24 64 c18 27 42 51 67 73c45 38 109 69 187 69c52 0 93 -13 126 -41c32 -27 57 -69 66 -107c42 40 87 81 137 111c29 17 60 35 94 35c17 0 34 -5 50 -10c22 -8 43 -19 63 -31s49 -29 64 -29c21 0 41 3 61 14zM832 527c-40 0 -76 16 -109 35c-28 17 -62 36 -93 36 c-22 0 -40 -14 -55 -27c-28 -22 -50 -49 -70 -76c1 -14 2 -29 2 -43c0 -65 -12 -130 -45 -190l-117 -88c41 -1 143 -46 213 -75c47 -20 105 -44 148 -44c13 0 29 4 38 12c10 8 13 22 13 34c0 17 -2 38 -5 56l-23 129c-4 21 -4 41 -4 62c0 29 0 71 12 86c27 31 62 63 95 93"],120194:[682,30,1235,31,1240,"1240 628l-1 -25c-36 -26 -83 -61 -112 -98c-18 -22 -24 -47 -24 -74c0 -23 10 -62 18 -93c11 -42 21 -81 21 -124c0 -22 -2 -43 -7 -65c-4 -15 -9 -29 -20 -41l-181 -123c-24 -8 -48 -13 -73 -13c-35 0 -75 14 -110 25c-58 19 -116 35 -177 42c-42 -19 -87 -42 -126 -64 c-10 -4 -22 -5 -35 -5c-52 0 -100 20 -145 43c-32 16 -67 32 -99 32c-21 0 -40 -6 -57 -15c-24 -11 -45 -27 -65 -44l-13 22c51 48 109 90 167 132c54 0 90 14 122 41c27 23 42 53 53 84c14 42 18 86 18 129c0 67 -15 136 -72 184c-27 24 -60 39 -103 39 c-29 0 -61 -10 -74 -20c-15 -13 -22 -30 -22 -49c0 -12 10 -26 18 -36c13 -16 26 -32 42 -46c26 -24 52 -59 52 -80c0 -11 -1 -21 -7 -30c-8 -14 -16 -28 -28 -39c-36 -32 -75 -57 -119 -82l-16 19c19 12 29 21 43 38c10 12 18 26 18 41c0 16 -27 46 -46 67 c-14 15 -31 29 -42 46c-7 9 -7 23 -7 35c0 23 11 44 24 64c18 27 42 51 67 73c45 38 109 64 185 64c63 0 111 -19 151 -53c24 -20 41 -46 52 -73c64 45 111 76 178 114c35 -19 60 -48 101 -110c37 32 78 65 122 91c28 15 56 31 90 31c37 0 70 -16 101 -33 c30 -16 62 -33 98 -33c11 0 30 7 40 12zM699 349c0 42 -3 85 -14 126c-8 28 -18 55 -42 76c-15 12 -32 22 -50 31c-30 -18 -53 -36 -74 -53c3 -14 4 -48 4 -62c0 -93 -22 -185 -88 -261l-135 -75c16 -5 67 -26 99 -42c36 -18 76 -34 119 -34c7 0 14 2 21 5 c31 14 67 30 87 50c27 27 48 57 58 91c15 50 15 96 15 148zM1104 527c-2 -1 -14 -1 -24 -1c-23 0 -64 20 -81 33c-21 16 -70 28 -89 28c-20 0 -42 -3 -59 -12c-21 -13 -35 -25 -51 -43c9 
-36 14 -81 14 -118c0 -70 -20 -137 -50 -202c-28 -37 -50 -62 -76 -82 c48 0 138 -32 200 -54c28 -10 57 -20 88 -20c13 0 25 6 34 12c12 8 17 21 17 52c0 34 -13 96 -25 140c-12 41 -20 81 -20 120c0 19 2 42 54 86"],120195:[682,35,849,32,835,"835 578l-131 -64c-48 17 -83 50 -109 87c-76 -58 -106 -93 -106 -235c65 0 146 0 194 4l-64 -58c-43 3 -109 3 -135 3c0 -79 20 -160 87 -217c30 -26 73 -38 116 -38c32 0 75 16 110 30v-24c-46 -27 -89 -59 -132 -91c-23 -7 -61 -10 -64 -10c-41 0 -80 14 -109 38 c-18 15 -31 34 -43 52c-16 27 -29 56 -40 84c-58 -54 -120 -106 -193 -146c-20 -11 -48 -25 -64 -25c-31 0 -82 30 -120 53l81 81c17 -15 36 -29 58 -39c15 -8 32 -14 51 -14c14 0 28 4 40 10c18 9 34 21 49 33c26 23 39 56 48 86c13 45 17 91 17 137c-70 0 -157 0 -209 -3 l65 56l146 -2c0 38 -2 77 -12 115c-7 24 -17 46 -38 64s-47 33 -77 33c-37 0 -68 -11 -93 -33l-41 -37l-18 20c53 54 112 104 173 152c51 -5 102 -17 139 -48c34 -29 60 -66 65 -107c25 30 54 57 84 83c39 33 87 57 139 74c14 -30 29 -60 56 -82c10 -9 25 -10 39 -10 s30 4 41 7v-19"],120196:[689,214,983,32,879,"879 339c0 -123 -27 -246 -82 -360c-51 -56 -111 -105 -183 -142c-59 -30 -125 -51 -194 -51c-61 0 -121 17 -164 53l50 97l15 -2c9 -25 21 -50 43 -69c27 -23 66 -31 103 -31c60 0 119 16 162 52c49 42 72 98 89 154c25 82 31 166 31 251c0 53 -2 107 -14 159 c-7 31 -16 63 -40 86c-21 20 -49 31 -80 31c-20 0 -50 -6 -67 -17c-21 -13 -30 -20 -44 -38c4 -18 6 -48 6 -66c0 -77 -12 -151 -59 -217c-46 -34 -100 -70 -144 -96c10 -1 68 -21 98 -34c27 -12 58 -20 87 -20c17 0 32 8 46 15c24 12 45 27 66 43l6 -21 c-46 -55 -97 -107 -164 -143c-58 9 -113 32 -165 57c-27 13 -64 20 -91 20c-23 0 -46 -7 -66 -17c-27 -13 -50 -31 -72 -50l-19 19c50 50 100 100 160 142c14 3 26 4 39 4s27 -1 43 -2c18 9 40 21 58 36c23 21 30 49 37 77c10 46 13 93 13 139c0 69 -23 138 -81 187 c-23 20 -49 37 -85 37c-31 0 -58 -15 -71 -25c-15 -13 -22 -30 -22 -49c0 -12 10 -26 18 -36c13 -16 26 -32 42 -46c26 -24 52 -59 52 -80c0 -11 -1 -21 -7 -30c-8 -14 -16 -28 -28 -39c-36 -32 -75 -57 -119 -82l-16 19c19 12 29 21 43 38c10 12 18 26 18 41 c0 16 -27 
46 -46 67c-14 15 -31 29 -42 46c-7 9 -7 23 -7 35c0 23 11 44 24 64c18 27 42 51 67 73c45 38 111 65 189 65c52 0 92 -17 125 -45c27 -23 45 -62 57 -96c41 32 84 62 129 91c32 20 65 40 101 56c35 -40 81 -71 131 -95c12 -85 24 -172 24 -255"],120197:[718,137,726,17,633,"633 232c0 -55 -8 -111 -28 -164c-17 -43 -42 -84 -80 -117c-67 -56 -157 -88 -251 -88c-36 0 -81 0 -109 2c-14 2 -34 5 -34 20l1 51c0 11 -4 22 -13 29c-7 7 -17 11 -28 11c-23 0 -45 -9 -64 -20l-10 15c30 22 64 41 100 56c28 11 58 19 89 19c16 0 33 -3 44 -12 c13 -12 14 -37 14 -49c0 -11 -1 -22 -3 -32c-3 -13 -3 -28 -3 -41c0 -10 11 -16 21 -19c6 -2 14 -3 22 -3c45 0 87 16 118 43c28 23 42 60 52 92c15 42 19 87 19 131c0 29 -3 76 -27 96c-35 29 -85 37 -133 37c-36 0 -72 -8 -107 -17l-17 13l56 81c49 9 92 32 127 61 c32 27 43 64 43 101c0 9 -2 18 -10 25c-17 14 -41 18 -64 18c-30 0 -60 -2 -89 -5l-95 -11c-23 0 -45 7 -62 21c-6 5 -12 20 -15 33c27 37 67 84 92 108l18 1c-3 -6 -4 -13 -4 -20c0 -8 3 -17 11 -24c10 -8 25 -10 39 -10l177 6c36 0 74 -5 100 -27c14 -12 24 -28 24 -46 c0 -62 -43 -115 -94 -159c-26 -22 -68 -46 -100 -62c102 0 190 -4 250 -66c14 -14 23 -62 23 -78"],120198:[472,32,602,80,587,"587 82l-145 -111c-20 30 -44 58 -72 83l-1 26l-186 -112c-39 32 -72 68 -102 105c-1 20 -1 39 -1 59c0 79 7 157 20 235c69 30 134 67 197 105c40 -16 83 -25 126 -32l66 28l8 -7c-17 -66 -17 -133 -17 -200l3 -118l52 -63c11 0 32 12 47 20zM368 110l-2 256 c-49 0 -98 11 -145 23c-16 -24 -27 -61 -27 -153c0 -32 2 -65 4 -98c25 -25 52 -52 81 -72c14 0 63 27 89 44"],120199:[691,32,589,86,504,"504 314c0 -73 -5 -146 -19 -218c-87 -48 -175 -94 -271 -128c-46 32 -90 65 -128 103c13 112 13 227 13 340c0 85 -3 171 -13 255l20 8l25 -78c54 34 113 71 167 95l16 -13c-27 -19 -45 -36 -67 -58c-13 -13 -22 -30 -27 -47c-8 -26 -9 -52 -9 -78v-107l9 -4l142 90 c35 -17 92 -41 139 -60c2 -33 3 -67 3 -100zM386 252c0 32 -2 63 -6 95c-40 20 -65 29 -95 37c-22 -4 -55 -23 -72 -36l1 -216c21 -18 47 -40 74 -55c19 -11 39 -20 61 -23c20 40 29 83 33 126c3 25 4 49 4 72"],120200:[473,26,463,87,424,"424 429l-78 -96h-10c-47 42 -90 57 
-114 63c-13 -19 -17 -96 -17 -126c0 -38 3 -78 6 -116c32 -29 64 -57 101 -82l106 42v-21c-63 -41 -168 -104 -197 -119c-47 34 -90 73 -130 114c-3 42 -4 87 -4 130c0 53 3 104 13 148l195 107c46 -8 90 -22 129 -44"],120201:[632,29,588,-1,511,"511 368c0 -88 -14 -176 -44 -260c-49 -40 -140 -93 -213 -137c-59 33 -115 71 -165 114c-3 18 -4 45 -4 78c0 46 2 106 7 172c47 43 111 89 165 124l22 -10c-20 -15 -46 -41 -65 -60c-10 -48 -11 -88 -11 -135c0 -35 2 -71 5 -106c39 -34 85 -62 135 -82 c24 40 50 109 50 243c0 34 -2 68 -5 102c-50 40 -103 78 -163 109c-37 20 -78 38 -122 38c-30 0 -63 -9 -90 -20l-14 19c56 30 117 53 178 75c54 -2 99 -24 146 -49c58 -31 126 -71 181 -108c5 -36 7 -72 7 -107"],120202:[471,28,471,80,429,"429 362c-61 -51 -160 -122 -241 -181c0 0 6 -38 18 -45c26 -20 53 -38 101 -68c31 5 75 23 110 40l9 -16c-64 -41 -129 -82 -197 -120c-53 34 -100 73 -144 114c-3 36 -5 72 -5 108c0 59 5 118 14 176l192 101c45 -38 91 -76 143 -109zM324 316c-39 24 -102 71 -117 83 c-15 -69 -21 -114 -21 -185c45 31 96 67 138 102"],120203:[681,242,387,37,387,"387 398l-16 -39h-121c0 -117 0 -236 -19 -347c-3 -15 -10 -32 -18 -46l-113 -208l-20 4c14 34 40 97 45 131c6 38 9 83 9 125v341h-97v8l15 38h73c-13 40 -17 75 -25 115c13 48 34 92 75 127c27 23 65 34 102 34c15 0 31 -5 43 -12c20 -10 37 -23 53 -38l-73 -82h-16 c-11 17 -20 30 -37 43c-11 9 -26 19 -41 19c-14 0 -28 -2 -39 -11c-11 -10 -17 -25 -17 -39c0 -33 26 -61 53 -85c14 -13 27 -26 37 -41c7 -11 9 -16 13 -30h134v-7"],120204:[473,208,594,16,541,"414 -74c0 37 -18 106 -32 157l-198 -133c55 -30 138 -75 177 -75c16 0 33 1 44 11c9 7 9 26 9 40zM541 -22c-61 -66 -128 -128 -202 -184c-10 -1 -21 -2 -31 -2c-32 0 -64 6 -94 14c-51 15 -99 39 -142 65l-38 -24l-18 15l169 115l-93 93c-3 38 -6 77 -6 115 c0 61 6 121 22 181c64 38 129 75 197 107c41 -19 90 -41 128 -47l86 39l13 -4c-15 -30 -22 -62 -27 -94c-7 -45 -9 -90 -9 -135c0 -86 32 -169 45 -254zM388 343c-57 8 -120 35 -162 57c-11 -52 -17 -112 -17 -166c0 -34 2 -68 6 -102c25 -25 55 -54 90 -70l78 49v66 c0 56 2 111 5 166"],120205:[687,203,615,88,507,"507 271l-3 
-121c-3 -61 -7 -122 -20 -181c-49 -57 -116 -117 -192 -172l-24 14c22 15 50 39 73 61c18 17 28 44 35 68c9 33 18 87 18 175c0 74 -6 148 -12 222c-32 17 -74 35 -110 42l-54 -32c-3 -39 -5 -78 -5 -117c0 -34 1 -68 5 -102c16 -24 34 -46 54 -68 c-42 -24 -80 -54 -116 -84c-19 30 -42 59 -68 86c9 123 16 246 16 370c0 77 -3 155 -10 232l19 9l24 -76c51 31 107 65 155 90l18 -15c-12 -7 -23 -14 -34 -23c-45 -37 -56 -90 -56 -142l-2 -130c51 28 99 62 144 94c45 -23 93 -43 142 -61"],120206:[686,26,331,2,327,"327 87c-57 -34 -110 -74 -162 -113l-73 82c7 58 10 117 10 176c0 45 -2 89 -5 134l-38 21l-39 -17l-18 12c47 31 93 61 136 96c13 -10 54 -38 82 -56c-3 -56 -5 -112 -5 -169l3 -115c0 -10 5 -20 12 -29c8 -11 20 -27 31 -27c5 0 15 3 17 4l44 19zM242 615l-80 -75 c-32 22 -62 47 -89 73l91 73c23 -25 50 -50 78 -71"],120207:[683,207,331,-19,238,"238 256c0 -95 -6 -191 -25 -285c-62 -61 -142 -127 -216 -178l-16 18c34 21 70 46 95 71c27 27 37 64 40 100c5 69 5 157 5 235c0 37 0 90 -3 110c-1 15 -4 30 -16 41c-8 7 -19 12 -31 12l-38 -17l-15 19l130 101c28 -21 55 -39 85 -58c3 -56 5 -112 5 -169zM235 614 l-81 -73c-28 22 -56 44 -80 70l81 72c10 -10 53 -46 80 -69"],120208:[683,25,464,33,432,"432 90l-198 -115l-129 96c2 15 3 29 4 44l5 193h-81l14 38h68c0 107 0 244 -11 319l21 8l27 -76c45 34 97 62 150 86l15 -16c-26 -14 -41 -26 -57 -47c-11 -15 -20 -31 -23 -48c-6 -23 -7 -49 -7 -73c41 29 95 64 138 86c16 -19 32 -39 44 -60c8 -14 13 -25 13 -41 c0 -21 -15 -38 -30 -54l-82 -84h96l-17 -38h-166c0 -55 1 -118 7 -172l90 -68c10 0 69 25 100 41zM336 435c0 30 -20 60 -41 83c-16 -10 -32 -21 -43 -34c-12 -15 -16 -35 -18 -52c-3 -28 -4 -57 -4 -86h32c16 0 36 19 49 33c16 17 25 36 25 56"],120209:[682,24,336,100,315,"315 86l-151 -110l-64 79c8 73 11 162 11 257l-8 349l19 4l20 -72c47 32 100 63 153 89l11 -9c-14 -10 -30 -26 -39 -35c-17 -17 -26 -34 -32 -56c-8 -31 -11 -63 -11 -95l-2 -157c0 -76 2 -151 9 -218l26 -29c9 0 32 9 49 18"],120210:[476,31,921,16,900,"900 78c-58 -33 -112 -70 -165 -108c-22 31 -47 61 -75 89c11 34 14 92 14 154l-3 131c-30 13 -75 30 -107 38l-55 -34c-3 -39 
-4 -76 -4 -115l2 -102l57 -73l-120 -86l-59 81c8 43 10 99 10 157l-2 136c-25 11 -75 24 -105 31l-61 -34v-65c0 -51 2 -102 5 -152l55 -74 l-114 -83c-21 31 -43 63 -66 90c6 45 8 95 8 146l-4 152l-36 27l-46 -19l-13 13l135 98c23 -21 49 -40 78 -56c-2 -17 -4 -51 -4 -51c54 30 104 64 150 103c45 -21 90 -43 139 -56l-3 -40l147 95c47 -21 95 -39 144 -53c-10 -56 -14 -112 -14 -168c0 -42 2 -84 5 -126 l28 -38c4 -4 11 -9 17 -9c0 0 11 3 15 5l34 18"],120211:[474,28,653,3,608,"608 88c-52 -32 -104 -74 -153 -114c-22 32 -52 64 -80 94c9 48 12 99 12 151c0 42 -2 84 -4 126c-31 15 -74 27 -108 36l-62 -31l1 -221l61 -67l-112 -90c-24 26 -49 53 -78 76c8 25 10 69 10 120c0 64 -4 139 -7 198l-24 22l-43 -21l-18 14l130 93 c26 -18 55 -38 82 -50c-1 -16 -1 -48 -1 -48c53 29 103 62 151 98c41 -22 96 -44 147 -63c-8 -70 -11 -140 -11 -211l1 -68l47 -52c10 3 35 15 51 24"],120212:[482,34,609,107,515,"515 337c0 -39 -1 -77 -4 -116c-3 -44 -8 -89 -21 -132c-73 -48 -153 -89 -236 -123c-52 30 -101 62 -146 99c-1 20 -1 39 -1 59c0 80 8 159 22 238c71 38 140 78 207 120c54 -33 115 -56 178 -72zM405 272c0 27 -2 55 -4 82c-51 7 -99 24 -145 43 c-15 -25 -31 -79 -31 -159c0 -35 3 -71 7 -107c42 -33 90 -62 145 -76c19 71 28 144 28 217"],120213:[558,208,603,-2,519,"519 310c0 -85 -7 -143 -30 -226c-55 -29 -105 -65 -152 -103c-29 22 -65 36 -104 41c0 -61 4 -122 9 -183l-113 -47l-17 6l6 234c-41 0 -78 -18 -108 -41l-12 17c36 33 83 77 119 89v262c-23 21 -45 51 -52 78c30 44 68 83 108 121l20 -6c-10 -13 -19 -27 -19 -42 c0 -14 7 -26 15 -37c12 -17 27 -32 44 -45v-42l143 96l135 -57c5 -26 8 -87 8 -115zM404 202c0 50 -4 100 -10 147c-33 14 -76 32 -111 42l-50 -35v-226c54 -8 109 -25 157 -53c11 42 14 80 14 125"],120214:[485,212,595,87,515,"515 479c-30 -103 -30 -291 -30 -438c0 -71 4 -141 9 -212l-113 -41l-19 10c12 93 14 187 14 280l-188 -108l-98 103c-2 22 -3 44 -3 67c0 77 11 155 27 232l194 106c40 -16 82 -29 125 -36l70 43zM378 358c-55 4 -114 23 -144 39c-16 -29 -28 -92 -28 -187 c0 -20 1 -41 1 -62c21 -28 48 -61 76 -83c30 11 62 29 90 45c0 83 0 173 5 
248"],120215:[473,26,459,12,453,"453 394c-28 -26 -66 -51 -102 -74c-3 -2 -6 -4 -12 -4c-15 0 -23 6 -38 18l-46 39c-5 -3 -14 -10 -18 -14c-2 -18 -4 -73 -4 -123l2 -89c1 -11 8 -21 16 -29l62 -48l89 31l1 -21l-173 -106c-38 37 -81 70 -125 102c12 43 13 111 13 146c0 44 -1 74 -4 118 c-15 15 -29 29 -47 40l-44 -12l-11 14l139 91l85 -62v-31l107 91c16 -23 32 -49 52 -66c6 -5 14 -7 22 -7c9 0 25 5 36 9v-13"],120216:[480,35,522,-24,482,"482 442l-143 -100c-18 0 -41 9 -60 17c-29 14 -56 31 -82 49c-5 -23 -9 -46 -9 -69c0 -33 9 -70 28 -100c51 17 104 40 155 63c22 -10 43 -22 62 -36c5 -33 9 -67 9 -101c0 -41 -5 -82 -19 -121c-67 -23 -134 -48 -196 -79c-25 21 -52 41 -83 56c-22 11 -45 20 -71 20 c-32 0 -61 -15 -86 -31l-11 16l126 99c17 4 34 6 52 6c39 0 116 -55 165 -93c4 23 5 50 5 74c0 27 -2 55 -5 83c-15 15 -33 30 -57 30c-16 0 -30 -5 -44 -11l-95 -47c-15 13 -29 33 -41 51c-4 6 -5 13 -5 20v139l187 103l66 -38c17 -10 37 -14 57 -14c24 0 60 17 84 33"],120217:[654,27,393,47,407,"407 84l-178 -111c-32 32 -67 61 -106 87c6 74 6 200 6 299h-82v8l15 38h67v148l166 101l12 -14c-20 -16 -40 -34 -47 -42c-9 -9 -14 -118 -14 -193h107v-7l-16 -39h-92c0 -75 0 -149 5 -223c1 -7 3 -14 8 -19c17 -20 47 -39 70 -44c8 1 53 20 75 31"],120218:[473,35,588,9,604,"604 88l-159 -120l-64 105c-62 -25 -125 -73 -163 -108c-42 32 -105 60 -151 77c16 23 27 47 35 73c10 36 17 73 17 109c0 45 -9 88 -19 132c-11 12 -23 23 -42 28l-35 -19l-14 14l134 94c3 -3 52 -40 78 -59l4 -155c0 -31 -3 -62 -9 -93c-5 -21 -12 -43 -26 -62 c33 -7 79 -26 117 -42l67 36l-4 326l121 44l14 -9c-15 -100 -17 -200 -17 -301c0 -9 14 -54 37 -74c0 0 7 -5 11 -5c0 0 10 1 12 2l50 20"],120219:[546,28,604,56,507,"507 316c0 -36 -1 -72 -5 -108c-5 -44 -11 -88 -33 -128l-253 -108c-35 36 -76 67 -121 94c9 37 9 97 9 146c0 46 -3 96 -6 143c-19 21 -42 47 -42 67c0 34 59 87 101 124l20 -4c-11 -12 -18 -27 -18 -42c0 -21 36 -58 62 -84l-2 -29l146 89l139 -61c2 -33 3 -66 3 -99z M389 263c0 28 -1 57 -3 86c-31 16 -87 40 -98 40c-24 0 -51 -16 -69 -28v-209c0 -6 0 -14 2 -19c3 -7 9 -13 16 -19c23 -19 58 -38 92 -55c3 -1 10 -3 14 
-3c3 0 14 2 17 11c22 64 29 129 29 196"],120220:[549,33,917,55,815,"815 409c-1 -75 -2 -150 -13 -225c-6 -44 -15 -89 -39 -128c-71 -27 -140 -55 -206 -89c-32 20 -65 39 -99 55c-25 12 -51 22 -79 29c10 35 12 111 12 186l-1 95c-28 20 -90 50 -97 50c-13 0 -43 -11 -67 -27c-2 -28 -3 -57 -3 -85c0 -52 3 -104 7 -157l51 -63l-109 -80 c-25 32 -56 71 -82 89c11 29 19 86 19 157c0 46 -3 93 -6 140l-37 39c-8 9 -11 22 -11 33c0 4 1 8 4 11c30 38 65 80 100 110l16 -2c-10 -14 -15 -31 -15 -47c0 -21 40 -57 68 -81v-37c38 22 92 59 137 89c46 -26 95 -48 146 -67v-34l168 100c43 -23 89 -43 136 -61z M697 336c-33 17 -96 42 -106 42c-11 0 -54 -18 -77 -33c-4 -41 -6 -83 -6 -124l2 -68c1 -12 6 -25 18 -32c34 -22 70 -41 110 -57c2 -1 8 -3 11 -3c0 0 10 4 15 17c14 34 21 69 25 104c7 51 8 102 8 154"],120221:[471,188,458,8,449,"449 388l-79 -75c-10 2 -20 4 -28 9c-22 14 -56 43 -61 48c-3 -2 -21 -18 -25 -24c-4 -24 -6 -49 -6 -85c0 -43 0 -97 3 -129c0 0 2 -6 3 -8c4 -7 23 -20 78 -56c12 0 56 18 87 32l2 -18l-179 -111l-91 73c-19 -3 -33 -15 -46 -27c-17 -15 -24 -35 -24 -55 c0 -13 6 -26 17 -36c35 -31 86 -40 136 -48l2 -11l-93 -55c-29 7 -58 17 -84 31c-18 10 -36 23 -46 40c-5 8 -7 17 -7 26c0 10 2 20 5 30c34 46 70 92 115 131c6 43 6 92 6 138c0 44 -1 88 -5 131c0 7 -6 16 -9 18l-46 24c-9 0 -30 -10 -39 -16l-16 12c46 33 107 77 142 94 l92 -58v-36l112 91c23 -31 52 -62 84 -80"],120222:[559,222,589,60,515,"515 289c0 -88 -5 -170 -12 -250c0 -40 -29 -73 -57 -105c-30 -34 -62 -68 -100 -98c-28 -22 -67 -43 -97 -58l-30 25c33 16 82 41 91 48c23 19 39 39 48 66c7 19 14 43 18 63c12 67 15 134 15 202c0 52 -3 104 -7 156c-29 18 -61 34 -96 43l-59 -25v-215 c0 -11 0 -24 8 -33l44 -55l-95 -92c-26 31 -68 78 -95 99c18 57 18 196 18 295c0 4 -2 10 -4 12c-20 19 -45 44 -45 65c0 14 9 27 18 39c17 22 56 60 85 88l18 -6c-10 -15 -17 -29 -17 -46c0 -13 12 -30 22 -42c15 -17 27 -27 45 -39v-41l140 93c45 -25 92 -47 141 -66"],120223:[472,215,461,-8,377,"377 74c0 -60 -12 -117 -31 -177l-156 -112c-55 0 -113 6 -153 40c-20 17 -35 38 -45 60c68 98 151 188 250 264c-33 23 -89 32 -133 35l-1 10l92 
65c17 12 35 27 35 47c0 17 -17 40 -33 50c-18 11 -29 15 -51 15c-13 0 -25 -5 -37 -10c-18 -8 -34 -19 -49 -30l-16 15 c53 47 112 88 173 126c28 -10 54 -22 75 -40c22 -19 41 -40 41 -67c0 -12 -18 -29 -32 -42l-64 -59c42 -13 81 -30 119 -50c11 -46 16 -93 16 -140zM259 -4c0 33 -2 74 -7 124c-61 -50 -150 -123 -150 -166c0 -20 12 -36 29 -49c51 -38 99 -39 108 -39c6 13 20 44 20 130"]};MathJax.Ajax.loadComplete(MathJax.OutputJax.SVG.fontDir+"/Fraktur/Regular/Main.js"); | PypiClean |
/CatLearn-0.6.2.tar.gz/CatLearn-0.6.2/catlearn/fingerprint/prototype.py | from __future__ import absolute_import
from __future__ import division
import pandas as pd
import os
class PrototypeSites(object):
    """Prototype site objective for generating prototype input."""

    def __init__(self, site_dict=None):
        """Store the site specification and derive its text form.

        Parameters
        ----------
        site_dict : dict, optional
            Mapping of site label to a list describing that site,
            e.g. ``{'A': [1]}``.  When omitted, a perovskite ABC3
            default is used (the 'C' site carries the '-omit' flag).
        """
        if site_dict is None:
            site_dict = {}
            # default is perovskite
            site_dict['A'] = [1]
            site_dict['B'] = [1]
            site_dict['C'] = [3, '-omit']
        self.site_dict = site_dict
        # dict.keys() returns a view in Python 3, which has no .sort()
        # method; sorted() yields the same deterministic ordering as the
        # original Python 2 list-and-sort idiom.
        self.site_list = sorted(site_dict.keys())
        temp_str = []
        for si in self.site_list:
            temp_str.append(' '.join([str(x) for x in self.site_dict[si]]))
        # One line per site, fields space-separated (Magpie site-info format).
        self.site_str = '\n'.join(temp_str)
class PrototypeFingerprintGenerator(object):
    """Build prototype fingerprints in a pandas.DataFrame via Magpie.

    Based on a list of ase.atoms objects.  Writes a temporary working
    directory, invokes the bundled Magpie jar through the shell, and
    reads the resulting CSV back into pandas.
    """

    def __init__(self, atoms, sites, system_name='', target='id',
                 delete_temp=True, properties=None):
        """Initialize the prototype fingerprint generator.

        Parameters
        ----------
        atoms : list
            List of structures in ase.atoms.
        sites : PrototypeSites
            PrototypeSites including all site informations.
        system_name : str
            Optional label appended to the temporary directory name.
        target : str
            Column name Magpie uses as the target.
        delete_temp : bool
            Remove the temporary directory after generation.
        properties : list, optional
            Per-structure attribute names exported with the formula.
        """
        self.atoms = atoms
        self.sites = sites
        self.system_name = system_name
        self.temp_path = 'proto_temp' + system_name
        from catlearn import __path__
        if os.path.exists(__path__[0] + '/api/magpie'):
            self.magpie_path = __path__[0] + '/api/magpie'
        else:
            raise EnvironmentError('Magpie path not exist!')
        self.target = target
        # Avoid the shared mutable default-argument pitfall: each instance
        # gets its own list unless the caller supplies one.
        self.properties = [] if properties is None else properties
        self.txt_path = self.temp_path + '/prototypes.txt'
        # The Magpie input script is built in one place (update_str) so the
        # template cannot drift between __init__ and later regeneration.
        self.update_str()
        self.magpie = 'java -jar %s/Magpie.jar' % self.magpie_path
        self.delete_temp = delete_temp

    def write_proto_input(self):
        """Write Prototype input for Magpie."""
        # Start from a clean temporary directory on every call.
        if not os.path.exists(self.temp_path):
            os.mkdir(self.temp_path)
        else:
            import shutil
            shutil.rmtree(self.temp_path)
            os.mkdir(self.temp_path)
        pro_dict = {}
        for pro in self.properties:
            pro_dict[pro] = []
        # Check whether all sites are given in atoms.
        for si in self.sites.site_list:
            if si not in self.properties:
                raise ValueError('No information for %s' % si)
        fml = []
        id_list = []
        for i, at in enumerate(self.atoms):
            at_name = ''
            for si in self.sites.site_list:
                at_name = at_name + \
                    getattr(at, si) + str(self.sites.site_dict[si][0])
            # Drop the implicit stoichiometry "1" from the formula string.
            at_name = at_name.replace('1', '')
            fml.append(at_name)
            for pro in self.properties:
                if hasattr(at, pro):
                    pro_dict[pro].append(getattr(at, pro))
                else:
                    pro_dict[pro].append(None)
            id_list.append(i)
        pro_dict['formula'] = fml
        pro_dict['id'] = id_list
        temp_pd = pd.DataFrame.from_dict(pro_dict)
        temp_pd = temp_pd[['formula', 'id'] + self.properties]
        self.input_pd = temp_pd
        temp_pd.to_csv(self.txt_path, sep=' ', index=False)
        # Context managers ensure the files are closed even on write errors.
        with open(self.temp_path + '/prototype_FP.in', 'w') as f:
            f.writelines(self.proto_input)
        with open(self.temp_path + '/site-info.txt', 'w') as f:
            f.write(self.sites.site_str)

    def run_proto(self):
        """Call Magpie to generate Prototype FP and write to proto_FP.csv."""
        os.system("%s %s/prototype_FP.in |tee %s/prototype_FP.log" % (
            self.magpie, self.temp_path, self.temp_path))

    def update_str(self):
        """(Re)build the Magpie input script from the current attributes."""
        self.proto_input = '''data = new data.materials.PrototypeDataset %s/site-info.txt
data attributes properties directory %s/lookup-data
data attributes properties add set general
data import ./%s
data target %s
data attributes generate
save data ./%s/proto_FP csv
exit''' % (self.temp_path, self.magpie_path, self.txt_path,
           self.target, self.temp_path)

    def generate(self):
        """Generate Prototype fingerprint and return all the fingerprint.

        Returns
        -------
        FP : pandas.DataFrame
        """
        print('Generate Prototype fingerprint of %d structures' %
              len(self.atoms))
        self.update_str()
        self.write_proto_input()
        self.run_proto()
        try:
            FP = pd.read_csv(self.temp_path + '/proto_FP.csv')
        except Exception:
            # Magpie only writes proto_FP.csv when a Java runtime exists;
            # a missing/unreadable file is the usual symptom.  Narrowed
            # from a bare ``except:`` so KeyboardInterrupt is not eaten.
            raise EnvironmentError(
                'Please install Java! https://java.com/en/download/')
        if self.delete_temp:
            import shutil
            shutil.rmtree(self.temp_path)
        return FP

    def generate_all(self):
        """Generate fingerprints with target 'id' and merge the input table."""
        self.target = 'id'
        temp_FP = self.generate()
        return pd.merge(self.input_pd, temp_FP, left_on='id', right_on='id')
/CLEMENTDNA-1.0.4.tar.gz/CLEMENTDNA-1.0.4/CLEMENT/visualizationsingle.py | import palettable
import matplotlib
import seaborn as sns
import numpy as np
from scipy.stats import kde
import extract
from sklearn.decomposition import TruncatedSVD, PCA
from mpl_toolkits import mplot3d
def drawfigure_1d(membership, output_suptitle, output_filename, np_vaf, samplename_dict, includefp, fp_index, makeone_index, **kwargs):
    """Plot per-cluster kernel-density estimates of mixture (= VAF * 2).

    Parameters
    ----------
    membership : array-like of int
        Cluster index per mutation (fed to np.bincount, so assumed to be
        non-negative integers).
    output_suptitle : str
        Figure title.
    output_filename : str
        Save path, or the literal string "NotSave" to skip saving.
    np_vaf : ndarray
        VAF matrix; only column 0 is used here (doubled for plotting).
    samplename_dict : dict
        Maps cluster index to display label.
    includefp : bool
        When True, cluster ``fp_index`` is drawn in grey (FP cluster).
    makeone_index : container
        Clusters drawn with a solid line; all others get a dash-dot line.
    kwargs
        Must contain "FONT_FAMILY".
    """
    tabl = palettable.tableau.Tableau_20.mpl_colors
    Gr_10 = palettable.scientific.sequential.GrayC_20.mpl_colors
    colorlist = [i for i in tabl]

    matplotlib.rcParams["font.family"] = kwargs["FONT_FAMILY"]
    # NOTE(review): "seaborn-white" was renamed "seaborn-v0_8-white" in
    # matplotlib >= 3.6; kept as-is for the pinned environment — confirm.
    matplotlib.pyplot.style.use("seaborn-white")

    if includefp == True:
        colorlist[ fp_index ] = Gr_10[8]

    fig, ax = matplotlib.pyplot.subplots (figsize = (6, 6))
    matplotlib.pyplot.suptitle(output_suptitle, fontsize = 20)

    max_y = 0
    x = np.linspace(0, 2, 200)

    for k in sorted(list(set(membership))):
        np_vaf_new_index, np_vaf_new = extract.npvaf(np_vaf, membership, k)
        try:
            # KDE of the doubled VAFs, weighted by the cluster fraction so
            # the per-cluster curves compose like a mixture density.
            kde_np_vaf_new = kde.gaussian_kde(np_vaf_new[:, 0] * 2)
            weight = len(np_vaf_new) / len(np_vaf)
            y = kde_np_vaf_new(x) * weight
            if max_y < np.max(y):
                max_y = np.max(y)
            if k in makeone_index:
                ax.plot(x, y, color=colorlist[k], linewidth=5, label=samplename_dict[k])
            else:
                ax.plot(x, y, color=colorlist[k], linewidth=5, label=samplename_dict[k], linestyle="-.")
            # x spans [0, 2] over 200 samples, so argmax/100 approximates
            # the x-position of the density peak for the annotation.
            ax.text(np.argmax(y) / 100, np.max(y) * 1.08, "{} (n = {})".format(np.argmax(y) / 100, np.bincount(membership)[k]), verticalalignment='top', ha = "center", fontdict = {"fontsize": 16, "fontweight" : "bold"})
        except Exception:
            # gaussian_kde fails for degenerate clusters (too few points or
            # zero variance); skip those rather than abort the whole plot.
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            continue

    ax.axis([0, 1, 0, max_y * 1.3])
    ax.spines["top"].set_visible(False)
    ax.spines["right"].set_visible(False)
    ax.spines["bottom"].set_linewidth (3)
    ax.legend()
    ax.set_xlabel("Mixture ( = VAF * 2)", fontdict = {"fontsize" : 14})
    ax.set_ylabel("Density", fontdict = {"fontsize" : 14})

    if output_filename != "NotSave":
        matplotlib.pyplot.savefig(output_filename)
    matplotlib.pyplot.show()
def drawfigure_2d(membership, output_suptitle, output_filename, np_vaf, samplename_dict, includefp, fp_index, dimensionreduction="None", **kwargs):
    """Scatter-plot mutations in 2-D mixture space (VAF * 2 per sample).

    membership: cluster index per mutation; samplename_dict maps cluster
    index to a color slot / label.  When ``dimensionreduction`` is "SVD"
    or "PCA" the (possibly >2-D) np_vaf is first projected to 2 components
    and cluster centroids are NOT annotated.  When ``includefp`` is True
    the cluster ``fp_index`` is colored grey.  ``output_filename`` of
    "NotSave" skips saving.  kwargs must contain "FONT_FAMILY".
    """
    vivid_10 = palettable.cartocolors.qualitative.Vivid_10.mpl_colors
    bdo = palettable.lightbartlein.diverging.BlueDarkOrange18_18.mpl_colors
    tabl = palettable.tableau.Tableau_20.mpl_colors
    Gr_10 = palettable.scientific.sequential.GrayC_20.mpl_colors
    colorlist = [i for i in tabl]
    # font_dir = "/home/goldpm1/miniconda3/envs/cnvpytor/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf/"
    # font_dirs = matplotlib.font_manager.findSystemFonts(fontpaths=font_dir, fontext='ttf')
    # for font in font_dirs:
    #     matplotlib.font_manager.fontManager.addfont(font)
    #print (matplotlib.font_manager.FontProperties(fname = font).get_name())
    matplotlib.rcParams["font.family"] = kwargs["FONT_FAMILY"]
    # NOTE(review): "seaborn-white" is deprecated in matplotlib >= 3.6.
    matplotlib.pyplot.style.use("seaborn-white")
    fig, ax = matplotlib.pyplot.subplots (figsize = (6, 6))
    if dimensionreduction == "SVD":
        print("SVD → 2D")
        tsvd = TruncatedSVD(n_components=2)
        tsvd.fit(np_vaf)
        np_vaf = tsvd.transform(np_vaf)
        # Axis limits padded by ~5% beyond the doubled coordinate range.
        ax.axis([np.min(np_vaf[:, 0]) * 2.1,  np.max(np_vaf[:, 0]) * 2.1,  np.min(np_vaf[:, 1]) * 2.1,  np.max(np_vaf[:, 1]) * 2.1])
        ax.set_xlabel("SVD1", fontdict = {"fontsize" : 14})
        ax.set_ylabel("SVD2", fontdict = {"fontsize" : 14})
    elif dimensionreduction == "PCA":
        print("PCA → 2D")
        pca = PCA(n_components=2)
        pca.fit(np_vaf)
        np_vaf = pca.transform(np_vaf)
        ax.axis([np.min(np_vaf[:, 0]) * 2.1,  np.max(np_vaf[:, 0]) * 2.1,  np.min(np_vaf[:, 1]) * 2.1,  np.max(np_vaf[:, 1]) * 2.1])
        ax.set_xlabel("PC1", fontdict = {"fontsize" : 14})
        ax.set_ylabel("PC2", fontdict = {"fontsize" : 14})
    else:
        ax.axis([0,  np.max(np_vaf[:, :]) * 2.1, 0,  np.max(np_vaf[:, :]) * 2.1])
        ax.set_xlabel("Mixture ( = VAF x 2) of Sample 1", fontdict = {"fontsize" : 14})
        ax.set_ylabel("Mixture ( = VAF x 2) of Sample 2", fontdict = {"fontsize" : 14})
    #matplotlib.pyplot.suptitle("{} (n = {})".format(output_filename, len(membership)), fontsize = "large")
    #print (membership)
    fig.suptitle(output_suptitle, fontsize = 20)
    if includefp == True:
        # Grey out the false-positive cluster's color slot.
        outlier_color_num = samplename_dict[ fp_index ]
        colorlist [ outlier_color_num ] = Gr_10[8]
    ax.scatter(np_vaf[:, 0] * 2, np_vaf[:, 1] * 2, color=[colorlist[samplename_dict[k]] for k in membership], s = 40)
    if (dimensionreduction != "SVD") & ( dimensionreduction != "PCA" ):
        # Annotate centroids only in raw VAF space (projection would make
        # the doubled coordinates meaningless).
        for sample_index, sample in enumerate(samplename_dict):
            if sample not in set(membership):
                continue
            x_mean = round(np.mean(np_vaf[[x for x in range( len(membership)) if membership[x] == sample]][:, 0] * 2), 2)
            y_mean = round(np.mean(np_vaf[[x for x in range( len(membership)) if membership[x] == sample]][:, 1] * 2), 2)
            ax.text(x_mean, y_mean, "{0}".format([x_mean, y_mean]), verticalalignment='top', ha = "center", fontdict = {"fontsize": 16, "fontweight" : "bold"})
            ax.scatter(x_mean, y_mean, marker='*', color=colorlist[samplename_dict[sample]], edgecolor='black', s=220,  label=str(sample) + " : " + str(list(membership).count(sample)))
    ax.legend()
    if output_filename != "NotSave":
        fig.savefig(output_filename)
    #matplotlib.pyplot.show()
def main(output_filename, np_vaf, membership_answer, **kwargs):
    """Entry point: plot the answer membership for a 2-sample dataset.

    NOTE(review): the call below passes (membership_answer,
    output_filename, np_vaf) positionally, but drawfigure_2d's current
    signature is (membership, output_suptitle, output_filename, np_vaf,
    samplename_dict, includefp, fp_index, ...), so invoking this function
    as written raises TypeError.  Left untouched pending confirmation of
    the intended arguments.
    """
    global NUM_BLOCK_INPUT, NUM_BLOCK, RANDOM_PICK, NUM_MUTATION,  FP_RATIO, INPUT_DIR, OUTPUT_DIR
    NUM_BLOCK_INPUT = kwargs["NUM_BLOCK_INPUT"]
    NUM_BLOCK = kwargs["NUM_BLOCK"]
    RANDOM_PICK = kwargs["RANDOM_PICK"]
    NUM_MUTATION = RANDOM_PICK
    FP_RATIO = kwargs["FP_RATIO"]
    # NOTE(review): hard-coded, machine-specific paths; presumably dev
    # leftovers — confirm before relying on them.
    INPUT_DIR = "/data/project/Alzheimer/EM_cluster/pilot/04.EM_input/"
    OUTPUT_DIR = "./output/"
    if NUM_BLOCK == 2:
        drawfigure_2d(membership_answer, output_filename, np_vaf)
    # if NUM_BLOCK == 3:
    #     drawfigure_3d(membership_answer, output_filename, np_vaf)
/IcoCube-0.0.1a6.tar.gz/IcoCube-0.0.1a6/ICO3PluginCollection/MessageView/MessageFramePlugin.py |
import sys
import tkinter
from ICO3Plugin.Plugin.ICO3FramePlugin import ICO3FramePlugin
from ICO3Utilities.Debug.LogDebug import ICO3Log
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
class MessageFramePlugin(ICO3FramePlugin):
    """Tk-based ICO3 plugin window for sending and viewing text messages.

    Builds a Toplevel window with an input box, a send button, a
    read-only message log and a clear button.  Messages are exchanged
    with the ICO3 framework through the "TargetTest" task.
    """
    # Class-level defaults; instances overwrite them in __init__/startPlugin.
    theFrame = None
    theMainframe = None
    # thePluginFrame = None
    theSenderHeader = None
    theReceiver = None
    def on_closing(self):
        """Window-close handler: intentionally a no-op so closing the
        plugin window does not terminate the host application."""
        # ICO3Log.print("Plugin","Close Application")
        # self.removeMe()
        # sys.exit(0)
        pass
    def __init__(self, xMainFrame = None, XY = None) : #, top=None):
        # tk.Toplevel.__init__(self)
        '''This class configures and populates the toplevel window.
           top is the toplevel containing window.'''
        if xMainFrame is not None:
            self.theMainframe = xMainFrame
        # Without a parent frame there is nothing to attach to; bail out.
        if self.theMainframe is None:
            return
        XYPos = "154,120"
        if XY != None:
            XYPos = XY
            pass
        xGeometry = "600x450" + self.getXYExtention(XYPos)
        ICO3Log.print("Plugin","Geometrie MessageFrame : " + xGeometry)
        _bgcolor = '#d9d9d9'  # X11 color: 'gray85'
        _fgcolor = '#000000'  # X11 color: 'black'
        _compcolor = '#d9d9d9' # X11 color: 'gray85'
        _ana1color = '#d9d9d9' # X11 color: 'gray85'
        _ana2color = '#ececec' # Closest X11 color: 'gray92'
        self.style = ttk.Style()
        if sys.platform == "win32":
            self.style.theme_use('winnative')
        self.style.configure('.',background=_bgcolor)
        self.style.configure('.',foreground=_fgcolor)
        self.style.map('.',background=
            [('selected', _compcolor), ('active',_ana2color)])
        # --- Toplevel window ---
        self.theFrame = tk.Toplevel(self.theMainframe)
        self.theFrame.geometry(xGeometry) #top
        self.theFrame.title("Icocube Test")
        self.theFrame.configure(background="#d9d9d9")
        self.theFrame.configure(highlightbackground="#d9d9d9")
        self.theFrame.configure(highlightcolor="black")
        self.theFrame.protocol("WM_DELETE_WINDOW", self.on_closing)
        # --- "Message" label over the input area ---
        self.Label1 = tk.Label(self.theFrame)
        self.Label1.place(relx=0.067, rely=0.0, height=26, width=64)
        self.Label1.configure(activebackground="#f9f9f9")
        self.Label1.configure(activeforeground="black")
        self.Label1.configure(background="#d9d9d9")
        self.Label1.configure(disabledforeground="#a3a3a3")
        self.Label1.configure(foreground="#000000")
        self.Label1.configure(highlightbackground="#d9d9d9")
        self.Label1.configure(highlightcolor="black")
        self.Label1.configure(text='''Message''')
        # --- Send button ---
        self.BTSendMessge = tk.Button(self.theFrame)
        self.BTSendMessge.place(relx=0.75, rely=0.244, height=33, width=107)
        self.BTSendMessge.configure(activebackground="#ececec")
        self.BTSendMessge.configure(activeforeground="#000000")
        self.BTSendMessge.configure(background="#d9d9d9")
        self.BTSendMessge.configure(command=self.SendtheMessage)
        self.BTSendMessge.configure(disabledforeground="#a3a3a3")
        self.BTSendMessge.configure(foreground="#000000")
        self.BTSendMessge.configure(highlightbackground="#d9d9d9")
        self.BTSendMessge.configure(highlightcolor="black")
        self.BTSendMessge.configure(pady="0")
        self.BTSendMessge.configure(text='''Send Message''')
        # --- Label over the processed-message log ---
        self.Label2 = tk.Label(self.theFrame)
        self.Label2.place(relx=0.2, rely=0.289, height=26, width=167)
        self.Label2.configure(background="#d9d9d9")
        self.Label2.configure(disabledforeground="#a3a3a3")
        self.Label2.configure(foreground="#000000")
        self.Label2.configure(text='''Messages processed List''')
        # --- Editable message-input box ---
        self.STextMessage = ScrolledText(self.theFrame)
        self.STextMessage.place(relx=0.2, rely=0.022, relheight=0.209
                , relwidth=0.742)
        self.STextMessage.configure(background="white")
        self.STextMessage.configure(font="TkTextFont")
        self.STextMessage.configure(foreground="black")
        self.STextMessage.configure(highlightbackground="#d9d9d9")
        self.STextMessage.configure(highlightcolor="black")
        self.STextMessage.configure(insertbackground="black")
        self.STextMessage.configure(insertborderwidth="3")
        self.STextMessage.configure(selectbackground="#c4c4c4")
        self.STextMessage.configure(selectforeground="black")
        self.STextMessage.configure(width=10)
        self.STextMessage.configure(wrap="none")
        # --- Read-only message log (state toggled around each write) ---
        self.STextMessageList = ScrolledText(self.theFrame)
        self.STextMessageList.place(relx=0.2, rely=0.356, relheight=0.609
                , relwidth=0.742)
        self.STextMessageList.configure(background="white")
        self.STextMessageList.configure(font="TkTextFont")
        self.STextMessageList.configure(foreground="black")
        self.STextMessageList.configure(highlightbackground="#d9d9d9")
        self.STextMessageList.configure(highlightcolor="black")
        self.STextMessageList.configure(insertbackground="black")
        self.STextMessageList.configure(insertborderwidth="3")
        self.STextMessageList.configure(selectbackground="#c4c4c4")
        self.STextMessageList.configure(selectforeground="black")
        self.STextMessageList.configure(width=10)
        self.STextMessageList.configure(wrap="none")
        self.STextMessageList.configure(state="disable")
        # --- Clear-log button ---
        self.BTClearList = tk.Button(self.theFrame)
        self.BTClearList.place(relx=0.05, rely=0.6, height=33, width=72)
        self.BTClearList.configure(activebackground="#ececec")
        self.BTClearList.configure(activeforeground="#000000")
        self.BTClearList.configure(command=self.CallBackClearList)
        self.BTClearList.configure(background="#d9d9d9")
        self.BTClearList.configure(disabledforeground="#a3a3a3")
        self.BTClearList.configure(foreground="#000000")
        self.BTClearList.configure(highlightbackground="#d9d9d9")
        self.BTClearList.configure(highlightcolor="black")
        self.BTClearList.configure(pady="0")
        self.BTClearList.configure(text='''Clear List''')
    def SendtheMessage(self):
        """Button callback: send the input box content, echo it in the
        log prefixed with "<<<", then clear the input box."""
        TextInput = self.STextMessage.get("1.0", tkinter.END)
        ICO3Log.print("Plugin","we send : " + TextInput)
        self.DisplayMessage("<<<"+ TextInput)
        self.sendTextMessage(TextInput)
        self.clearInputText()
        ICO3Log.print("Plugin","Send Message Clicked")
    def DisplayMessage(self, xMsg):
        """Prepend xMsg to the read-only log (temporarily re-enabled)."""
        self.STextMessageList.configure(state="normal")
        self.STextMessageList.insert('1.0', xMsg)
        self.STextMessageList.configure(state="disable")
    def clearInputText(self):
        """Empty the message-input box."""
        self.STextMessage.delete('1.0', tkinter.END)
    def CallBackClearList(self):
        """Button callback: empty the read-only message log."""
        self.STextMessageList.configure(state="normal")
        self.STextMessageList.delete('1.0', tkinter.END)
        self.STextMessageList.configure(state="disable")
        ICO3Log.print("Plugin","MessageList Clear Clicked")
    def sendTextMessage(self, xTxt):
        """Forward xTxt to the framework task named "TargetTest"."""
        return self.sendMessageToTarget("TargetTest", xTxt)
        # xxMsg = ICO3Message.ICO3Message.createMessage(self.theSenderHeader, xTxt)
        # return self.theModuleMaster.sendMessageCheck(xxMsg)
    # When True, a newline is appended to every received message before
    # it is shown (configured via the "ReceiveLF" extra parameter).
    ExtraReceiveLF = False
    def reveiveMessage(self, xMsg):
        """Show an incoming message in the log prefixed with ">>>"."""
        ICO3Log.print("Plugin","Message -->>" + str(xMsg))
        xtrLF = ""
        if self.ExtraReceiveLF == True:
            xtrLF = "\n"
        self.DisplayMessage(">>>"+ str(xMsg)+xtrLF)
        return 1
    def callBackReceiveMessage(self, xMessage):
        """Framework callback: unwrap the payload and display it."""
        return self.reveiveMessage(xMessage.getPayloadString())
    def initPlugin(self, Argvs):
        """Set the window title and read the "ReceiveLF" extra parameter."""
        # xTargetTest = self.getTaskParameterFlat("TargetTest")
        # if xTargetTest is not None:
        #     self.theReceiver = xTargetTest.Port
        #     if xTargetTest.Target is not None:
        #         XLP = self.nodeWayProcessing(xTargetTest.Target)
        #         self.theSenderHeader = ICO3MessageHeader.ICO3MessageHeader.parseFromString(XLP)
        self.theFrame.title(Argvs + " (" + str(self.theModuleMaster.ModuleID) + ")")
        xLF = self.getExtraParameterValue("ReceiveLF")
        if xLF == "True":
            self.ExtraReceiveLF = True
        pass
    def startPlugin(self):
        """Register the receive callback on the "TargetTest" task."""
        self.theTaskEvent = self.createInstallTaskEventFromTask("TargetTest", self.callBackReceiveMessage)
        # if self.theReceiver is not None:
        #     try:
        #         xTsk = int(self.theReceiver)
        #         xEvent = ICO3MessageEvent()
        #         xEvent.CallBack = self.callBackReceiveMessage
        #         xEvent.TasKID = xTsk
        #         self.theModuleMaster.installTaskEvent(xEvent)
        #     except:
        #         pass
        #     pass
    def stopPlugin(self):
        """Unregister the task event and destroy the window."""
        self.removeTaskEvent(self.theTaskEvent)
        self.theFrame.destroy()
        pass
    def MainLoopProcess(self):
        # Nothing to do per main-loop tick for this plugin.
        pass
# The following code is added to facilitate the Scrolled widgets you specified.
class AutoScroll(object):
    '''Configure the scrollbars for a widget.

    Mixin: expects ``self`` to also be a Tk widget providing xview (and
    optionally yview).  Attaches auto-hiding scrollbars, grids everything
    into the containing frame, and re-exports the frame's geometry
    methods on the widget so callers can pack/grid/place it directly.
    '''
    def __init__(self, master):
        # Rozen. Added the try-except clauses so that this class
        # could be used for scrolled entry widget for which vertical
        # scrolling is not supported. 5/7/14.
        # The bare excepts act as capability probes: widgets without
        # yview / yscrollcommand simply skip the vertical scrollbar.
        try:
            vsb = ttk.Scrollbar(master, orient='vertical', command=self.yview)
        except:
            pass
        hsb = ttk.Scrollbar(master, orient='horizontal', command=self.xview)
        #self.configure(yscrollcommand=_autoscroll(vsb),
        #    xscrollcommand=_autoscroll(hsb))
        try:
            self.configure(yscrollcommand=self._autoscroll(vsb))
        except:
            pass
        self.configure(xscrollcommand=self._autoscroll(hsb))
        self.grid(column=0, row=0, sticky='nsew')
        try:
            vsb.grid(column=1, row=0, sticky='ns')
        except:
            pass
        hsb.grid(column=0, row=1, sticky='ew')
        master.grid_columnconfigure(0, weight=1)
        master.grid_rowconfigure(0, weight=1)
        # Copy geometry methods of master (taken from ScrolledText.py)
        # py3 dict views combine with |, py2 lists concatenate with +.
        if py3:
            methods = tk.Pack.__dict__.keys() | tk.Grid.__dict__.keys() \
                  | tk.Place.__dict__.keys()
        else:
            methods = tk.Pack.__dict__.keys() + tk.Grid.__dict__.keys() \
                  + tk.Place.__dict__.keys()
        for meth in methods:
            if meth[0] != '_' and meth not in ('config', 'configure'):
                setattr(self, meth, getattr(master, meth))
    @staticmethod
    def _autoscroll(sbar):
        '''Hide and show scrollbar as needed.'''
        def wrapped(first, last):
            # first/last arrive as strings from Tk; (0, 1) means the whole
            # content is visible, so the bar can be hidden.
            first, last = float(first), float(last)
            if first <= 0 and last >= 1:
                sbar.grid_remove()
            else:
                sbar.grid()
            sbar.set(first, last)
        return wrapped
    def __str__(self):
        return str(self.master)
def _create_container(func):
'''Creates a ttk Frame with a given master, and use this new frame to
place the scrollbars and the widget.'''
def wrapped(cls, master, **kw):
container = ttk.Frame(master)
container.bind('<Enter>', lambda e: _bound_to_mousewheel(e, container))
container.bind('<Leave>', lambda e: _unbound_to_mousewheel(e, container))
return func(cls, container, **kw)
return wrapped
class ScrolledText(AutoScroll, tk.Text):
    '''A standard Tkinter Text widget with scrollbars that will
    automatically show/hide as needed.'''
    @_create_container
    def __init__(self, master, **kw):
        # _create_container re-parents the widget: 'master' here is the
        # ttk.Frame that also holds the scrollbars.
        tk.Text.__init__(self, master, **kw)
        AutoScroll.__init__(self, master)
import platform
def _bound_to_mousewheel(event, widget):
child = widget.winfo_children()[0]
if platform.system() == 'Windows' or platform.system() == 'Darwin':
child.bind_all('<MouseWheel>', lambda e: _on_mousewheel(e, child))
child.bind_all('<Shift-MouseWheel>', lambda e: _on_shiftmouse(e, child))
else:
child.bind_all('<Button-4>', lambda e: _on_mousewheel(e, child))
child.bind_all('<Button-5>', lambda e: _on_mousewheel(e, child))
child.bind_all('<Shift-Button-4>', lambda e: _on_shiftmouse(e, child))
child.bind_all('<Shift-Button-5>', lambda e: _on_shiftmouse(e, child))
def _unbound_to_mousewheel(event, widget):
if platform.system() == 'Windows' or platform.system() == 'Darwin':
widget.unbind_all('<MouseWheel>')
widget.unbind_all('<Shift-MouseWheel>')
else:
widget.unbind_all('<Button-4>')
widget.unbind_all('<Button-5>')
widget.unbind_all('<Shift-Button-4>')
widget.unbind_all('<Shift-Button-5>')
def _on_mousewheel(event, widget):
if platform.system() == 'Windows':
widget.yview_scroll(-1*int(event.delta/120),'units')
elif platform.system() == 'Darwin':
widget.yview_scroll(-1*int(event.delta),'units')
else:
if event.num == 4:
widget.yview_scroll(-1, 'units')
elif event.num == 5:
widget.yview_scroll(1, 'units')
def _on_shiftmouse(event, widget):
if platform.system() == 'Windows':
widget.xview_scroll(-1*int(event.delta/120), 'units')
elif platform.system() == 'Darwin':
widget.xview_scroll(-1*int(event.delta), 'units')
else:
if event.num == 4:
widget.xview_scroll(-1, 'units')
elif event.num == 5:
widget.xview_scroll(1, 'units') | PypiClean |
/BatchQ-0.1-1-pre-alpha.tar.gz/BatchQ-0.1-1-pre-alpha/batchq/pipelines/shell/utils.py | from batchq.pipelines.shell.bash import BashTerminal
import os
import shutil

from batchq.pipelines.shell.ssh import SSHTerminal
from batchq.pipelines.shell.sftp import SFTPTerminal
class FileTransferTerminal(SFTPTerminal):
    """
    Behaves like SFTPTerminal when server, username and password are
    passed to the constructor.  If server is None, this class implements
    the SFTPTerminal operations on the local filesystem instead.
    Whether an instance is working locally can be checked with
    FileTransferTerminal.islocal().
    """

    def __init__(self, server=None, username="", password="", port=22, accept_figerprint=False):
        self._server = server
        if server is not None:
            super(FileTransferTerminal, self).__init__(server, username, password, port, accept_figerprint)
        else:
            # Local mode: emulate the SFTP terminal's two working
            # directories ("remote" and "local") on this machine.
            self._remote_dir = os.getcwd()
            self._local_dir = os.getcwd()

    def islocal(self):
        """Return True when operating purely on the local filesystem."""
        return self._server is None

    def chdir(self, path):
        """Change the "remote" working directory; True on success."""
        if self._server:
            return super(FileTransferTerminal, self).chdir(path)
        if os.path.isdir(path):
            self._remote_dir = path
            return True
        return False

    def local_chdir(self, path):
        """Change the local working directory; True on success."""
        if self._server:
            return super(FileTransferTerminal, self).local_chdir(path)
        if os.path.isdir(path):
            self._local_dir = path
            return True
        return False

    def pwd(self):
        """Return the "remote" working directory."""
        if self._server:
            return super(FileTransferTerminal, self).pwd()
        return self._remote_dir

    def local_pwd(self):
        """Return the local working directory."""
        if self._server:
            return super(FileTransferTerminal, self).local_pwd()
        return self._local_dir

    def _copy(self, file1, file2):
        """Copy ``file1`` onto ``file2``; return True on success.

        Uses shutil.copyfile rather than ``os.system("cp '%s' '%s'")``:
        the shell form broke on Windows, on names containing single
        quotes, and allowed shell injection through the file names.
        """
        try:
            shutil.copyfile(file1, file2)
        except (IOError, OSError):
            # Mirror the old best-effort behaviour: report failure via
            # the return value instead of raising.
            return False
        return os.path.isfile(file2)

    def sendfile(self, local_file, remote_file):
        """Copy a file from the local side to the "remote" side."""
        if self._server:
            return super(FileTransferTerminal, self).sendfile(local_file, remote_file)
        return self._copy(os.path.join(self._local_dir, local_file),
                          os.path.join(self._remote_dir, remote_file))

    def getfile(self, local_file, remote_file):
        """Copy a file from the "remote" side to the local side."""
        if self._server:
            return super(FileTransferTerminal, self).getfile(local_file, remote_file)
        return self._copy(os.path.join(self._remote_dir, remote_file),
                          os.path.join(self._local_dir, local_file))
class FileCommander(FileTransferTerminal):
    """
    FileCommander provides an easy-to-use interface for transferring
    files as well as accessing both the local and the remote file
    system. It performs common operations such as comparing directories
    in the local and remote views as well as synchronising them.

    The local and remote terminals can be accessed through the
    properties ``FileCommander.local`` and ``FileCommander.remote``,
    respectively.
    """

    # Direction flags for diff()/sync(). diff() accepts the OR of both.
    MODE_LOCAL_REMOTE = 1
    MODE_REMOTE_LOCAL = 2

    def __init__(self, server = None, username = "", password = "", port = 22, accept_figerprint = False):
        """
        Create a commander. When ``server`` is ``None`` both terminals
        are local shells; otherwise the remote terminal is an SSH
        session. (The misspelled ``accept_figerprint`` parameter name is
        kept for backward compatibility with existing callers.)
        """
        super(FileCommander, self).__init__(server, username, password, port, accept_figerprint)
        self._local_bash = BashTerminal()
        if server:
            self._remote_bash = SSHTerminal(server, username, password, port, accept_figerprint)
        else:
            self._remote_bash = BashTerminal()

    @property
    def local(self):
        """
        This property returns an instance of ``BashTerminal``.
        """
        return self._local_bash

    @property
    def remote(self):
        """
        This property returns an instance of ``SSHTerminal`` connected
        to the remote machine if server was different than ``None`` in
        the constructor. Otherwise, an instance of ``BashTerminal`` is returned.
        """
        return self._remote_bash

    def diff(self, local_dir = ".", remote_dir =".", recursive = True, mode = 3, absolute_path = False, ignore_hidden =True):
        """
        Compare local and remote directories, recursively when
        ``recursive = True`` (default), by computing a hash of each file.

        Depending on whether ``mode`` includes
        ``FileCommander.MODE_LOCAL_REMOTE`` and/or
        ``FileCommander.MODE_REMOTE_LOCAL``, two lists ``(files, dirs)``
        are returned with files and directories that are out-of-date or
        missing; each entry is a ``(local, remote)`` tuple where an
        empty string marks a missing counterpart. As standard, hidden
        files are ignored; to include hidden files set
        ``ignore_hidden = False``. By default, paths returned are
        relative, but if ``absolute_path = True`` they are converted
        into absolute paths.
        """
        if not self.local.pushd(local_dir):
            raise BaseException("Local directory does not exist")
        if not self.remote.pushd(remote_dir):
            raise BaseException("Remote directory does not exist")
        lpwd = self.local.pwd()
        rpwd = self.remote.pwd()
        format_lpath = lambda x: x if not absolute_path else self.local.path.join(lpwd, x)
        format_rpath = lambda x: x if not absolute_path else self.remote.path.join(rpwd, x)
        h1 = self.local.directory_hash(".", ignore_hidden)
        h2 = self.remote.directory_hash(".", ignore_hidden)
        if h1 == h2:
            # Identical trees: restore both directory stacks before the early
            # return (the previous version returned here without popping,
            # leaking the pushed directories).
            if not self.local.popd():
                raise BaseException("Local directory stack error")
            if not self.remote.popd():
                raise BaseException("Remote directory stack error")
            return ([], [])
        (lfiles, ldirs) = self.local.list(".", recursive)
        (rfiles, rdirs) = self.remote.list(".", recursive)
        files = []
        dirs = []
        already_checked = []
        if mode & 1:
            for file in lfiles:
                # FIXME: does not work if paths start with ./
                if (file[0] == "." or "/." in file) and ignore_hidden: continue
                already_checked += [file, ]
                if file in rfiles:
                    h1 = self.local.file_hash(file)
                    h2 = self.remote.file_hash(file)
                    if not h1 == h2:
                        files += [(format_lpath(file), format_rpath(file))]
                else:
                    # Present locally only.
                    files += [(format_lpath(file), "")]
        if mode & 2:
            for file in rfiles:
                if (file[0] == "." or "/." in file) and ignore_hidden: continue
                if file in already_checked: continue
                if file in lfiles:
                    h1 = self.local.file_hash(file)
                    h2 = self.remote.file_hash(file)
                    if not h1 == h2:
                        files += [(format_lpath(file), format_rpath(file))]
                else:
                    # Present remotely only.
                    files += [("", format_rpath(file))]
        already_checked = []
        if mode & 1:
            for dir in ldirs:
                if (dir[0] == "." or "/." in dir) and ignore_hidden: continue
                already_checked += [dir, ]
                if not dir in rdirs:
                    dirs += [(format_lpath(dir), "")]
        if mode & 2:
            for dir in rdirs:
                if (dir[0] == "." or "/." in dir) and ignore_hidden: continue
                if dir in already_checked: continue
                if not dir in ldirs:
                    # BUG FIX: a remote-only directory must be formatted with
                    # format_rpath (this previously used format_lpath, which
                    # produced a wrong absolute path when absolute_path=True).
                    dirs += [("", format_rpath(dir))]
        if not self.local.popd():
            raise BaseException("Local directory stack error")
        if not self.remote.popd():
            raise BaseException("Remote directory stack error")
        return (files, dirs)

    def sync(self, local_dir = ".", remote_dir =".", recursive = True, mode = 3, ignore_hidden =True, diff_local_dir =None, diff_remote_dir =None):
        """
        Compare a local and a remote directory and transfer missing or
        out-of-date files in one direction depending on ``mode`` (which
        must be either ``FileCommander.MODE_LOCAL_REMOTE`` or
        ``FileCommander.MODE_REMOTE_LOCAL``). Note this function ignores
        the creation/modification date of the file or directory.
        """
        oldlocal = self.local_pwd()
        oldremote = self.pwd()
        if not self.local_chdir(local_dir): return False
        if not self.chdir(remote_dir): return False
        if diff_local_dir is None: diff_local_dir = local_dir
        if diff_remote_dir is None: diff_remote_dir = remote_dir
        if mode == self.MODE_LOCAL_REMOTE:
            lfiles, ldirs = self.diff(diff_local_dir, diff_remote_dir, recursive, self.MODE_LOCAL_REMOTE, False, ignore_hidden)
            copyfnc = self.sendfile
            bash = self.remote
            files = [file for file, _ in lfiles]
            dirs = [dir for dir, _ in ldirs]
            work_dir = remote_dir
        elif mode == self.MODE_REMOTE_LOCAL:
            rfiles, rdirs = self.diff(diff_local_dir, diff_remote_dir, recursive, self.MODE_REMOTE_LOCAL, False, ignore_hidden)
            copyfnc = self.getfile
            bash = self.local
            files = [file for _, file in rfiles]
            dirs = [dir for _, dir in rdirs]
            work_dir = local_dir
        else:
            raise BaseException("Select one and only one mode.")
        if not bash.pushd(work_dir):
            raise BaseException("Directory does not exist %s" % work_dir)
        # Synchronising directories: create missing ones first; sorted order
        # guarantees parents are created before their children.
        for dir in sorted(dirs):
            if dir.strip() == "":
                raise BaseException("Error received an empty directory")
            if not bash.mkdir(dir):
                raise BaseException("Could not create: '%s'. Issued '%s' and got following output: '%s'" % (dir, bash.last_input, bash.last_output))
        # Synchronising files.
        for file in sorted(files):
            if file.strip() == "":
                # BUG FIX: raising a bare string is a TypeError in Python 3;
                # raise a proper exception object instead.
                raise BaseException("Error received an empty file name")
            if not copyfnc(file, file):
                raise BaseException("Could not copy: '%s'. Issued '%s' and got following output: '%s'" % (file, self.last_input, self.last_output))
        if not bash.popd():
            # BUG FIX: was also a bare string raise.
            raise BaseException("Could not pop working directory")
        self.local_chdir(oldlocal)
        self.chdir(oldremote)
        return (dirs, files)
/Create-Python-Project-0.1.0.tar.gz/Create-Python-Project-0.1.0/create_python_project/scripts/py.py | import ast
from .base import ContentWithInfo, BaseScript, BaseReader, BaseParser, BaseWriter
from .rst import RSTContent, RSTScript, RSTVisitor, RSTParser
from ..info import PyInfo, PyDocstringInfo, SingleLineTextInfo
class PyCodeVisitor(ast.NodeVisitor):
    """AST visitor that carries a reference to the script content it inspects."""

    def __init__(self, content):
        super().__init__()
        # Content object whose lines the concrete visitors read and update.
        self.content = content
class TransformImportVisitor(PyCodeVisitor):
    """AST visitor that renames an imported module from ``old_import`` to
    ``new_import``, updating the matching source lines in ``self.content``.
    """
    def __init__(self, content, old_import=None, new_import=None):
        super().__init__(content)
        self.old_import = old_import  # module name to replace
        self.new_import = new_import  # replacement module name
        # True while a plain `import old_import` (no alias) is in effect, so
        # later bare uses of the name must be renamed as well.
        self.should_rename_import = False
    def visit_Import(self, node):
        # Handles `import a` / `import a as b` statements (possibly several
        # names per statement).
        for name in node.names:
            if name.name == self.old_import:
                self.content.update_line(node.lineno - 1, self.old_import, self.new_import)
                self.should_rename_import = name.asname is None
            elif name.asname == self.old_import: # pragma: no branch
                # Another module is aliased to the old name, so bare uses of
                # that name no longer refer to the module being renamed.
                self.should_rename_import = False
    def visit_ImportFrom(self, node):
        # Handles `from old_import[.sub] import x`: rewrite the module path.
        old_module, new_module = node.module, self.content.update_value(node.module,
                                                                        self.old_import,
                                                                        self.new_import)
        self.content.update_line(node.lineno - 1, old_module, new_module)
        for name in node.names:
            if name.name == self.old_import:
                # The name is re-bound by the `from` import; stop renaming
                # bare uses.
                self.should_rename_import = False
    def visit_Name(self, node):
        # Bare identifier use of the imported module.
        if self.should_rename_import:
            if node.id == self.old_import:
                self.content.update_line(node.lineno - 1, self.old_import, self.new_import)
    def visit_arg(self, node):
        # Function parameter named like the imported module.
        if self.should_rename_import:
            if node.arg == self.old_import:
                self.content.update_line(node.lineno - 1, self.old_import, self.new_import)
class PyCodeContent(ContentWithInfo):
    """Content wrapper for the code section of a Python script."""

    def __init__(self, info=None, lines=None):
        super().__init__(info, lines)
        # Parsed AST of the code section; populated by set_ast().
        self.ast = None

    def transform(self, old_import=None, new_import=None, new_info=None, **kwargs):
        """Apply the generic transformations, then optionally rename an import."""
        super().transform(new_info=new_info, **kwargs)
        self.prepare_transform()
        both_names_given = isinstance(old_import, str) and isinstance(new_import, str)
        if both_names_given:
            visitor = TransformImportVisitor(self, old_import=old_import, new_import=new_import)
            visitor.visit(self.ast)

    def set_ast(self, text_script):
        """Parse ``text_script`` and store the resulting AST."""
        self.ast = ast.parse(text_script)

    def prepare_transform(self):
        """Refresh the AST from the current textual output of this content."""
        self.set_ast(self.output())
class PyContent(ContentWithInfo):
    """Base content class for .py script"""
    info_class = PyInfo
    def __init__(self, info=None, lines=None):
        super().__init__(info, lines)
        self.ast = None  # ast.Module set by set_ast()
        self.docstring = None  # RSTContent wrapping the module docstring, if any
        self.code = PyCodeContent(info=self.info.code)  # code after the docstring
    def set_ast(self, ast_module):
        # Store the parsed module and detect its docstring.
        self.ast = ast_module
        self.init_docstring()
    def init_docstring(self):
        # When the module has a docstring, prepare an RST content object for
        # it and record the line number ast reports for the docstring node
        # (used in set_lines as the docstring/code boundary — NOTE(review):
        # the exact lineno of a multiline string differs across Python
        # versions; confirm against the targeted interpreter).
        docstring = ast.get_docstring(self.ast)
        if docstring is not None:
            self.info.docstring = PyDocstringInfo()
            self.docstring = RSTContent(info=self.info.docstring)
            self.info.docstring_lineno = self.ast.body[0].lineno
    def set_lines(self, lines=None):
        # Split the raw lines between this content and the code sub-content.
        lines = lines or self.lines
        super().set_lines(lines)
        self.code.set_lines(lines[self.info.docstring_lineno:])
    def output(self):
        # Re-emit the script: a normalized triple-quoted docstring (4-space
        # indented body, blank lines kept truly empty) followed by the code.
        if self.docstring is not None and self.docstring.lines:
            docstring = '\n'.join(['"""'] + [' ' * 4 + line.strip() if len(line.strip()) > 0 else ''
                                             for line in self.docstring.lines] + ['"""\n'])
        else:
            docstring = ''
        return docstring + '\n'.join(self.code.lines)
    def transform(self, new_info=None, **kwargs):
        # Delegate transformation to the docstring and code sub-contents.
        if self.docstring:
            self.docstring.transform(new_info=getattr(new_info, 'docstring', None), **kwargs)
        self.code.transform(new_info=getattr(new_info, 'code', None), **kwargs)
class PyDocstringVisitor(RSTVisitor):
    """Visit docutils field nodes of a Python docstring and record known fields."""

    def visit_field(self, node):
        # A field node holds a name child and a body child.
        name_node, body_node = node.children[0], node.children[1]
        field_name = name_node.astext()
        field_body = body_node.children[0].astext()
        if field_name in self.content.info._fields:
            info = SingleLineTextInfo(text=field_body, lineno=node.line - 1)
            setattr(self.content.info, field_name, info)
class PyDocstringParser(RSTParser):
    """Base class for parsing Python Script Docstrings"""
    # Reuses the RST parsing machinery with a docstring-aware visitor.
    _rst_visitor_class = PyDocstringVisitor
class PyDocstringScript(RSTScript):
    """Script class for parsing Python docstrings (RST-formatted)."""
    parser_class = PyDocstringParser
class PyReader(BaseReader):
    """Base class for reading python scripts"""
    # Reading a .py file produces a PyContent (docstring + code sections).
    content_class = PyContent
class PyParser(BaseParser):
    """Base class for parsing py scripts"""
    def setup_parse(self, input_string):
        """Prepare parsing"""
        super().setup_parse(input_string)
        # Dedicated parser for the (RST-formatted) module docstring.
        self.docstring_parser = PyDocstringParser()
    def parse(self, input_string, content):
        """Parse a whole .py script into its docstring and code parts."""
        super().parse(input_string, content)
        # Parse .py script using ast
        content.set_ast(ast.parse(input_string))
        # Parse docstring & code
        self.parse_docstring(content)
        self.parse_code(content)
        content.set_lines()
    def parse_docstring(self, content):
        # Run the RST docstring parser only when a docstring was detected.
        if content.docstring is not None:
            self.docstring_parser.parse(ast.get_docstring(content.ast), content.docstring)
    def parse_code(self, content):
        # Hook for subclasses; the base parser performs no code analysis.
        pass
class PyWriter(BaseWriter):
    """Base writer for Python Scripts"""
    # No specialisation needed: the base writer's behaviour is sufficient.
class PyScript(BaseScript):
    """Base class for manipulating py scripts"""
    # Glob patterns of the files this script class handles.
    supported_format = (
        '*.py',
    )
    parser_class = PyParser
    reader_class = PyReader
    writer_class = PyWriter
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojo/dnd/Source.js.uncompressed.js | define("dojo/dnd/Source", ["../main", "./Selector", "./Manager"], function(dojo, Selector, Manager) {
	// module:
	//		dojo/dnd/Source
	// summary:
	//		a Source object, which can be used as a DnD source or a DnD target
/*=====
Selector = dojo.dnd.Selector;
=====*/
/*
	Container property:
		"Horizontal"- if this is the horizontal container
	Source states:
		""		- normal state
		"Moved"		- this source is being moved
		"Copied"	- this source is being copied
	Target states:
		""		- normal state
		"Disabled"	- the target cannot accept an avatar
	Target anchor state:
		""		- item is not selected
		"Before"	- insert point is before the anchor
		"After"		- insert point is after the anchor
*/
/*=====
dojo.dnd.__SourceArgs = function(){
	// summary:
	//		a dict of parameters for DnD Source configuration. Note that any
	//		property on Source elements may be configured, but this is the
	//		short-list
	// isSource: Boolean?
	//		can be used as a DnD source. Defaults to true.
	// accept: Array?
	//		list of accepted types (text strings) for a target; defaults to
	//		["text"]
	// autoSync: Boolean
	//		if true refreshes the node list on every operation; false by default
	// copyOnly: Boolean?
	//		copy items, if true, use a state of Ctrl key otherwise,
	//		see selfCopy and selfAccept for more details
	// delay: Number
	//		the move delay in pixels before detecting a drag; 0 by default
	// horizontal: Boolean?
	//		a horizontal container, if true, vertical otherwise or when omitted
	// selfCopy: Boolean?
	//		copy items by default when dropping on itself,
	//		false by default, works only if copyOnly is true
	// selfAccept: Boolean?
	//		accept its own items when copyOnly is true,
	//		true by default, works only if copyOnly is true
	// withHandles: Boolean?
	//		allows dragging only by handles, false by default
	// generateText: Boolean?
	//		generate text node for drag and drop, true by default
	this.isSource = isSource;
	this.accept = accept;
	this.autoSync = autoSync;
	this.copyOnly = copyOnly;
	this.delay = delay;
	this.horizontal = horizontal;
	this.selfCopy = selfCopy;
	this.selfAccept = selfAccept;
	this.withHandles = withHandles;
	this.generateText = true;
}
=====*/
// For back-compat, remove in 2.0.
if(!dojo.isAsync){
	dojo.ready(0, function(){
		var requires = ["dojo/dnd/AutoSource", "dojo/dnd/Target"];
		require(requires); // use indirection so modules not rolled into a build
	})
}
return dojo.declare("dojo.dnd.Source", Selector, {
	// summary:
	//		a Source object, which can be used as a DnD source, or a DnD target
	// object attributes (for markup)
	isSource: true,			// can act as a DnD source
	horizontal: false,		// vertical container by default
	copyOnly: false,		// move by default; copy only with the copy key
	selfCopy: false,		// when copyOnly, do not copy on a self-drop by default
	selfAccept: true,		// when copyOnly, still accept own items by default
	skipForm: false,		// when true, do not start drags from form elements
	withHandles: false,		// when true, drag only via "dojoDndHandle" elements
	autoSync: false,		// when true, resync the node list before every drag
	delay: 0, // pixels the pointer must travel before a drag starts
	accept: ["text"],		// accepted types (converted to a hash map in the constructor)
	generateText: true,		// generate a text node for drag and drop
	constructor: function(/*DOMNode|String*/node, /*dojo.dnd.__SourceArgs?*/params){
		// summary:
		//		a constructor of the Source
		// node:
		//		node or node's id to build the source on
		// params:
		//		any property of this class may be configured via the params
		//		object which is mixed-in to the `dojo.dnd.Source` instance
		dojo.mixin(this, dojo.mixin({}, params));
		var type = this.accept;
		if(type.length){
			// convert the 'accept' array into a hash map for O(1) lookups
			// in checkAcceptance()
			this.accept = {};
			for(var i = 0; i < type.length; ++i){
				this.accept[type[i]] = 1;
			}
		}
		// class-specific variables
		this.isDragging = false;
		this.mouseDown = false;
		this.targetAnchor = null;
		this.targetBox = null;
		this.before = true;
		this._lastX = 0;
		this._lastY = 0;
		// states
		this.sourceState = "";
		if(this.isSource){
			dojo.addClass(this.node, "dojoDndSource");
		}
		this.targetState = "";
		if(this.accept){
			dojo.addClass(this.node, "dojoDndTarget");
		}
		if(this.horizontal){
			dojo.addClass(this.node, "dojoDndHorizontal");
		}
		// set up events: subscribe to the global DnD topics published by the
		// Manager and keep the handles so destroy() can unsubscribe
		this.topics = [
			dojo.subscribe("/dnd/source/over", this, "onDndSourceOver"),
			dojo.subscribe("/dnd/start", this, "onDndStart"),
			dojo.subscribe("/dnd/drop", this, "onDndDrop"),
			dojo.subscribe("/dnd/cancel", this, "onDndCancel")
		];
	},
	// methods
	checkAcceptance: function(source, nodes){
		// summary:
		//		checks if the target can accept nodes from this source
		// source: Object
		//		the source which provides items
		// nodes: Array
		//		the list of transferred items
		if(this == source){
			return !this.copyOnly || this.selfAccept;
		}
		// every node must carry at least one type present in this.accept
		for(var i = 0; i < nodes.length; ++i){
			var type = source.getItem(nodes[i].id).type;
			// type instanceof Array
			var flag = false;
			for(var j = 0; j < type.length; ++j){
				if(type[j] in this.accept){
					flag = true;
					break;
				}
			}
			if(!flag){
				return false;	// Boolean
			}
		}
		return true;	// Boolean
	},
	copyState: function(keyPressed, self){
		// summary:
		//		Returns true if we need to copy items, false to move.
		//		It is separated to be overwritten dynamically, if needed.
		// keyPressed: Boolean
		//		the "copy" key was pressed
		// self: Boolean?
		//		optional flag that means that we are about to drop on itself
		if(keyPressed){ return true; }
		if(arguments.length < 2){
			// 'self' omitted: infer it from the manager's current target
			self = this == Manager.manager().target;
		}
		if(self){
			// dropping on itself: copy only when both copyOnly and selfCopy are set
			if(this.copyOnly){
				return this.selfCopy;
			}
		}else{
			return this.copyOnly;
		}
		return false;	// Boolean
	},
	destroy: function(){
		// summary:
		//		prepares the object to be garbage-collected
		dojo.dnd.Source.superclass.destroy.call(this);
		// unsubscribe all topic listeners registered in the constructor
		dojo.forEach(this.topics, dojo.unsubscribe);
		this.targetAnchor = null;
	},
	// mouse event processors
	onMouseMove: function(e){
		// summary:
		//		event processor for onmousemove
		// e: Event
		//		mouse event
		if(this.isDragging && this.targetState == "Disabled"){ return; }
		dojo.dnd.Source.superclass.onMouseMove.call(this, e);
		var m = Manager.manager();
		if(!this.isDragging){
			// start a drag once the pointer has moved farther than 'delay'
			// pixels from the mousedown point in either axis
			if(this.mouseDown && this.isSource &&
				(Math.abs(e.pageX - this._lastX) > this.delay || Math.abs(e.pageY - this._lastY) > this.delay)){
				var nodes = this.getSelectedNodes();
				if(nodes.length){
					m.startDrag(this, nodes, this.copyState(dojo.isCopyKey(e), true));
				}
			}
		}
		if(this.isDragging){
			// calculate before/after: compare the pointer with the midpoint
			// of the hovered item (x for horizontal, y for vertical layout)
			var before = false;
			if(this.current){
				if(!this.targetBox || this.targetAnchor != this.current){
					this.targetBox = dojo.position(this.current, true);
				}
				if(this.horizontal){
					before = (e.pageX - this.targetBox.x) < (this.targetBox.w / 2);
				}else{
					before = (e.pageY - this.targetBox.y) < (this.targetBox.h / 2);
				}
			}
			if(this.current != this.targetAnchor || before != this.before){
				this._markTargetAnchor(before);
				m.canDrop(!this.current || m.source != this || !(this.current.id in this.selection));
			}
		}
	},
	onMouseDown: function(e){
		// summary:
		//		event processor for onmousedown
		// e: Event
		//		mouse event
		if(!this.mouseDown && this._legalMouseDown(e) && (!this.skipForm || !dojo.dnd.isFormElement(e))){
			this.mouseDown = true;
			// remember the press point for the drag-delay check in onMouseMove
			this._lastX = e.pageX;
			this._lastY = e.pageY;
			dojo.dnd.Source.superclass.onMouseDown.call(this, e);
		}
	},
	onMouseUp: function(e){
		// summary:
		//		event processor for onmouseup
		// e: Event
		//		mouse event
		if(this.mouseDown){
			this.mouseDown = false;
			dojo.dnd.Source.superclass.onMouseUp.call(this, e);
		}
	},
	// topic event processors
	onDndSourceOver: function(source){
		// summary:
		//		topic event processor for /dnd/source/over, called when detected a current source
		// source: Object
		//		the source which has the mouse over it
		if(this != source){
			// the pointer moved to another source: drop our pending state
			this.mouseDown = false;
			if(this.targetAnchor){
				this._unmarkTargetAnchor();
			}
		}else if(this.isDragging){
			var m = Manager.manager();
			m.canDrop(this.targetState != "Disabled" && (!this.current || m.source != this || !(this.current.id in this.selection)));
		}
	},
	onDndStart: function(source, nodes, copy){
		// summary:
		//		topic event processor for /dnd/start, called to initiate the DnD operation
		// source: Object
		//		the source which provides items
		// nodes: Array
		//		the list of transferred items
		// copy: Boolean
		//		copy items, if true, move items otherwise
		if(this.autoSync){ this.sync(); }
		if(this.isSource){
			this._changeState("Source", this == source ? (copy ? "Copied" : "Moved") : "");
		}
		// disable this target when it cannot accept the dragged items
		var accepted = this.accept && this.checkAcceptance(source, nodes);
		this._changeState("Target", accepted ? "" : "Disabled");
		if(this == source){
			Manager.manager().overSource(this);
		}
		this.isDragging = true;
	},
	onDndDrop: function(source, nodes, copy, target){
		// summary:
		//		topic event processor for /dnd/drop, called to finish the DnD operation
		// source: Object
		//		the source which provides items
		// nodes: Array
		//		the list of transferred items
		// copy: Boolean
		//		copy items, if true, move items otherwise
		// target: Object
		//		the target which accepts items
		if(this == target){
			// this one is for us => move nodes!
			this.onDrop(source, nodes, copy);
		}
		this.onDndCancel();
	},
	onDndCancel: function(){
		// summary:
		//		topic event processor for /dnd/cancel, called to cancel the DnD operation
		if(this.targetAnchor){
			this._unmarkTargetAnchor();
			this.targetAnchor = null;
		}
		// reset all transient drag state back to the idle defaults
		this.before = true;
		this.isDragging = false;
		this.mouseDown = false;
		this._changeState("Source", "");
		this._changeState("Target", "");
	},
	// local events
	onDrop: function(source, nodes, copy){
		// summary:
		//		called only on the current target, when drop is performed
		// source: Object
		//		the source which provides items
		// nodes: Array
		//		the list of transferred items
		// copy: Boolean
		//		copy items, if true, move items otherwise
		if(this != source){
			this.onDropExternal(source, nodes, copy);
		}else{
			this.onDropInternal(nodes, copy);
		}
	},
	onDropExternal: function(source, nodes, copy){
		// summary:
		//		called only on the current target, when drop is performed
		//		from an external source
		// source: Object
		//		the source which provides items
		// nodes: Array
		//		the list of transferred items
		// copy: Boolean
		//		copy items, if true, move items otherwise
		var oldCreator = this._normalizedCreator;
		// transferring nodes from the source to the target
		if(this.creator){
			// use defined creator
			this._normalizedCreator = function(node, hint){
				return oldCreator.call(this, source.getItem(node.id).data, hint);
			};
		}else{
			// we have no creator defined => move/clone nodes
			if(copy){
				// clone nodes
				this._normalizedCreator = function(node, hint){
					var t = source.getItem(node.id);
					var n = node.cloneNode(true);
					n.id = dojo.dnd.getUniqueId();
					return {node: n, data: t.data, type: t.type};
				};
			}else{
				// move nodes
				this._normalizedCreator = function(node, hint){
					var t = source.getItem(node.id);
					source.delItem(node.id);
					return {node: node, data: t.data, type: t.type};
				};
			}
		}
		this.selectNone();
		if(!copy && !this.creator){
			source.selectNone();
		}
		this.insertNodes(true, nodes, this.before, this.current);
		if(!copy && this.creator){
			source.deleteSelectedNodes();
		}
		// restore the original creator so later inserts are unaffected
		this._normalizedCreator = oldCreator;
	},
	onDropInternal: function(nodes, copy){
		// summary:
		//		called only on the current target, when drop is performed
		//		from the same target/source
		// nodes: Array
		//		the list of transferred items
		// copy: Boolean
		//		copy items, if true, move items otherwise
		var oldCreator = this._normalizedCreator;
		// transferring nodes within the single source
		if(this.current && this.current.id in this.selection){
			// do nothing: dropping onto one of the dragged items itself
			return;
		}
		if(copy){
			if(this.creator){
				// create new copies of data items
				this._normalizedCreator = function(node, hint){
					return oldCreator.call(this, this.getItem(node.id).data, hint);
				};
			}else{
				// clone nodes
				this._normalizedCreator = function(node, hint){
					var t = this.getItem(node.id);
					var n = node.cloneNode(true);
					n.id = dojo.dnd.getUniqueId();
					return {node: n, data: t.data, type: t.type};
				};
			}
		}else{
			// move nodes
			if(!this.current){
				// do nothing: no anchor to move the selection to
				return;
			}
			this._normalizedCreator = function(node, hint){
				var t = this.getItem(node.id);
				return {node: node, data: t.data, type: t.type};
			};
		}
		this._removeSelection();
		this.insertNodes(true, nodes, this.before, this.current);
		this._normalizedCreator = oldCreator;
	},
	onDraggingOver: function(){
		// summary:
		//		called during the active DnD operation, when items
		//		are dragged over this target, and it is not disabled
	},
	onDraggingOut: function(){
		// summary:
		//		called during the active DnD operation, when items
		//		are dragged away from this target, and it is not disabled
	},
	// utilities
	onOverEvent: function(){
		// summary:
		//		this function is called once, when mouse is over our container
		dojo.dnd.Source.superclass.onOverEvent.call(this);
		Manager.manager().overSource(this);
		if(this.isDragging && this.targetState != "Disabled"){
			this.onDraggingOver();
		}
	},
	onOutEvent: function(){
		// summary:
		//		this function is called once, when mouse is out of our container
		dojo.dnd.Source.superclass.onOutEvent.call(this);
		Manager.manager().outSource(this);
		if(this.isDragging && this.targetState != "Disabled"){
			this.onDraggingOut();
		}
	},
	_markTargetAnchor: function(before){
		// summary:
		//		assigns a class to the current target anchor based on "before" status
		// before: Boolean
		//		insert before, if true, after otherwise
		if(this.current == this.targetAnchor && this.before == before){ return; }
		// clear the old anchor's marker class before moving the anchor
		if(this.targetAnchor){
			this._removeItemClass(this.targetAnchor, this.before ? "Before" : "After");
		}
		this.targetAnchor = this.current;
		this.targetBox = null;
		this.before = before;
		if(this.targetAnchor){
			this._addItemClass(this.targetAnchor, this.before ? "Before" : "After");
		}
	},
	_unmarkTargetAnchor: function(){
		// summary:
		//		removes a class of the current target anchor based on "before" status
		if(!this.targetAnchor){ return; }
		this._removeItemClass(this.targetAnchor, this.before ? "Before" : "After");
		this.targetAnchor = null;
		this.targetBox = null;
		this.before = true;
	},
	_markDndStatus: function(copy){
		// summary:
		//		changes source's state based on "copy" status
		this._changeState("Source", copy ? "Copied" : "Moved");
	},
	_legalMouseDown: function(e){
		// summary:
		//		checks if user clicked on "approved" items
		// e: Event
		//		mouse event
		// accept only the left mouse button
		if(!dojo.mouseButtons.isLeft(e)){ return false; }
		if(!this.withHandles){ return true; }
		// check for handles: walk up from the click target looking for a
		// drag handle, stopping at item or explicit-ignore boundaries
		for(var node = e.target; node && node !== this.node; node = node.parentNode){
			if(dojo.hasClass(node, "dojoDndHandle")){ return true; }
			if(dojo.hasClass(node, "dojoDndItem") || dojo.hasClass(node, "dojoDndIgnore")){ break; }
		}
		return false;	// Boolean
	}
});
});
/CycloneDX_Buildroot-1.0.5.tar.gz/CycloneDX_Buildroot-1.0.5/src/CycloneDX_Buildroot_robert_smigielski/generateBuildrootSBOM.py |
# This file is part of CycloneDX Buildroot module.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
# Copyright (c) 2023 OWASP Foundation. All Rights Reserved.
import argparse
import csv
import json
from cyclonedx.model.bom import Bom, BomMetaData
from cyclonedx.model.component import Component, ComponentType
from packageurl import PackageURL
from cyclonedx.factory.license import LicenseFactory
from cyclonedx.model import OrganizationalEntity
from cyclonedx.output import get_instance, BaseOutput, OutputFormat
from xml.dom import minidom
# Buildroot manifest.csv file header shows the following header row
# PACKAGE,VERSION,LICENSE,LICENSE FILES,SOURCE ARCHIVE,SOURCE SITE,DEPENDENCIES WITH LICENSES
#
def create_buildroot_sbom(input_file_name: str, cpe_file_name: str, br_bom: Bom):
    """Populate ``br_bom`` with one component per row of a Buildroot manifest.

    Each CSV row becomes a CycloneDX ``Component`` (with a generic purl, the
    declared license, and a CPE id looked up via :func:`get_cpe_value`) that
    is registered as a dependency of the BOM's root component.

    Exits the process with status -1 when the CSV does not have the expected
    Buildroot manifest header columns.
    """
    br_bom_local: Bom = br_bom
    # The factory is stateless; create it once instead of per row.
    lfac = LicenseFactory()
    with open(input_file_name, newline='') as csvfile:
        sheetX = csv.DictReader(csvfile)
        for row in sheetX:
            try:
                purl_info = PackageURL(type='generic', name=row['PACKAGE'], version=row['VERSION'],
                                       qualifiers={'download_url': row['SOURCE SITE'] + row['SOURCE ARCHIVE']})
                cpe_id_value = get_cpe_value(cpe_file_name, row['PACKAGE'])
                next_component = Component(name=row['PACKAGE'],
                                           type=ComponentType.FIRMWARE,
                                           licenses=[lfac.make_from_string(row['LICENSE'])],
                                           version=row['VERSION'],
                                           purl=purl_info,
                                           cpe=cpe_id_value,
                                           bom_ref=row['PACKAGE'])
                br_bom_local.components.add(next_component)
                br_bom_local.register_dependency(br_bom.metadata.component, [next_component])
            except KeyError:
                print("The input file header does not contain the expected data in the first row of the file.")
                print(
                    "Expected PACKAGE,VERSION,LICENSE,LICENSE FILES,SOURCE ARCHIVE,SOURCE SITE,DEPENDENCIES WITH LICENSES")
                print("Found the following in the csv file first row:", row)
                print("Cannot continue with the provided input file. Exiting.")
                exit(-1)
    return br_bom_local
def get_cpe_value(cpe_file_name: str, sw_component_name: str):
    """Look up the CPE identifier of a software component.

    The CPE file (produced by ``make show-info``) is a JSON object whose
    values are per-package dictionaries; an entry matches when its ``name``
    field equals ``sw_component_name``, in which case its ``cpe-id`` value
    is returned.

    Parameters
    ----------
    cpe_file_name : str
        Path to the CPE JSON file, or the literal string ``"unknown"`` when
        no file was supplied on the command line.
    sw_component_name : str
        Name of the software component to look up.

    Returns
    -------
    str
        The ``cpe-id`` of the first matching entry, or ``"not found"`` when
        there is no CPE file, no matching entry, or the matching entry has
        no ``cpe-id``.
    """
    retval = "not found"
    if cpe_file_name == "unknown":
        return retval
    # Context manager guarantees the file is closed on every code path.
    with open(cpe_file_name) as cpe_file:
        cpe_data = dict(json.load(cpe_file))
    for cpe_value in cpe_data.values():
        # Entries that are not mappings are skipped explicitly instead of
        # being swallowed by the previous bare `except:` clause.
        try:
            sw_object = dict(cpe_value)
        except (TypeError, ValueError):
            continue
        if sw_object.get('name') == sw_component_name:
            cpe_id = sw_object.get('cpe-id')
            # Some entries have a "name" but no "cpe-id"; keep scanning.
            if cpe_id is not None:
                return cpe_id
    return retval
def my_main(*args):
    """Command-line entry point: parse arguments, build the SBOM from the
    Buildroot manifest, and write it as JSON plus one-line and pretty XML.

    Extra positional ``*args`` may be supplied programmatically (e.g. from
    tests) and are then parsed instead of ``sys.argv``.
    """
    parser = argparse.ArgumentParser(description='CycloneDX BOM Generator')
    parser.add_argument('-i', action='store', dest='input_file', default='manifest.csv',
                        help='comma separated value (csv) file of buildroot manifest data')
    parser.add_argument('-o', action='store', dest='output_file', default='buildroot_IOT_sbom',
                        help='SBOM output file name for json and xml')
    parser.add_argument('-n', action='store', dest='product_name', default='unknown', help='name of the product')
    parser.add_argument('-v', action='store', dest='product_version', default='unknown', help='product version string')
    parser.add_argument('-m', action='store', dest='manufacturer_name', default='unknown',
                        help='name of product manufacturer')
    parser.add_argument('-c', action='store', dest='cpe_input_file', default='unknown',
                        help='cpe file from make show-info')
    if (len(args) != 0):
        args = parser.parse_args(list(args))
    else:
        args = parser.parse_args()
    print('Buildroot manifest input file: ' + args.input_file)
    print('Output SBOM: ' + args.output_file)
    # BUG FIX: this previously printed the manufacturer name as the product name.
    print('SBOM Product Name: ' + args.product_name)
    print('SBOM Product Version: ' + args.product_version)
    print('SBOM Product Manufacturer: ' + args.manufacturer_name)
    print('Buildroot cpe input file: ' + args.cpe_input_file)
    br_bom = Bom()
    br_bom.metadata.component = rootComponent = Component(name=args.product_name,
                                                          version=args.product_version,
                                                          bom_ref=args.product_name)
    br_meta = BomMetaData(manufacture=OrganizationalEntity(name=args.manufacturer_name),
                          component=rootComponent)
    br_bom.metadata = br_meta
    br_bom = create_buildroot_sbom(str(args.input_file).strip(" "), str(args.cpe_input_file).strip(" "), br_bom)
    # Produce the output in pretty JSON format.
    outputter: BaseOutput(bom=br_bom) = get_instance(bom=br_bom, output_format=OutputFormat.JSON)
    bom_json = outputter.output_as_string()
    with open((args.output_file + ".json"), mode='w') as outputfile:
        json.dump(json.loads(bom_json), outputfile, indent=3)
    # Produce the output in XML format that is in a one-line format.
    outputterXML: BaseOutput(bom=br_bom) = get_instance(bom=br_bom, output_format=OutputFormat.XML)
    outputterXML.output_to_file(filename=(args.output_file + ".one.xml"), allow_overwrite=True)
    # Re-read the one-line XML and write an indented, human-readable copy.
    with open((args.output_file + ".one.xml")) as myxmldocfile:
        myxmldoc = minidom.parseString(myxmldocfile.read())
    with open(args.output_file + ".xml", mode='w') as outputfile:
        print(myxmldoc.toprettyxml(), file=outputfile)


if __name__ == "__main__":
    my_main()
/Fern2-1.4.1.tar.gz/Fern2-1.4.1/fern/utils/common.py | """common function"""
import logging
import pathlib
import subprocess
from typing import *
import tensorflow as tf
from tqdm import tqdm
import yaml
logger = logging.getLogger('Fern')
def check_path(path):
    """
    Ensure the parent directory of ``path`` exists, creating it (including
    any missing intermediate directories) when necessary.

    Parameters
    ----------
    path : str, Path
        path whose parent directory should exist
    """
    # mkdir(..., exist_ok=True) is atomic with respect to concurrent
    # creators, unlike the previous exists()-then-mkdir() check (TOCTOU).
    pathlib.Path(path).parent.mkdir(parents=True, exist_ok=True)
def get_available_gpu(min_memory=0):
    """
    find the gpu of which free memory is largest

    References
    ----------
    refer to link: https://stackoverflow.com/a/59571639

    Parameters
    ----------
    min_memory : int
        Minimum allowable memory in MB

    Returns
    -------
    tuple[int, int] or None
        - If there is gpu available, return (gpu index, free memory)
        - Else return (None, 0)
    """
    # output: b'memory.free [MiB]\n48600 MiB\n48274 MiB\n'
    command = "nvidia-smi --query-gpu=memory.free --format=csv"
    try:
        memory_free_info = subprocess.check_output(command.split())
    except (FileNotFoundError, subprocess.CalledProcessError):
        # nvidia-smi is missing OR it exists but exited non-zero (e.g. no
        # driver loaded); either way report that no GPU is available.
        # (The previous version only caught FileNotFoundError.)
        return None, 0
    # Drop the header row and the trailing empty line.
    memory_info = memory_free_info.decode('ascii').split('\n')[1:-1]
    candidates = []
    for i, memory in enumerate(memory_info):
        free = int(memory.split()[0])
        if free > min_memory:
            candidates.append((i, free))
    # Largest free memory first.
    candidates.sort(key=lambda item: item[1], reverse=True)
    if candidates:
        return candidates[0]
    return None, 0
def set_gpu(index: Optional[int] = None, growth: bool = True):
    """
    set which GPU to use

    Args:
        index: the gpu index
        growth: whether to limit gpu memory growth
    """
    gpus = tf.config.experimental.list_physical_devices('GPU')
    if growth:
        # Memory growth has to be configured before any GPU is initialised,
        # hence it is applied first, for every physical device.
        for gpu in gpus:
            tf.config.experimental.set_memory_growth(gpu, True)
    if gpus and isinstance(index, int):
        try:
            tf.config.experimental.set_visible_devices(gpus[index], 'GPU')
        except RuntimeError as e:
            # Memory growth must be set before GPUs have been initialized
            logger.info(e)
        except IndexError as e:
            # there is no such a gpu found
            logger.info(e)
class ProgressBar(tqdm):
    """tqdm progress bar with Fern's project-wide defaults: ASCII '->' bar
    characters and leave=False (the bar is cleared once the loop finishes).
    All other positional/keyword arguments are forwarded to tqdm."""
    def __init__(self, *arg, **kwargs):
        super().__init__(ascii='->', leave=False, *arg, **kwargs)
def read_config(path: str):
    """Read a YAML configuration file and return its parsed content.

    Parameters
    ----------
    path : str
        Path to the YAML configuration file.

    Returns
    -------
    The deserialized YAML content (parsed with ``yaml.safe_load`` so
    arbitrary Python objects cannot be instantiated from the file).
    """
    # Explicit encoding: without it, open() uses the platform default
    # (e.g. cp1252 on Windows), which corrupts non-ASCII config values.
    with open(path, 'r', encoding='utf-8') as f:
        data = yaml.safe_load(f)
    return data
/MNN-0.0.7-cp27-cp27mu-manylinux2010_x86_64.whl/MNNTools/MNN_FB/Convolution3D.py |
# namespace: MNN
import flatbuffers
class Convolution3D(object):
    """Read-side FlatBuffers accessor for the MNN ``Convolution3D`` table.

    Generated-style code: every accessor reads lazily from the underlying
    buffer. Vtable slots: 4 = common (sub-table), 6 = weight (float32
    vector), 8 = bias (float32 vector).
    """
    __slots__ = ['_tab']
    @classmethod
    def GetRootAsConvolution3D(cls, buf, offset):
        # Read the root table's offset from the buffer, then position an
        # accessor on it.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Convolution3D()
        x.Init(buf, n + offset)
        return x
    # Convolution3D
    def Init(self, buf, pos):
        # Attach this accessor to `buf` at table position `pos`.
        self._tab = flatbuffers.table.Table(buf, pos)
    # Convolution3D
    def Common(self):
        # Nested Convolution3DCommon sub-table, or None if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            x = self._tab.Indirect(o + self._tab.Pos)
            from .Convolution3DCommon import Convolution3DCommon
            obj = Convolution3DCommon()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None
    # Convolution3D
    def Weight(self, j):
        # j-th weight value (float32, 4 bytes per element); 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return 0
    # Convolution3D
    def WeightAsNumpy(self):
        # Entire weight vector as a numpy array; 0 if the vector is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o)
        return 0
    # Convolution3D
    def WeightLength(self):
        # Number of weight elements; 0 if the vector is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Convolution3D
    def Bias(self, j):
        # j-th bias value (float32, 4 bytes per element); 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return 0
    # Convolution3D
    def BiasAsNumpy(self):
        # Entire bias vector as a numpy array; 0 if the vector is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o)
        return 0
    # Convolution3D
    def BiasLength(self):
        # Number of bias elements; 0 if the vector is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
# Builder-side helpers (FlatBuffers writer protocol): call
# Convolution3DStart(), then the Add*/Start*Vector helpers for each field,
# then Convolution3DEnd() to obtain the finished table offset.
# Slot numbers (0=common, 1=weight, 2=bias) match the reader's vtable
# offsets 4/6/8 above.
def Convolution3DStart(builder): builder.StartObject(3)
def Convolution3DAddCommon(builder, common): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(common), 0)
def Convolution3DAddWeight(builder, weight): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(weight), 0)
# Weight/bias vectors hold float32 values: element size 4, alignment 4.
def Convolution3DStartWeightVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def Convolution3DAddBias(builder, bias): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(bias), 0)
def Convolution3DStartBiasVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def Convolution3DEnd(builder): return builder.EndObject()
/Muntjac-1.1.2.tar.gz/Muntjac-1.1.2/muntjac/terminal/gwt/server/application_runner_servlet.py |
import re
import logging
from muntjac.terminal.gwt.server.exceptions import ServletException
from muntjac.terminal.gwt.server.abstract_application_servlet import \
AbstractApplicationServlet
from muntjac.util import loadClass
logger = logging.getLogger(__name__)
class ApplicationRunnerServlet(AbstractApplicationServlet):
    """Servlet that runs an arbitrary Muntjac application addressed by its
    fully qualified class name in the request URI, e.g.
    ``http://host/<context>/run/package.module.AppClass``."""

    def awake(self, transaction):
        super(ApplicationRunnerServlet, self).awake(transaction)
        # The name of the application class currently used. Only valid
        # within one request.
        self._defaultPackages = None
        self._request = None  # ThreadLocal()
        initParameter = self.getApplicationOrSystemProperty('defaultPackages',
                None)
        if initParameter is not None:
            self._defaultPackages = re.split(',', initParameter)

    def respond(self, transaction):
        # Remember the current request for the duration of this response so
        # that getApplicationClass() can resolve the application class name.
        self._request = transaction.request()
        super(ApplicationRunnerServlet, self).respond(transaction)
        self._request = None

    def getApplicationUrl(self, request):
        """Append the requested application class name to the base URL."""
        path = super(ApplicationRunnerServlet, self).getApplicationUrl(request)
        path += self.getApplicationRunnerApplicationClassName(request)
        path += '/'
        return path

    def getNewApplication(self, request):
        """Instantiate the application class named in the request URI.

        Raises ServletException if the class cannot be loaded/instantiated.
        """
        try:
            application = self.getApplicationClass()()
            return application
        except TypeError:
            raise ServletException('Failed to load application class: '
                    + self.getApplicationRunnerApplicationClassName(request))

    def getApplicationRunnerApplicationClassName(self, request):
        """Shortcut for the application class name parsed from the URI."""
        return self.getApplicationRunnerURIs(request).applicationClassname

    @classmethod
    def getApplicationRunnerURIs(cls, request):
        """Parses application runner URIs.

        If request URL is e.g.
        http://localhost:8080/muntjac/run/muntjac.demo.calc.Calc then
        - context=muntjac
        - Runner servlet=run
        - Muntjac application=muntjac.demo.calc.Calc

        @return: string array containing widgetset URI, application URI and
                 context, runner, application classname
        """
        urlParts = re.split('\\/', request.uri())
        uris = URIS()
        contextPath = cls.getContextPath(request)
        if urlParts[1] == re.sub('\\/', '', contextPath):
            # class name comes after web context and runner application
            context = urlParts[1]
            if len(urlParts) == 3:
                # Py3-compatible raise (was Python-2-only "raise X, msg")
                raise ValueError('No application specified')
            applicationClassname = urlParts[3]
            uris.staticFilesPath = '/' + context
            uris.applicationClassname = applicationClassname
        else:
            # no web context in the URL
            if len(urlParts) == 2:
                raise ValueError('No application specified')
            applicationClassname = urlParts[2]
            uris.staticFilesPath = '/'
            uris.applicationClassname = applicationClassname
        return uris

    def getApplicationClass(self):
        """Resolve the application class: first as a fully qualified name,
        then by prefixing each configured default package.

        Raises TypeError if the class cannot be found anywhere.
        """
        appClass = None
        baseName = self.getApplicationRunnerApplicationClassName(self._request)
        try:
            appClass = loadClass(baseName)
            return appClass
        except Exception:
            if self._defaultPackages is not None:
                for package in self._defaultPackages:
                    try:
                        appClass = loadClass(package + '.' + baseName)
                    except TypeError:
                        pass  # Ignore as this is expected for many packages
                    except Exception:
                        logger.info('Failed to find application '
                                'class in the default package.')
                    if appClass is not None:
                        return appClass
        raise TypeError('class not found exception')

    def getRequestPathInfo(self, request):
        """Path info with the leading '/<application class name>' stripped."""
        path = self.getPathInfo(request)
        if path is None:
            return None
        clsName = self.getApplicationRunnerApplicationClassName(request)
        path = path[1 + len(clsName):]
        return path

    def getStaticFilesLocation(self, request):
        """Location from which static files are served ('' for root).

        Bug fix: this previously called the non-existent
        ``self._getApplicationRunnerURIs`` (note the leading underscore)
        and always raised AttributeError.
        """
        uris = self.getApplicationRunnerURIs(request)
        staticFilesPath = uris.staticFilesPath
        if staticFilesPath == '/':
            staticFilesPath = ''
        return staticFilesPath
class URIS(object):
    """Plain value object for the URI components parsed by
    ApplicationRunnerServlet.getApplicationRunnerURIs: the static-files
    path and the application class name. Both start out as None."""

    def __init__(self):
        # Fully qualified name of the Muntjac application class to run.
        self.applicationClassname = None
        # Path under which static resources are served ('/' or '/<context>').
        self.staticFilesPath = None
/AMAS_sb-1.0.1-py3-none-any.whl/AMAS/update_annotation.py |
# update_annotation.py
"""
Set annotation of a model file
Usage: python update_annotation.py res.csv files/BIOMD0000000190.xml BIOMD0000000190_upd.xml
"""
import argparse
import itertools
import libsbml
import numpy as np
import os
from os.path import dirname, abspath
import pandas as pd
import sys
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from AMAS import constants as cn
from AMAS import annotation_maker as am
from AMAS import tools
def main():
  """CLI entry point: apply a user's add/delete feedback (.csv) to the
  annotations of an SBML model file and write the updated model."""
  parser = argparse.ArgumentParser(description='Update annotations of a model using user\'s feedback file (.csv)')
  parser.add_argument('infile', type=str, help='path of a model file (.xml) to update annotation')
  parser.add_argument('feedback', type=str, help='path of the file (.csv) containing user\'s feedback')
  parser.add_argument('outfile', type=str, help='file path to save model with updated annotations')
  # csv file with user choice
  args = parser.parse_args()
  user_csv = pd.read_csv(args.feedback)
  # Only takes cells with values 'add' or 'delete'
  chosen = user_csv[(user_csv['UPDATE ANNOTATION']=='add') |\
                    (user_csv['UPDATE ANNOTATION']=='delete')]
  outfile = args.outfile
  reader = libsbml.SBMLReader()
  document = reader.readSBML(args.infile)
  model = document.getModel()
  # Dispatch table: how to fetch a model element of each supported type.
  ELEMENT_FUNC = {'species': model.getSpecies,
                  'reaction': model.getReaction}
  element_types = list(np.unique(chosen['type']))
  for one_type in element_types:
    maker = am.AnnotationMaker(one_type)
    # NOTE(review): ACTION_FUNC is built but never used below — the
    # add/delete methods are called directly; presumably legacy. Confirm.
    ACTION_FUNC = {'delete': maker.deleteAnnotation,
                   'add': maker.addAnnotation}
    df_type = chosen[chosen['type']==one_type]
    uids = list(np.unique(df_type['id']))
    # Map each element id to its (single) meta id from the feedback rows.
    meta_ids = {val:list(df_type[df_type['id']==val]['meta id'])[0] for val in uids}
    # going through one id at a time
    for one_id in uids:
      orig_str = ELEMENT_FUNC[one_type](one_id).getAnnotationString()
      df_id = df_type[df_type['id']==one_id]
      dels = list(df_id[df_id[cn.DF_UPDATE_ANNOTATION_COL]=='delete'].loc[:, 'annotation'])
      adds_raw = list(df_id[df_id[cn.DF_UPDATE_ANNOTATION_COL]=='add'].loc[:, 'annotation'])
      # existing annotations to be kept
      keeps = list(df_id[df_id[cn.DF_UPDATE_ANNOTATION_COL]=='keep'].loc[:, 'annotation'])
      adds = list(set(adds_raw + keeps))
      # if type is 'reaction', need to map rhea terms back to ec/kegg terms to delete them.
      if one_type == 'reaction':
        rhea_del_terms = list(set(itertools.chain(*[tools.getAssociatedTermsToRhea(val) for val in dels])))
        deled = maker.deleteAnnotation(rhea_del_terms, orig_str)
      elif one_type == 'species':
        deled = maker.deleteAnnotation(dels, orig_str)
      # Re-add the union of kept and newly requested annotations.
      added = maker.addAnnotation(adds, deled, meta_ids[one_id])
      ELEMENT_FUNC[one_type](one_id).setAnnotation(added)
  libsbml.writeSBMLToFile(document, outfile)
  print("...\nUpdated model file saved as:\n%s\n" % os.path.abspath(outfile))
# Allow running this module directly as a command-line script.
if __name__ == '__main__':
  main()
/Flask-RESTbolt-0.1.0.tar.gz/Flask-RESTbolt-0.1.0/docs/fields.rst | .. _fields:
Output Fields
===============
.. currentmodule:: flask_restful
Flask-RESTful provides an easy way to control what data you actually render in
your response. With the :mod:`fields` module, you can use whatever objects (ORM
models/custom classes/etc.) you want in your resource. :mod:`fields` also lets you
format and filter the response so you don't have to worry about exposing
internal data structures.
It's also very clear when looking at your code what data will be rendered and
how it will be formatted.
Basic Usage
-----------
You can define a dict or OrderedDict of fields whose keys are names of
attributes or keys on the object to render, and whose values are a class that
will format & return the value for that field. This example has three fields:
two are :class:`~fields.String` and one is a :class:`~fields.DateTime`,
formatted as an RFC 822 date string (ISO 8601 is supported as well) ::
from flask_restful import Resource, fields, marshal_with
resource_fields = {
'name': fields.String,
'address': fields.String,
'date_updated': fields.DateTime(dt_format='rfc822'),
}
class Todo(Resource):
@marshal_with(resource_fields, envelope='resource')
def get(self, **kwargs):
return db_get_todo() # Some function that queries the db
This example assumes that you have a custom database object (``todo``) that
has attributes ``name``, ``address``, and ``date_updated``. Any additional
attributes on the object are considered private and won't be rendered in the
output. An optional ``envelope`` keyword argument is specified to wrap the
resulting output.
The decorator :class:`marshal_with` is what actually takes your data object and
applies the field filtering. The marshalling can work on single objects,
dicts, or lists of objects.
.. note ::
:class:`marshal_with` is a convenience decorator, that is functionally
equivalent to ::
class Todo(Resource):
def get(self, **kwargs):
return marshal(db_get_todo(), resource_fields), 200
This explicit expression can be used to return HTTP status codes other than 200
along with a successful response (see :func:`abort` for errors).
Renaming Attributes
-------------------
Often your public-facing field name is different from your internal field
name. To configure this mapping, use the ``attribute`` keyword argument. ::
fields = {
'name': fields.String(attribute='private_name'),
'address': fields.String,
}
A lambda (or any callable) can also be specified as the ``attribute`` ::
fields = {
'name': fields.String(attribute=lambda x: x._private_name),
'address': fields.String,
}
Nested properties can also be accessed with ``attribute`` ::
fields = {
'name': fields.String(attribute='people_list.0.person_dictionary.name'),
'address': fields.String,
}
Default Values
--------------
If for some reason your data object doesn't have an attribute in your fields
list, you can specify a default value to return instead of ``None``. ::
fields = {
'name': fields.String(default='Anonymous User'),
'address': fields.String,
}
Custom Fields & Multiple Values
-------------------------------
Sometimes you have your own custom formatting needs. You can subclass the
:class:`fields.Raw` class and implement the format function. This is especially
useful when an attribute stores multiple pieces of information. e.g. a
bit-field whose individual bits represent distinct values. You can use fields
to multiplex a single attribute to multiple output values.
This example assumes that bit 1 in the ``flags`` attribute signifies a
"Normal" or "Urgent" item, and bit 2 signifies "Read" or "Unread". These
items might be easy to store in a bitfield, but for a human readable output
it's nice to convert them to separate string fields. ::
class UrgentItem(fields.Raw):
def format(self, value):
return "Urgent" if value & 0x01 else "Normal"
class UnreadItem(fields.Raw):
def format(self, value):
return "Unread" if value & 0x02 else "Read"
fields = {
'name': fields.String,
'priority': UrgentItem(attribute='flags'),
'status': UnreadItem(attribute='flags'),
}
Url & Other Concrete Fields
---------------------------
Flask-RESTful includes a special field, :class:`fields.Url`, that synthesizes a
uri for the resource that's being requested. This is also a good example of how
to add data to your response that's not actually present on your data object.::
class RandomNumber(fields.Raw):
def output(self, key, obj):
return random.random()
fields = {
'name': fields.String,
# todo_resource is the endpoint name when you called api.add_resource()
'uri': fields.Url('todo_resource'),
'random': RandomNumber,
}
By default :class:`fields.Url` returns a relative uri. To generate an absolute uri
that includes the scheme, hostname and port, pass the keyword argument
``absolute=True`` in the field declaration. To override the default scheme,
pass the ``scheme`` keyword argument::
fields = {
    'uri': fields.Url('todo_resource', absolute=True),
'https_uri': fields.Url('todo_resource', absolute=True, scheme='https')
}
Complex Structures
------------------
You can have a flat structure that :meth:`marshal` will
transform to a nested structure ::
>>> from flask_restful import fields, marshal
>>> import json
>>>
>>> resource_fields = {'name': fields.String}
>>> resource_fields['address'] = {}
>>> resource_fields['address']['line 1'] = fields.String(attribute='addr1')
>>> resource_fields['address']['line 2'] = fields.String(attribute='addr2')
>>> resource_fields['address']['city'] = fields.String
>>> resource_fields['address']['state'] = fields.String
>>> resource_fields['address']['zip'] = fields.String
>>> data = {'name': 'bob', 'addr1': '123 fake street', 'addr2': '', 'city': 'New York', 'state': 'NY', 'zip': '10468'}
>>> json.dumps(marshal(data, resource_fields))
'{"name": "bob", "address": {"line 1": "123 fake street", "line 2": "", "state": "NY", "zip": "10468", "city": "New York"}}'
.. note ::
The address field doesn't actually exist on the data object, but any of
the sub-fields can access attributes directly from the object as if they
were not nested.
.. _list-field:
List Field
----------
You can also unmarshal fields as lists ::
>>> from flask_restful import fields, marshal
>>> import json
>>>
>>> resource_fields = {'name': fields.String, 'first_names': fields.List(fields.String)}
>>> data = {'name': 'Bougnazal', 'first_names' : ['Emile', 'Raoul']}
>>> json.dumps(marshal(data, resource_fields))
>>> '{"first_names": ["Emile", "Raoul"], "name": "Bougnazal"}'
.. _nested-field:
Advanced : Nested Field
-----------------------
While nesting fields using dicts can turn a flat data object into a nested
response, you can use :class:`~fields.Nested` to unmarshal nested data
structures and render them appropriately. ::
>>> from flask_restful import fields, marshal
>>> import json
>>>
>>> address_fields = {}
>>> address_fields['line 1'] = fields.String(attribute='addr1')
>>> address_fields['line 2'] = fields.String(attribute='addr2')
>>> address_fields['city'] = fields.String(attribute='city')
>>> address_fields['state'] = fields.String(attribute='state')
>>> address_fields['zip'] = fields.String(attribute='zip')
>>>
>>> resource_fields = {}
>>> resource_fields['name'] = fields.String
>>> resource_fields['billing_address'] = fields.Nested(address_fields)
>>> resource_fields['shipping_address'] = fields.Nested(address_fields)
>>> address1 = {'addr1': '123 fake street', 'city': 'New York', 'state': 'NY', 'zip': '10468'}
>>> address2 = {'addr1': '555 nowhere', 'city': 'New York', 'state': 'NY', 'zip': '10468'}
>>> data = { 'name': 'bob', 'billing_address': address1, 'shipping_address': address2}
>>>
>>> json.dumps(marshal(data, resource_fields))
'{"billing_address": {"line 1": "123 fake street", "line 2": null, "state": "NY", "zip": "10468", "city": "New York"}, "name": "bob", "shipping_address": {"line 1": "555 nowhere", "line 2": null, "state": "NY", "zip": "10468", "city": "New York"}}'
This example uses two ``Nested`` fields. The ``Nested`` constructor takes a
dict of fields to render as sub-fields. The important difference between
the ``Nested`` constructor and nested dicts (previous example), is the context
for attributes. In this example, ``billing_address`` is a complex object that
has its own fields and the context passed to the nested field is the sub-object
instead of the original ``data`` object. In other words:
``data.billing_address.addr1`` is in scope here, whereas in the previous
example ``data.addr1`` was the location attribute. Remember: ``Nested`` and
``List`` objects create a new scope for attributes.
Use :class:`~fields.Nested` with :class:`~fields.List` to marshal lists of more
complex objects: ::
user_fields = {
'id': fields.Integer,
'name': fields.String,
}
user_list_fields = {
    'users': fields.List(fields.Nested(user_fields)),
}
| PypiClean |
/CoSA-0.3.0.tar.gz/CoSA-0.3.0/cosa/modifiers/coi.py |
from pysmt.rewritings import conjunctive_partition
from pysmt.shortcuts import And, TRUE
from cosa.representation import TS, HTS
from cosa.utils.formula_mngm import get_free_variables
from cosa.printers.factory import HTSPrintersFactory
from cosa.utils.logger import Logger
class ConeOfInfluence(object):
    """Cone-of-influence (COI) reduction for transition systems.

    Given a property, keeps only the parts of the HTS (init / invar / trans
    conjuncts and variables) that can influence the property's variables,
    following variable dependencies transitively.
    """

    # Class-level defaults; the caches are re-bound per instance in __init__.
    var_deps = None
    fv_dict = None
    int_dict = None
    save_model = False

    def __init__(self):
        self.fv_dict = {}
        self.int_dict = {}

    def _intersect(self, set1, set2):
        """Memoized test of whether two frozensets share an element.

        Bug fix: intersection is symmetric, so a cached result for either
        key ordering is returned directly. The previous code indexed the
        cache with (set1, set2) even when only (set2, set1) was present,
        which raised KeyError.
        """
        key = (set1, set2)
        if key in self.int_dict:
            return self.int_dict[key]
        rkey = (set2, set1)
        if rkey in self.int_dict:
            return self.int_dict[rkey]
        ret = any(el in set2 for el in set1)
        self.int_dict[key] = ret
        return ret

    def _free_variables(self, formula):
        """Memoized frozenset of (reference) free variables of a formula."""
        if formula not in self.fv_dict:
            fv = get_free_variables(formula)
            self.fv_dict[formula] = frozenset([TS.get_ref_var(v) for v in fv])
        return self.fv_dict[formula]

    def _build_var_deps(self, hts):
        """Build ``self.var_deps`` once: a variable depends on every
        variable that appears together with it in an ftrans assignment or
        in a trans/invar/init conjunct."""
        if self.var_deps is not None:
            return
        self.var_deps = {}
        ftrans = hts.single_ftrans()
        for var, cond_assign_list in ftrans.items():
            for refvar in self._free_variables(var):
                if refvar not in self.var_deps:
                    self.var_deps[refvar] = []
                for cass in cond_assign_list:
                    self.var_deps[refvar] += list(self._free_variables(cass[0]))
                    self.var_deps[refvar] += list(self._free_variables(cass[1]))
        trans = list(conjunctive_partition(hts.single_trans(include_ftrans=False)))
        invar = list(conjunctive_partition(hts.single_invar(include_ftrans=False)))
        init = list(conjunctive_partition(hts.single_init()))
        for ts_formula in [invar, trans, init]:
            for f in ts_formula:
                fv = self._free_variables(f)
                for v in fv:
                    if v not in self.var_deps:
                        self.var_deps[v] = []
                    self.var_deps[v] += list(fv)
                    # Deduplicate and drop the self-dependency.
                    self.var_deps[v] = [x for x in set(self.var_deps[v]) if x != v]

    def compute(self, hts, prop):
        """Return a new HTS restricted to the cone of influence of ``prop``.

        Keeps only the init/invar/trans conjuncts whose free variables
        intersect the transitive dependency closure of the property's
        variables (augmented with assumption and lemma variables).
        """
        Logger.log("Building COI", 1)
        self._build_var_deps(hts)
        coi_vars = set(self._free_variables(prop))
        if (len(coi_vars) < 1) or (self.var_deps == {}):
            # Nothing to slice on: return the system unchanged.
            return hts
        # Assumption and lemma variables must stay in the reduced system.
        if hts.assumptions is not None:
            for assumption in hts.assumptions:
                for v in self._free_variables(assumption):
                    coi_vars.add(v)
        if hts.lemmas is not None:
            for lemma in hts.lemmas:
                for v in self._free_variables(lemma):
                    coi_vars.add(v)
        coits = TS("COI")
        # Worklist closure over variable dependencies: newly discovered
        # dependencies are spliced in right after the current position.
        coi_vars = list(coi_vars)
        i = 0
        visited = set([])
        while i < len(coi_vars):
            var = coi_vars[i]
            if (var in visited) or (var not in self.var_deps):
                i += 1
                continue
            coi_vars = coi_vars[:i+1] + list(self.var_deps[var]) + coi_vars[i+1:]
            visited.add(var)
            i += 1
        coi_vars = frozenset(coi_vars)
        trans = list(conjunctive_partition(hts.single_trans(include_ftrans=True)))
        invar = list(conjunctive_partition(hts.single_invar(include_ftrans=True)))
        init = list(conjunctive_partition(hts.single_init()))
        # Keep only conjuncts mentioning at least one COI variable.
        coits.trans = [f for f in trans if self._intersect(coi_vars, self._free_variables(f))]
        coits.invar = [f for f in invar if self._intersect(coi_vars, self._free_variables(f))]
        coits.init = [f for f in init if self._intersect(coi_vars, self._free_variables(f))]
        Logger.log("COI statistics:", 1)
        Logger.log("  Vars: %s -> %s"%(len(hts.vars), len(coi_vars)), 1)
        Logger.log("  Init: %s -> %s"%(len(init), len(coits.init)), 1)
        Logger.log("  Invar: %s -> %s"%(len(invar), len(coits.invar)), 1)
        Logger.log("  Trans: %s -> %s"%(len(trans), len(coits.trans)), 1)
        coits.trans = And(coits.trans)
        coits.invar = And(coits.invar)
        coits.init = And(coits.init)
        # NOTE(review): the variable set is rebuilt from ALL original
        # conjuncts (not the filtered ones) — preserved as-is; confirm
        # whether the filtered lists were intended.
        coits.vars = set([])
        for bf in [init,invar,trans]:
            for f in bf:
                for v in self._free_variables(f):
                    coits.vars.add(v)
        coits.input_vars = set([v for v in coi_vars if v in hts.input_vars])
        coits.output_vars = set([v for v in coi_vars if v in hts.output_vars])
        coits.state_vars = set([v for v in coi_vars if v in hts.state_vars])
        new_hts = HTS("COI")
        new_hts.add_ts(coits)
        if self.save_model:
            printer = HTSPrintersFactory.printer_by_name("STS")
            with open("/tmp/coi_model.ssts", "w") as f:
                f.write(printer.print_hts(new_hts, []))
        return new_hts
/Office365-REST-Python-Client-2.4.3.tar.gz/Office365-REST-Python-Client-2.4.3/office365/sharepoint/portal/groups/site_manager.py | from office365.runtime.client_object import ClientObject
from office365.runtime.client_result import ClientResult
from office365.runtime.http.http_method import HttpMethod
from office365.runtime.queries.service_operation import ServiceOperationQuery
from office365.runtime.paths.resource_path import ResourcePath
from office365.sharepoint.portal.groups.creation_context import GroupCreationContext
from office365.sharepoint.portal.groups.creation_information import GroupCreationInformation
from office365.sharepoint.portal.groups.site_info import GroupSiteInfo
from office365.sharepoint.portal.teams.recent_and_joined_response import RecentAndJoinedTeamsResponse
class GroupSiteManager(ClientObject):
    """Client-side wrapper for the SharePoint ``GroupSiteManager`` endpoint:
    group/site creation, deletion, status queries and Microsoft Teams
    lookups. Each method queues a ServiceOperationQuery on the shared
    request context and returns its (lazily populated) result object."""
    def __init__(self, context, resource_path=None):
        if resource_path is None:
            resource_path = ResourcePath("GroupSiteManager")
        super(GroupSiteManager, self).__init__(context, resource_path)
    def can_user_create_group(self):
        """Determines whether the current user is allowed to create groups.

        :return: ClientResult wrapping a bool
        """
        return_type = ClientResult(self.context, bool())
        qry = ServiceOperationQuery(self, "CanUserCreateGroup", None, None, None, return_type)
        self.context.add_query(qry)
        return return_type
    def create_group_for_site(self, display_name, alias, is_public=None, optional_params=None):
        """
        Create a modern site

        :param str display_name:
        :param str alias:
        :param bool or None is_public:
        :param office365.sharepoint.portal.group_creation_params.GroupCreationParams or None optional_params:
        :return: ClientResult wrapping a GroupSiteInfo
        """
        payload = {
            "displayName": display_name,
            "alias": alias,
            "isPublic": is_public,
            "optionalParams": optional_params
        }
        return_type = ClientResult(self.context, GroupSiteInfo())
        qry = ServiceOperationQuery(self, "CreateGroupForSite", None, payload, None, return_type)
        self.context.add_query(qry)
        return return_type
    def create_group_ex(self, display_name, alias, is_public, optional_params=None):
        """
        Creates a modern site

        :param str display_name:
        :param str alias:
        :param bool is_public:
        :param office365.sharepoint.portal.group_creation_params.GroupCreationParams or None optional_params:
        :return: ClientResult wrapping a GroupSiteInfo
        """
        payload = GroupCreationInformation(display_name, alias, is_public, optional_params)
        return_type = ClientResult(self.context, GroupSiteInfo())
        qry = ServiceOperationQuery(self, "CreateGroupEx", None, payload, None, return_type)
        self.context.add_query(qry)
        return return_type
    def delete(self, site_url):
        """
        Deletes a SharePoint Team site

        :type site_url: str
        """
        payload = {
            "siteUrl": site_url
        }
        qry = ServiceOperationQuery(self, "Delete", None, payload)
        self.context.add_query(qry)
        return self
    def ensure_team_for_group(self):
        """Ensures a Microsoft Teams team exists for the current group."""
        return_type = ClientResult(self.context)
        qry = ServiceOperationQuery(self, "EnsureTeamForGroup", None, None, None, return_type)
        self.context.add_query(qry)
        return return_type
    def get_group_creation_context(self):
        """Retrieves the group-creation context (server-side settings)."""
        return_type = ClientResult(self.context, GroupCreationContext())
        qry = ServiceOperationQuery(self, "GetGroupCreationContext", None, None, None, return_type)
        self.context.add_query(qry)
        return return_type
    def get_status(self, group_id):
        """Get the status of a SharePoint site

        :type group_id: str
        """
        return_type = ClientResult(self.context, GroupSiteInfo())
        qry = ServiceOperationQuery(self, "GetSiteStatus", None, {'groupId': group_id}, None, return_type)
        self.context.add_query(qry)
        # This operation is invoked via GET with the group id in the query
        # string, so the request is rewritten just before execution.
        def _construct_status_request(request):
            request.method = HttpMethod.Get
            request.url += "?groupId='{0}'".format(group_id)
        self.context.before_execute(_construct_status_request)
        return return_type
    def get_current_user_joined_teams(self, get_logo_data=False, force_cache_update=False):
        """
        Get the teams in Microsoft Teams that the current user is a direct member of.

        :type get_logo_data: bool
        :type force_cache_update: bool
        :return: ClientResult wrapping a str
        """
        result = ClientResult(self.context, str())
        payload = {
            "getLogoData": get_logo_data,
            "forceCacheUpdate": force_cache_update
        }
        qry = ServiceOperationQuery(self, "GetCurrentUserJoinedTeams", None, payload, None, result)
        self.context.add_query(qry)
        return result
    def get_current_user_shared_channel_member_groups(self):
        """Gets the groups for which the current user is a shared-channel member."""
        return_type = ClientResult(self.context)
        qry = ServiceOperationQuery(self, "GetCurrentUserSharedChannelMemberGroups", None, None, None, return_type)
        self.context.add_query(qry)
        return return_type
    def get_team_channels(self, team_id, use_staging_endpoint=False):
        """
        :param str team_id:
        :param bool use_staging_endpoint:
        """
        return_type = ClientResult(self.context)
        payload = {
            "teamId": team_id,
            "useStagingEndpoint": use_staging_endpoint
        }
        qry = ServiceOperationQuery(self, "GetTeamChannels", None, payload, None, return_type)
        self.context.add_query(qry)
        return return_type
    def get_team_channels_direct(self, team_id):
        """
        :param str team_id:
        :return: ClientResult wrapping a str
        """
        return_type = ClientResult(self.context, str())
        payload = {
            "teamId": team_id,
        }
        qry = ServiceOperationQuery(self, "GetTeamChannelsDirect", None, payload, None, return_type)
        self.context.add_query(qry)
        return return_type
    def notebook(self, group_id):
        """
        Gets the notebook associated with the given group.

        :param str group_id:
        :return: ClientResult wrapping a str
        """
        return_type = ClientResult(self.context, str())
        payload = {"groupId": group_id}
        qry = ServiceOperationQuery(self, "Notebook", None, payload, None, return_type)
        self.context.add_query(qry)
        return return_type
    def recent_and_joined_teams(self, include_recent=None, include_teams=None, include_pinned=None,
                                existing_joined_teams_data=None):
        """
        :param bool include_recent:
        :param bool include_teams:
        :param bool include_pinned:
        :param str existing_joined_teams_data:
        :return: ClientResult wrapping a RecentAndJoinedTeamsResponse
        """
        return_type = ClientResult(self.context, RecentAndJoinedTeamsResponse())
        payload = {
            "includeRecent": include_recent,
            "includeTeams": include_teams,
            "includePinned": include_pinned,
            "existingJoinedTeamsData": existing_joined_teams_data
        }
        qry = ServiceOperationQuery(self, "RecentAndJoinedTeams", None, payload, None, return_type)
        self.context.add_query(qry)
        return return_type
/ELDAM_LCA-1.0-py3-none-any.whl/eldam/gui/dialogs.py | from PyQt5 import QtWidgets, QtCore, uic
from eldam.gui.gui_parameters import *
from eldam.core.parameters import CALCULATED_PARAMETERS_ATTRIBUTES, INPUT_PARAMETERS_ATTRIBUTES
from eldam.utils.gui import remove_path_from_error_message
from eldam.utils.misc import find_data_file
from eldam.settings import ELDAM_VERSION
class ResizableMessageBox(QtWidgets.QMessageBox):
    """
    Resizable message box

    Source:
    https://stackoverflow.com/a/2664019
    """

    #: Largest size Qt accepts for a widget dimension.
    _UNBOUNDED = 16777215

    def __init__(self):
        QtWidgets.QMessageBox.__init__(self)
        self.setSizeGripEnabled(True)

    def event(self, e):
        result = QtWidgets.QMessageBox.event(self, e)
        # After each event, lift the size constraints on the box itself and
        # on its detail text widget (when present) so the user can resize.
        for widget in (self, self.findChild(QtWidgets.QTextEdit)):
            if widget is None:
                continue
            widget.setMinimumHeight(0)
            widget.setMaximumHeight(self._UNBOUNDED)
            widget.setMinimumWidth(0)
            widget.setMaximumWidth(self._UNBOUNDED)
            widget.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                                 QtWidgets.QSizePolicy.Expanding)
        return result
class SimaProExportOpenDialog(QtWidgets.QFileDialog):
    """File-open dialog preconfigured for SimaPro export files
    (filters/suffix from SIMAPRO_EXPORT_* gui parameters)."""

    def __init__(self, parent, accept_multiple_files=True):
        QtWidgets.QFileDialog.__init__(self, parent=parent)
        self.setAcceptMode(QtWidgets.QFileDialog.AcceptOpen)
        self.setNameFilters(SIMAPRO_EXPORT_FILE_TYPE)
        self.setDefaultSuffix(SIMAPRO_EXPORT_DEFAULT_EXTENSION)
        mode = (QtWidgets.QFileDialog.ExistingFiles
                if accept_multiple_files
                else QtWidgets.QFileDialog.ExistingFile)
        self.setFileMode(mode)
class SimaProImportSaveDialog(QtWidgets.QFileDialog):
    """File-save dialog preconfigured for SimaPro import files
    (filters/suffix from SIMAPRO_IMPORT_* gui parameters)."""

    def __init__(self, parent):
        QtWidgets.QFileDialog.__init__(self, parent=parent)
        self.setAcceptMode(QtWidgets.QFileDialog.AcceptSave)
        self.setNameFilters(SIMAPRO_IMPORT_FILE_TYPE)
        self.setDefaultSuffix(SIMAPRO_IMPORT_DEFAULT_EXTENSION)
class EldaOpenDialog(QtWidgets.QFileDialog):
    """File-open dialog preconfigured for Elda files
    (filters/suffix from ELDA_* gui parameters)."""

    def __init__(self, parent, accept_multiple_files=True):
        QtWidgets.QFileDialog.__init__(self, parent=parent)
        self.setAcceptMode(QtWidgets.QFileDialog.AcceptOpen)
        self.setNameFilters(ELDA_FILE_TYPE)
        self.setDefaultSuffix(ELDA_DEFAULT_EXTENSION)
        mode = (QtWidgets.QFileDialog.ExistingFiles
                if accept_multiple_files
                else QtWidgets.QFileDialog.ExistingFile)
        self.setFileMode(mode)
class EldaSaveDialog(QtWidgets.QFileDialog):
    """File-save dialog preconfigured for Elda files
    (filters/suffix from ELDA_* gui parameters)."""

    def __init__(self, parent):
        QtWidgets.QFileDialog.__init__(self, parent=parent)
        self.setAcceptMode(QtWidgets.QFileDialog.AcceptSave)
        self.setNameFilters(ELDA_FILE_TYPE)
        self.setDefaultSuffix(ELDA_DEFAULT_EXTENSION)
class EldaIndexSaveDialog(QtWidgets.QFileDialog):
    """File-save dialog preconfigured for Elda index files
    (filters/suffix from ELDA_INDEX_* gui parameters)."""

    def __init__(self, parent):
        QtWidgets.QFileDialog.__init__(self, parent=parent)
        self.setAcceptMode(QtWidgets.QFileDialog.AcceptSave)
        self.setNameFilters(ELDA_INDEX_FILE_TYPE)
        self.setDefaultSuffix(ELDA_INDEX_DEFAULT_EXTENSION)
class ExistingEldaFileHandlingDialog(QtWidgets.QMessageBox):
    """ Class used to ask the file handling method for existing elda files """
    def __init__(self, parent, filename):
        # `filename` is interpolated into the dialog message so the user
        # knows which existing Elda file the choice applies to.
        super().__init__(parent=parent)
        self.setWindowTitle(EXISTING_FILE_TITLE)
        self.setText(EXISTING_FILE_MESSAGE_ELDA.format(filename))
        # Adding buttons (right to left) — the insertion order below is
        # deliberate and determines the on-screen button order; do not
        # reorder these calls.
        self.addButton(CANCEL, QtWidgets.QMessageBox.NoRole)
        self.addButton(ADD_MAJOR_VERSION, QtWidgets.QMessageBox.NoRole)
        self.addButton(ADD_MINOR_VERSION, QtWidgets.QMessageBox.NoRole)
        self.addButton(UPDATE_LAST_VERSION, QtWidgets.QMessageBox.NoRole)
        self.addButton(OVERWRITE, QtWidgets.QMessageBox.NoRole)
        self.addButton(RENAME, QtWidgets.QMessageBox.NoRole)
        self.addButton(SKIP, QtWidgets.QMessageBox.NoRole)
class DirectoryOpenDialog(QtWidgets.QFileDialog):
    """ Open dialog restricted to selecting an existing directory. """

    def __init__(self, parent):
        """
        Args:
            parent: Parent widget for the dialog.
        """
        super().__init__(parent=parent)
        self.setFileMode(QtWidgets.QFileDialog.DirectoryOnly)
        self.setAcceptMode(QtWidgets.QFileDialog.AcceptOpen)
class RetryCancelDialog(QtWidgets.QMessageBox):
    """ Critical message box offering Retry and Cancel choices. """

    def __init__(self, parent, title, message, additional_info=None):
        """
        Args:
            parent: Parent widget for the message box.
            title (str): Window title.
            message (str): Main message shown to the user.
            additional_info (str): Optional informative text shown below the message.
        """
        super().__init__(parent=parent)
        self.setWindowTitle(title)
        self.setText(message)
        self.setInformativeText(additional_info)
        self.setIcon(QtWidgets.QMessageBox.Critical)
        self.setStandardButtons(QtWidgets.QMessageBox.Retry | QtWidgets.QMessageBox.Cancel)
class WarningDialog(QtWidgets.QMessageBox):
    """ Simple warning message box with an Ok button. """

    def __init__(self, parent, title, message, additional_info=None):
        """
        Args:
            parent: Parent widget for the message box.
            title (str): Window title.
            message (str): Main message shown to the user.
            additional_info (str): Optional informative text shown below the message.
        """
        super().__init__(parent=parent)
        self.setWindowTitle(title)
        self.setText(message)
        self.setInformativeText(additional_info)
        self.setIcon(QtWidgets.QMessageBox.Warning)
        self.setStandardButtons(QtWidgets.QMessageBox.Ok)
class ErrorDialog(QtWidgets.QMessageBox):
    """ Simple error message box with an Ok button. """

    def __init__(self, parent, title, message, additional_info=None):
        """
        Args:
            parent: Parent widget for the message box.
            title (str): Window title.
            message (str): Main message shown to the user.
            additional_info (str): Optional informative text shown below the message.
        """
        super().__init__(parent=parent)
        self.setWindowTitle(title)
        self.setText(message)
        self.setInformativeText(additional_info)
        self.setIcon(QtWidgets.QMessageBox.Critical)
        self.setStandardButtons(QtWidgets.QMessageBox.Ok)
class ParameterConflictDialog(QtWidgets.QDialog):
    """ Dialog showing conflicting parameter definitions side by side in a
    tree widget so the user can pick which one to keep. """

    def __init__(self, conflict, parent=None):
        """
        Args:
            conflict (list): List of parameter conflicts
            parent (QtWidgets.QWidget): Parent widget
        """
        super().__init__(parent=parent)
        # Load the dialog layout from the Qt Designer .ui file
        self.ui = uic.loadUi(find_data_file("files/user_interfaces/parameter_conflict_dialog.ui"), self)
        self.setWindowTitle(PARAMETER_CONFLICT_TITLE)
        self.label.setText(PARAMETER_CONFLICT_MESSAGE.format(len(conflict)))
        # Getting the type of the parameters
        param_type = None
        param_types = set([param.type for param in conflict])
        if len(param_types) == 1:  # Only one type of parameters
            param_type = param_types.pop()
        # Adding columns according to parameter type
        # (presumably the *_ATTRIBUTES constants map attribute name -> column
        # label — TODO confirm against their definitions)
        if param_type == 'Input parameter':
            attributes = INPUT_PARAMETERS_ATTRIBUTES
        elif param_type == 'Calculated parameter':
            attributes = CALCULATED_PARAMETERS_ATTRIBUTES
        else:  # Comparing input parameters and calculated parameters
            attributes = INPUT_PARAMETERS_ATTRIBUTES
        # Setting columns
        self.treeWidget.setColumnCount(len(attributes))
        # NOTE(review): a dict view is passed where a list of labels is
        # expected — relies on PyQt's implicit sequence conversion; verify.
        self.treeWidget.setHeaderLabels(attributes.values())
        self.treeWidget.header().setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents)
        # Alternating row colors
        self.treeWidget.setAlternatingRowColors(True)
        # Setting stylesheet
        self.treeWidget.setStyleSheet(QTREEWIDGET_STYLESHEET)
        # Inserting a tree item per parameter and selecting the first one by default
        first = True
        for parameter in conflict:
            # None/empty attribute values are rendered as empty strings
            values = [str(getattr(parameter, attr) or '') for attr in attributes]
            item = QtWidgets.QTreeWidgetItem(self.treeWidget, values)
            if first:
                self.treeWidget.setCurrentItem(item)
                first = False
class AboutDialog(QtWidgets.QDialog):
    """ About ELDAM dialog """

    def __init__(self, parent):
        """
        Args:
            parent: Parent widget for the dialog.
        """
        super().__init__(parent=parent)
        # Load the dialog layout from the Qt Designer .ui file
        self.ui = uic.loadUi(find_data_file("files/user_interfaces/about.ui"), self)
        # Show the running ELDAM version in the dedicated label
        self.eldam_version_label.setText(ELDAM_VERSION)
class RuntimeErrorDialog(QtWidgets.QMessageBox):
    """ Dialog used show unexpected runtime error to the user. """

    def __init__(self, message):
        """
        Args:
            message (str): Runtime exception message
        """
        super().__init__()
        # Strip local file-system paths from the traceback before display
        self.setDetailedText(remove_path_from_error_message(message))
        self.setStandardButtons(QtWidgets.QMessageBox.Ok)
/LumberMill-0.9.5.7-py3-none-any.whl/lumbermill/modifier/Permutate.py | import itertools
import sys
from lumbermill.BaseThreadedModule import BaseThreadedModule
from lumbermill.utils.Decorators import ModuleDocstringParser
@ModuleDocstringParser
class Permutate(BaseThreadedModule):
    """
    Creates successive len('target_fields') length permutations of elements in 'source_field'.
    To add some context data to each emitted event 'context_data_field' can specify a field
    containing a dictionary with the values of 'source_field' as keys.
    Configuration template:
    - Permutate:
       source_field:                    # <type: string; is: required>
       target_fields:                   # <type: list; is: required>
       context_data_field:              # <default: ""; type:string; is: optional>
       context_target_mapping:          # <default: {}; type: dict; is: optional if context_data_field == "" else required>
       receivers:
        - NextModule
    """

    module_type = "modifier"
    """Set module type"""

    def handleEvent(self, event):
        """
        Process the event.

        Yields one copy of the event per permutation of the values found in
        'source_field', with the permutation values mapped onto
        'target_fields'. If the source field is missing or not a list, the
        event is passed through unchanged.

        @param event: dictionary
        @return data: dictionary
        """
        try:
            context_data = event[self.getConfigurationValue('context_data_field')]
        except KeyError:
            # Sentinel: falsy value disables the context-data merge below.
            context_data = False
        try:
            permutation_data = event[self.getConfigurationValue('source_field')]
        except KeyError:
            # Source field absent: nothing to permutate, pass through.
            yield event
            return
        if not isinstance(permutation_data, list):
            # Only lists can be permutated; pass other types through.
            yield event
            return
        target_field_names = self.getConfigurationValue('target_fields')
        context_target_mapping = self.getConfigurationValue('context_target_mapping')
        for permutation in itertools.permutations(permutation_data, r=len(target_field_names)):
            event_copy = event.copy()
            if context_data:
                try:
                    # Rewrite the context data keys to new keys in context_target_mapping
                    ctx_data = {}
                    for idx, dct in enumerate([context_data[key] for key in permutation if key in context_data]):
                        for mapping_key, newkeys in context_target_mapping.items():
                            if mapping_key in dct:
                                ctx_data[newkeys[idx]] = dct[mapping_key]
                    event_copy.update(ctx_data)
                except Exception:
                    # Narrowed from a bare 'except:' so KeyboardInterrupt /
                    # SystemExit are no longer swallowed. Malformed context
                    # data is logged and the event is still emitted.
                    etype, evalue, etb = sys.exc_info()
                    self.logger.warning("Could not add context data. Exception: %s, Error: %s." % (etype, evalue))
            perm = dict(zip(target_field_names, permutation))
            event_copy.update(perm)
            yield event_copy
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/DataTables/extensions/TableTools/js/dataTables.tableTools.js | * @summary TableTools
* @description Tools and buttons for DataTables
* @version 2.2.3
* @file dataTables.tableTools.js
* @author SpryMedia Ltd (www.sprymedia.co.uk)
* @contact www.sprymedia.co.uk/contact
* @copyright Copyright 2009-2014 SpryMedia Ltd.
*
* This source file is free software, available under the following license:
* MIT license - http://datatables.net/license/mit
*
* This source file is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
*
* For details please refer to: http://www.datatables.net
*/
/* Global scope for TableTools for backwards compatibility.
* Will be removed in 2.3
*/
var TableTools;
(function(window, document, undefined) {
var factory = function( $, DataTable ) {
"use strict";
//include ZeroClipboard.js
/* ZeroClipboard 1.0.4
* Author: Joseph Huckaby
*/
// Singleton managing the bundled ZeroClipboard Flash bridge: registers
// per-button clients, dispatches events arriving from the Flash movie, and
// provides small DOM helpers used by the clients.
var ZeroClipboard_TableTools = {

	version: "1.0.4-TableTools2",
	clients: {}, // registered upload clients on page, indexed by id
	moviePath: '', // URL to movie
	nextId: 1, // ID of next movie

	$: function(thingy) {
		// simple DOM lookup utility function
		if (typeof(thingy) == 'string') {
			thingy = document.getElementById(thingy);
		}
		if (!thingy.addClass) {
			// extend element with a few useful methods
			thingy.hide = function() { this.style.display = 'none'; };
			thingy.show = function() { this.style.display = ''; };
			thingy.addClass = function(name) { this.removeClass(name); this.className += ' ' + name; };
			thingy.removeClass = function(name) {
				this.className = this.className.replace( new RegExp("\\s*" + name + "\\s*"), " ").replace(/^\s+/, '').replace(/\s+$/, '');
			};
			thingy.hasClass = function(name) {
				return !!this.className.match( new RegExp("\\s*" + name + "\\s*") );
			};
		}
		return thingy;
	},

	setMoviePath: function(path) {
		// set path to ZeroClipboard.swf
		this.moviePath = path;
	},

	dispatch: function(id, eventName, args) {
		// receive event from flash movie, send to client
		var client = this.clients[id];
		if (client) {
			client.receiveEvent(eventName, args);
		}
	},

	register: function(id, client) {
		// register new client to receive events
		this.clients[id] = client;
	},

	getDOMObjectPosition: function(obj) {
		// get absolute coordinates for dom element
		var info = {
			left: 0,
			top: 0,
			width: obj.width ? obj.width : obj.offsetWidth,
			height: obj.height ? obj.height : obj.offsetHeight
		};

		// explicit CSS width/height (when set) wins over the offset size
		if ( obj.style.width !== "" ) {
			info.width = obj.style.width.replace("px","");
		}

		if ( obj.style.height !== "" ) {
			info.height = obj.style.height.replace("px","");
		}

		// accumulate offsets up the offsetParent chain to get page coords
		while (obj) {
			info.left += obj.offsetLeft;
			info.top += obj.offsetTop;
			obj = obj.offsetParent;
		}

		return info;
	},

	Client: function(elem) {
		// constructor for new simple upload client
		this.handlers = {};

		// unique ID
		this.id = ZeroClipboard_TableTools.nextId++;
		this.movieId = 'ZeroClipboard_TableToolsMovie_' + this.id;

		// register client with singleton to receive flash events
		ZeroClipboard_TableTools.register(this.id, this);

		// create movie
		if (elem) {
			this.glue(elem);
		}
	}
};
// Per-button client: positions an invisible Flash movie over a DOM element
// and proxies text/filename/action state to it, relaying movie events back
// to user-registered handlers.
ZeroClipboard_TableTools.Client.prototype = {

	id: 0, // unique ID for us
	ready: false, // whether movie is ready to receive events or not
	movie: null, // reference to movie object
	clipText: '', // text to copy to clipboard
	fileName: '', // default file save name
	action: 'copy', // action to perform
	handCursorEnabled: true, // whether to show hand cursor, or default pointer cursor
	cssEffects: true, // enable CSS mouse effects on dom container
	handlers: null, // user event handlers
	sized: false, // whether the floating div has been given a non-zero size yet

	glue: function(elem, title) {
		// glue to DOM element
		// elem can be ID or actual DOM element object
		this.domElement = ZeroClipboard_TableTools.$(elem);

		// float just above object, or zIndex 99 if dom element isn't set
		var zIndex = 99;
		if (this.domElement.style.zIndex) {
			zIndex = parseInt(this.domElement.style.zIndex, 10) + 1;
		}

		// find X/Y position of domElement
		var box = ZeroClipboard_TableTools.getDOMObjectPosition(this.domElement);

		// create floating DIV above element
		this.div = document.createElement('div');
		var style = this.div.style;
		style.position = 'absolute';
		style.left = '0px';
		style.top = '0px';
		style.width = (box.width) + 'px';
		style.height = box.height + 'px';
		style.zIndex = zIndex;

		if ( typeof title != "undefined" && title !== "" ) {
			this.div.title = title;
		}
		if ( box.width !== 0 && box.height !== 0 ) {
			this.sized = true;
		}

		// style.backgroundColor = '#f00'; // debug
		if ( this.domElement ) {
			this.domElement.appendChild(this.div);
			// NOTE(review): replacing '&' with '&' is a no-op — upstream this
			// replacement target appears to be the HTML entity for '&' and may
			// have been mangled by entity decoding; verify against the
			// original dataTables.tableTools.js before changing.
			this.div.innerHTML = this.getHTML( box.width, box.height ).replace(/&/g, '&');
		}
	},

	positionElement: function() {
		// re-measure the glued element and resize the floating div/movie
		var box = ZeroClipboard_TableTools.getDOMObjectPosition(this.domElement);
		var style = this.div.style;

		style.position = 'absolute';
		//style.left = (this.domElement.offsetLeft)+'px';
		//style.top = this.domElement.offsetTop+'px';
		style.width = box.width + 'px';
		style.height = box.height + 'px';

		if ( box.width !== 0 && box.height !== 0 ) {
			this.sized = true;
		} else {
			// element still hidden / zero-sized: nothing to resize yet
			return;
		}

		var flash = this.div.childNodes[0];
		flash.width = box.width;
		flash.height = box.height;
	},

	getHTML: function(width, height) {
		// return HTML for movie
		var html = '';
		var flashvars = 'id=' + this.id +
			'&width=' + width +
			'&height=' + height;

		if (navigator.userAgent.match(/MSIE/)) {
			// IE gets an OBJECT tag
			var protocol = location.href.match(/^https/i) ? 'https://' : 'http://';
			html += '<object classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" codebase="'+protocol+'download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=10,0,0,0" width="'+width+'" height="'+height+'" id="'+this.movieId+'" align="middle"><param name="allowScriptAccess" value="always" /><param name="allowFullScreen" value="false" /><param name="movie" value="'+ZeroClipboard_TableTools.moviePath+'" /><param name="loop" value="false" /><param name="menu" value="false" /><param name="quality" value="best" /><param name="bgcolor" value="#ffffff" /><param name="flashvars" value="'+flashvars+'"/><param name="wmode" value="transparent"/></object>';
		}
		else {
			// all other browsers get an EMBED tag
			html += '<embed id="'+this.movieId+'" src="'+ZeroClipboard_TableTools.moviePath+'" loop="false" menu="false" quality="best" bgcolor="#ffffff" width="'+width+'" height="'+height+'" name="'+this.movieId+'" align="middle" allowScriptAccess="always" allowFullScreen="false" type="application/x-shockwave-flash" pluginspage="http://www.macromedia.com/go/getflashplayer" flashvars="'+flashvars+'" wmode="transparent" />';
		}
		return html;
	},

	hide: function() {
		// temporarily hide floater offscreen
		if (this.div) {
			this.div.style.left = '-2000px';
		}
	},

	show: function() {
		// show ourselves after a call to hide()
		this.reposition();
	},

	destroy: function() {
		// destroy control and floater
		if (this.domElement && this.div) {
			this.hide();
			this.div.innerHTML = '';

			var body = document.getElementsByTagName('body')[0];
			// swallow failures: the div may already be detached
			try { body.removeChild( this.div ); } catch(e) {}

			this.domElement = null;
			this.div = null;
		}
	},

	reposition: function(elem) {
		// reposition our floating div, optionally to new container
		// warning: container CANNOT change size, only position
		if (elem) {
			this.domElement = ZeroClipboard_TableTools.$(elem);
			if (!this.domElement) {
				this.hide();
			}
		}

		if (this.domElement && this.div) {
			var box = ZeroClipboard_TableTools.getDOMObjectPosition(this.domElement);
			var style = this.div.style;
			style.left = '' + box.left + 'px';
			style.top = '' + box.top + 'px';
		}
	},

	clearText: function() {
		// clear the text to be copy / saved
		this.clipText = '';
		if (this.ready) {
			this.movie.clearText();
		}
	},

	appendText: function(newText) {
		// append text to that which is to be copied / saved
		this.clipText += newText;
		if (this.ready) { this.movie.appendText(newText) ;}
	},

	setText: function(newText) {
		// set text to be copied to be copied / saved
		this.clipText = newText;
		if (this.ready) { this.movie.setText(newText) ;}
	},

	setCharSet: function(charSet) {
		// set the character set (UTF16LE or UTF8)
		this.charSet = charSet;
		if (this.ready) { this.movie.setCharSet(charSet) ;}
	},

	setBomInc: function(bomInc) {
		// set if the BOM should be included or not
		this.incBom = bomInc;
		if (this.ready) { this.movie.setBomInc(bomInc) ;}
	},

	setFileName: function(newText) {
		// set the file name
		this.fileName = newText;
		if (this.ready) {
			this.movie.setFileName(newText);
		}
	},

	setAction: function(newText) {
		// set action (save or copy)
		this.action = newText;
		if (this.ready) {
			this.movie.setAction(newText);
		}
	},

	addEventListener: function(eventName, func) {
		// add user event listener for event
		// event types: load, queueStart, fileStart, fileComplete, queueComplete, progress, error, cancel
		eventName = eventName.toString().toLowerCase().replace(/^on/, '');
		if (!this.handlers[eventName]) {
			this.handlers[eventName] = [];
		}
		this.handlers[eventName].push(func);
	},

	setHandCursor: function(enabled) {
		// enable hand cursor (true), or default arrow cursor (false)
		this.handCursorEnabled = enabled;
		if (this.ready) {
			this.movie.setHandCursor(enabled);
		}
	},

	setCSSEffects: function(enabled) {
		// enable or disable CSS effects on DOM container
		this.cssEffects = !!enabled;
	},

	receiveEvent: function(eventName, args) {
		var self;

		// receive event from flash
		eventName = eventName.toString().toLowerCase().replace(/^on/, '');

		// special behavior for certain events
		switch (eventName) {
			case 'load':
				// movie claims it is ready, but in IE this isn't always the case...
				// bug fix: Cannot extend EMBED DOM elements in Firefox, must use traditional function
				this.movie = document.getElementById(this.movieId);
				if (!this.movie) {
					// movie not in the DOM yet — retry shortly
					self = this;
					setTimeout( function() { self.receiveEvent('load', null); }, 1 );
					return;
				}

				// firefox on pc needs a "kick" in order to set these in certain cases
				if (!this.ready && navigator.userAgent.match(/Firefox/) && navigator.userAgent.match(/Windows/)) {
					self = this;
					setTimeout( function() { self.receiveEvent('load', null); }, 100 );
					this.ready = true;
					return;
				}

				// flush all state buffered before the movie became ready
				this.ready = true;
				this.movie.clearText();
				this.movie.appendText( this.clipText );
				this.movie.setFileName( this.fileName );
				this.movie.setAction( this.action );
				this.movie.setCharSet( this.charSet );
				this.movie.setBomInc( this.incBom );
				this.movie.setHandCursor( this.handCursorEnabled );
				break;

			case 'mouseover':
				if (this.domElement && this.cssEffects) {
					//this.domElement.addClass('hover');
					if (this.recoverActive) {
						this.domElement.addClass('active');
					}
				}
				break;

			case 'mouseout':
				if (this.domElement && this.cssEffects) {
					this.recoverActive = false;
					if (this.domElement.hasClass('active')) {
						this.domElement.removeClass('active');
						this.recoverActive = true;
					}
					//this.domElement.removeClass('hover');
				}
				break;

			case 'mousedown':
				if (this.domElement && this.cssEffects) {
					this.domElement.addClass('active');
				}
				break;

			case 'mouseup':
				if (this.domElement && this.cssEffects) {
					this.domElement.removeClass('active');
					this.recoverActive = false;
				}
				break;
		} // switch eventName

		// fan the event out to user handlers in registration order
		if (this.handlers[eventName]) {
			for (var idx = 0, len = this.handlers[eventName].length; idx < len; idx++) {
				var func = this.handlers[eventName][idx];

				if (typeof(func) == 'function') {
					// actual function reference
					func(this, args);
				}
				else if ((typeof(func) == 'object') && (func.length == 2)) {
					// PHP style object + method, i.e. [myObject, 'myMethod']
					func[0][ func[1] ](this, args);
				}
				else if (typeof(func) == 'string') {
					// name of function
					window[func](this, args);
				}
			} // foreach event handler defined
		} // user defined handler for event
	}
};
// For the Flash binding to work, ZeroClipboard_TableTools must be on the global
// object list
window.ZeroClipboard_TableTools = ZeroClipboard_TableTools;
//include TableTools.js
/* TableTools
* 2009-2014 SpryMedia Ltd - datatables.net/license
*/
/*globals TableTools,ZeroClipboard_TableTools*/
(function($, window, document) {
/**
 * TableTools provides flexible buttons and other tools for a DataTables enhanced table
 * @class TableTools
 * @constructor
 * @param {Object} oDT DataTables instance. When using DataTables 1.10 this can
 *   also be a jQuery collection, jQuery selector, table node, DataTables API
 *   instance or DataTables settings object.
 * @param {Object} oOpts TableTools options
 * @param {String} oOpts.sSwfPath ZeroClipboard SWF path
 * @param {String} oOpts.sRowSelect Row selection options - 'none', 'single', 'multi' or 'os'
 * @param {Function} oOpts.fnPreRowSelect Callback function just prior to row selection
 * @param {Function} oOpts.fnRowSelected Callback function just after row selection
 * @param {Function} oOpts.fnRowDeselected Callback function when row is deselected
 * @param {Array} oOpts.aButtons List of buttons to be used
 */
TableTools = function( oDT, oOpts )
{
	/* Sanity check that we are a new instance.
	 * Bug fix: the original `! this instanceof TableTools` parsed as
	 * `(!this) instanceof TableTools`, which is always false, so the
	 * warning could never fire. Parenthesise the instanceof test.
	 */
	if ( ! (this instanceof TableTools) )
	{
		alert( "Warning: TableTools must be initialised with the keyword 'new'" );
	}

	// In 1.10 we can use the API to get the settings object from a number of
	// sources
	var dtSettings = $.fn.dataTable.Api ?
		new $.fn.dataTable.Api( oDT ).settings()[0] :
		oDT.fnSettings();


	/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
	 * Public class variables
	 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

	/**
	 * @namespace Settings object which contains customisable information for TableTools instance
	 */
	this.s = {
		// Store 'this' so the instance can be retrieved from the settings object
		"that": this,

		// DataTables settings object (from the oDT init option)
		"dt": dtSettings,

		/**
		 * @namespace Print specific information
		 */
		"print": {
			// DataTables draw 'start' point before the printing display was shown
			"saveStart": -1,

			// DataTables draw 'length' point before the printing display was shown
			"saveLength": -1,

			// Page scrolling point before the printing display was shown so it can be restored
			"saveScroll": -1,

			// Wrapped function to end the print display (to maintain scope)
			"funcEnd": function () {}
		},

		// A unique ID is assigned to each button in each instance
		"buttonCounter": 0,

		/**
		 * @namespace Select rows specific information
		 */
		"select": {
			// Select type - can be 'none', 'single' or 'multi'
			"type": "",

			// Array of nodes which are currently selected
			"selected": [],

			// Function run before selection takes place; returning false cancels the select
			"preRowSelect": null,

			// Function to run when a row is selected
			"postSelected": null,

			// Function to run when a row is deselected
			"postDeselected": null,

			// Indicate if all rows are selected (needed for server-side processing)
			"all": false,

			// Class name to add to selected TR nodes
			"selectedClass": ""
		},

		// Store of the user input customisation object
		"custom": {},

		// SWF movie path
		"swfPath": "",

		// Default button set
		"buttonSet": [],

		// When there is more than one TableTools instance for a DataTable, there must be a
		// master which controls events (row selection etc)
		"master": false,

		// Tag names that are used for creating collections and buttons
		"tags": {}
	};

	/**
	 * @namespace Common and useful DOM elements for the class instance
	 */
	this.dom = {
		// DIV element that is created, into which all TableTools buttons (and their children) are put
		"container": null,

		// The table node to which TableTools will be applied
		"table": null,

		/**
		 * @namespace Nodes used for the print display
		 */
		"print": {
			// Nodes which have been removed from the display by setting them to display none
			"hidden": [],

			// The information display telling the user about the print display
			"message": null
		},

		/**
		 * @namespace Nodes used for a collection display. This contains the currently used collection
		 */
		"collection": {
			// The div wrapper containing the buttons in the collection (i.e. the menu)
			"collection": null,

			// Background display to provide focus and capture events
			"background": null
		}
	};

	/**
	 * @namespace Name space for the classes that this TableTools instance will use
	 * @extends TableTools.classes
	 */
	this.classes = $.extend( true, {}, TableTools.classes );
	if ( this.s.dt.bJUI )
	{
		$.extend( true, this.classes, TableTools.classes_themeroller );
	}


	/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
	 * Public class methods
	 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

	/**
	 * Retrieve the settings object from an instance
	 * @method fnSettings
	 * @returns {object} TableTools settings object
	 */
	this.fnSettings = function () {
		return this.s;
	};


	/* Constructor logic */
	if ( typeof oOpts == 'undefined' )
	{
		oOpts = {};
	}

	TableTools._aInstances.push( this );

	this._fnConstruct( oOpts );

	return this;
};
TableTools.prototype = {
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Public methods
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/**
* Retreieve the settings object from an instance
* @returns {array} List of TR nodes which are currently selected
* @param {boolean} [filtered=false] Get only selected rows which are
* available given the filtering applied to the table. By default
* this is false - i.e. all rows, regardless of filtering are
selected.
*/
"fnGetSelected": function ( filtered )
{
var
out = [],
data = this.s.dt.aoData,
displayed = this.s.dt.aiDisplay,
i, iLen;
if ( filtered )
{
// Only consider filtered rows
for ( i=0, iLen=displayed.length ; i<iLen ; i++ )
{
if ( data[ displayed[i] ]._DTTT_selected )
{
out.push( data[ displayed[i] ].nTr );
}
}
}
else
{
// Use all rows
for ( i=0, iLen=data.length ; i<iLen ; i++ )
{
if ( data[i]._DTTT_selected )
{
out.push( data[i].nTr );
}
}
}
return out;
},
/**
* Get the data source objects/arrays from DataTables for the selected rows (same as
* fnGetSelected followed by fnGetData on each row from the table)
* @returns {array} Data from the TR nodes which are currently selected
*/
"fnGetSelectedData": function ()
{
var out = [];
var data=this.s.dt.aoData;
var i, iLen;
for ( i=0, iLen=data.length ; i<iLen ; i++ )
{
if ( data[i]._DTTT_selected )
{
out.push( this.s.dt.oInstance.fnGetData(i) );
}
}
return out;
},
/**
* Get the indexes of the selected rows
* @returns {array} List of row indexes
* @param {boolean} [filtered=false] Get only selected rows which are
* available given the filtering applied to the table. By default
* this is false - i.e. all rows, regardless of filtering are
selected.
*/
"fnGetSelectedIndexes": function ( filtered )
{
var
out = [],
data = this.s.dt.aoData,
displayed = this.s.dt.aiDisplay,
i, iLen;
if ( filtered )
{
// Only consider filtered rows
for ( i=0, iLen=displayed.length ; i<iLen ; i++ )
{
if ( data[ displayed[i] ]._DTTT_selected )
{
out.push( displayed[i] );
}
}
}
else
{
// Use all rows
for ( i=0, iLen=data.length ; i<iLen ; i++ )
{
if ( data[i]._DTTT_selected )
{
out.push( i );
}
}
}
return out;
},
/**
* Check to see if a current row is selected or not
* @param {Node} n TR node to check if it is currently selected or not
* @returns {Boolean} true if select, false otherwise
*/
"fnIsSelected": function ( n )
{
var pos = this.s.dt.oInstance.fnGetPosition( n );
return (this.s.dt.aoData[pos]._DTTT_selected===true) ? true : false;
},
/**
* Select all rows in the table
* @param {boolean} [filtered=false] Select only rows which are available
* given the filtering applied to the table. By default this is false -
* i.e. all rows, regardless of filtering are selected.
*/
"fnSelectAll": function ( filtered )
{
this._fnRowSelect( filtered ?
this.s.dt.aiDisplay :
this.s.dt.aoData
);
},
/**
* Deselect all rows in the table
* @param {boolean} [filtered=false] Deselect only rows which are available
* given the filtering applied to the table. By default this is false -
* i.e. all rows, regardless of filtering are deselected.
*/
"fnSelectNone": function ( filtered )
{
this._fnRowDeselect( this.fnGetSelectedIndexes(filtered) );
},
/**
* Select row(s)
* @param {node|object|array} n The row(s) to select. Can be a single DOM
* TR node, an array of TR nodes or a jQuery object.
*/
"fnSelect": function ( n )
{
if ( this.s.select.type == "single" )
{
this.fnSelectNone();
this._fnRowSelect( n );
}
else
{
this._fnRowSelect( n );
}
},
/**
 * Deselect row(s). Thin public wrapper around the private row-deselection
 * helper.
 * @param {node|object|array} n The row(s) to deselect. Can be a single DOM
 *   TR node, an array of TR nodes or a jQuery object.
 */
"fnDeselect": function ( n )
{
	this._fnRowDeselect( n );
},
/**
* Get the title of the document - useful for file names. The title is retrieved from either
* the configuration object's 'title' parameter, or the HTML document title
* @param {Object} oConfig Button configuration object
* @returns {String} Button title
*/
"fnGetTitle": function( oConfig )
{
var sTitle = "";
if ( typeof oConfig.sTitle != 'undefined' && oConfig.sTitle !== "" ) {
sTitle = oConfig.sTitle;
} else {
var anTitle = document.getElementsByTagName('title');
if ( anTitle.length > 0 )
{
sTitle = anTitle[0].innerHTML;
}
}
/* Strip characters which the OS will object to - checking for UTF8 support in the scripting
* engine
*/
if ( "\u00A1".toString().length < 4 ) {
return sTitle.replace(/[^a-zA-Z0-9_\u00A1-\uFFFF\.,\-_ !\(\)]/g, "");
} else {
return sTitle.replace(/[^a-zA-Z0-9_\.,\-_ !\(\)]/g, "");
}
},
/**
 * Calculate a unity array with the column width by proportion for a set of columns to be
 * included for a button. This is particularly useful for PDF creation, where we can use the
 * column widths calculated by the browser to size the columns in the PDF.
 * @param {Object} oConfig Button configuration object
 * @returns {String} Tab-separated string of unity column-width ratios.
 *   (Note: despite the original docs saying Array, the ratios are joined
 *   with '\t' and a string is returned.)
 */
"fnCalcColRatios": function ( oConfig )
{
	var
		aoCols = this.s.dt.aoColumns,
		aColumnsInc = this._fnColumnTargets( oConfig.mColumns ),
		aColWidths = [],
		iWidth = 0, iTotal = 0, i, iLen;

	// Sum the rendered header widths of the included columns
	for ( i=0, iLen=aColumnsInc.length ; i<iLen ; i++ )
	{
		if ( aColumnsInc[i] )
		{
			iWidth = aoCols[i].nTh.offsetWidth;
			iTotal += iWidth;

			aColWidths.push( iWidth );
		}
	}

	// Normalise each width to a fraction of the total
	for ( i=0, iLen=aColWidths.length ; i<iLen ; i++ )
	{
		aColWidths[i] = aColWidths[i] / iTotal;
	}

	return aColWidths.join('\t');
},
/**
 * Get the information contained in a table as a string.
 * @param {Object} oConfig Button configuration object
 * @returns {String} Table data as a string (undefined if no DataTables
 *   settings object is available)
 */
"fnGetTableData": function ( oConfig )
{
	/* In future this could be used to get data from a plain HTML source as well as DataTables */
	if ( this.s.dt )
	{
		return this._fnGetDataTablesData( oConfig );
	}
},
/**
 * Pass text to a flash button instance, which will be used on the button's click handler.
 * Thin public wrapper around the private flash helper.
 * @param {Object} clip Flash button object
 * @param {String} text Text to set
 */
"fnSetText": function ( clip, text )
{
	this._fnFlashSetText( clip, text );
},
/**
* Resize the flash elements of the buttons attached to this TableTools instance - this is
* useful for when initialising TableTools when it is hidden (display:none) since sizes can't
* be calculated at that time.
*/
"fnResizeButtons": function ()
{
for ( var cli in ZeroClipboard_TableTools.clients )
{
if ( cli )
{
var client = ZeroClipboard_TableTools.clients[cli];
if ( typeof client.domElement != 'undefined' &&
client.domElement.parentNode )
{
client.positionElement();
}
}
}
},
/**
* Check to see if any of the ZeroClipboard client's attached need to be resized
*/
"fnResizeRequired": function ()
{
for ( var cli in ZeroClipboard_TableTools.clients )
{
if ( cli )
{
var client = ZeroClipboard_TableTools.clients[cli];
if ( typeof client.domElement != 'undefined' &&
client.domElement.parentNode == this.dom.container &&
client.sized === false )
{
return true;
}
}
}
return false;
},
/**
* Programmatically enable or disable the print view
* @param {boolean} [bView=true] Show the print view if true or not given. If false, then
* terminate the print view and return to normal.
* @param {object} [oConfig={}] Configuration for the print view
* @param {boolean} [oConfig.bShowAll=false] Show all rows in the table if true
* @param {string} [oConfig.sInfo] Information message, displayed as an overlay to the
* user to let them know what the print view is.
* @param {string} [oConfig.sMessage] HTML string to show at the top of the document - will
* be included in the printed document.
*/
"fnPrint": function ( bView, oConfig )
{
if ( oConfig === undefined )
{
oConfig = {};
}
if ( bView === undefined || bView )
{
this._fnPrintStart( oConfig );
}
else
{
this._fnPrintEnd();
}
},
/**
* Show a message to the end user which is nicely styled
* @param {string} message The HTML string to show to the user
* @param {int} time The duration the message is to be shown on screen for (mS)
*/
"fnInfo": function ( message, time ) {
var info = $('<div/>')
.addClass( this.classes.print.info )
.html( message )
.appendTo( 'body' );
setTimeout( function() {
info.fadeOut( "normal", function() {
info.remove();
} );
}, time );
},
/**
 * Get the container element of the instance for attaching to the DOM.
 * This is the DIV that all TableTools buttons are inserted into.
 * @returns {node} DOM node
 */
"fnContainer": function () {
	return this.dom.container;
},
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Private methods (they are of course public in JS, but recommended as private)
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/**
 * Constructor logic: merges user options into settings, builds the button
 * container, wires up row selection (when enabled) and registers a
 * DataTables destroy callback to tear everything down again.
 * @method _fnConstruct
 * @param {Object} oOpts Same as TableTools constructor
 * @returns void
 * @private
 */
"_fnConstruct": function ( oOpts )
{
	var that = this;

	this._fnCustomiseSettings( oOpts );

	/* Container element */
	this.dom.container = document.createElement( this.s.tags.container );
	this.dom.container.className = this.classes.container;

	/* Row selection config */
	if ( this.s.select.type != 'none' )
	{
		this._fnRowSelectConfig();
	}

	/* Buttons */
	this._fnButtonDefinations( this.s.buttonSet, this.dom.container );

	/* Destructor - unbind selection handlers, empty the container and
	 * drop this instance from the global instance registry */
	this.s.dt.aoDestroyCallback.push( {
		"sName": "TableTools",
		"fn": function () {
			$(that.s.dt.nTBody).off( 'click.DTTT_Select', 'tr' );
			$(that.dom.container).empty();

			// Remove the instance
			var idx = $.inArray( that, TableTools._aInstances );
			if ( idx !== -1 ) {
				TableTools._aInstances.splice( idx, 1 );
			}
		}
	} );
},
/**
* Take the user defined settings and the default settings and combine them.
* @method _fnCustomiseSettings
* @param {Object} oOpts Same as TableTools constructor
* @returns void
* @private
*/
"_fnCustomiseSettings": function ( oOpts )
{
/* Is this the master control instance or not? */
if ( typeof this.s.dt._TableToolsInit == 'undefined' )
{
this.s.master = true;
this.s.dt._TableToolsInit = true;
}
/* We can use the table node from comparisons to group controls */
this.dom.table = this.s.dt.nTable;
/* Clone the defaults and then the user options */
this.s.custom = $.extend( {}, TableTools.DEFAULTS, oOpts );
/* Flash file location */
this.s.swfPath = this.s.custom.sSwfPath;
if ( typeof ZeroClipboard_TableTools != 'undefined' )
{
ZeroClipboard_TableTools.moviePath = this.s.swfPath;
}
/* Table row selecting */
this.s.select.type = this.s.custom.sRowSelect;
this.s.select.preRowSelect = this.s.custom.fnPreRowSelect;
this.s.select.postSelected = this.s.custom.fnRowSelected;
this.s.select.postDeselected = this.s.custom.fnRowDeselected;
// Backwards compatibility - allow the user to specify a custom class in the initialiser
if ( this.s.custom.sSelectedClass )
{
this.classes.select.row = this.s.custom.sSelectedClass;
}
this.s.tags = this.s.custom.oTags;
/* Button set */
this.s.buttonSet = this.s.custom.aButtons;
},
/**
 * Take the user input arrays and expand them to be fully defined, and then add them to a given
 * DOM element
 * @method _fnButtonDefinations
 * @param {array} buttonSet Set of user defined buttons
 * @param {node} wrapper Node to add the created buttons to
 * @returns void
 * @private
 */
"_fnButtonDefinations": function ( buttonSet, wrapper )
{
var buttonDef;
for ( var i=0, iLen=buttonSet.length ; i<iLen ; i++ )
{
if ( typeof buttonSet[i] == "string" )
{
// String entry - look up the built-in button definition directly
if ( typeof TableTools.BUTTONS[ buttonSet[i] ] == 'undefined' )
{
alert( "TableTools: Warning - unknown button type: "+buttonSet[i] );
continue;
}
// NOTE(review): the trailing 'true' is in $.extend's *source object*
// position (shallow copy of the definition), not the deep-copy flag -
// confirm whether a deep copy ($.extend(true, {}, ...)) was intended
buttonDef = $.extend( {}, TableTools.BUTTONS[ buttonSet[i] ], true );
}
else
{
// Object entry - layer the user's overrides on top of the base
// definition named by sExtends
if ( typeof TableTools.BUTTONS[ buttonSet[i].sExtends ] == 'undefined' )
{
alert( "TableTools: Warning - unknown button type: "+buttonSet[i].sExtends );
continue;
}
var o = $.extend( {}, TableTools.BUTTONS[ buttonSet[i].sExtends ], true );
buttonDef = $.extend( o, buttonSet[i], true );
}
var button = this._fnCreateButton(
buttonDef,
$(wrapper).hasClass(this.classes.collection.container)
);
// _fnCreateButton returns false for Flash buttons when Flash is unavailable
if ( button ) {
wrapper.appendChild( button );
}
}
},
/**
* Create and configure a TableTools button
* @method _fnCreateButton
* @param {Object} oConfig Button configuration object
* @returns {Node} Button element
* @private
*/
"_fnCreateButton": function ( oConfig, bCollectionButton )
{
var nButton = this._fnButtonBase( oConfig, bCollectionButton );
if ( oConfig.sAction.match(/flash/) )
{
if ( ! this._fnHasFlash() ) {
return false;
}
this._fnFlashConfig( nButton, oConfig );
}
else if ( oConfig.sAction == "text" )
{
this._fnTextConfig( nButton, oConfig );
}
else if ( oConfig.sAction == "div" )
{
this._fnTextConfig( nButton, oConfig );
}
else if ( oConfig.sAction == "collection" )
{
this._fnTextConfig( nButton, oConfig );
this._fnCollectionConfig( nButton, oConfig );
}
if ( this.s.dt.iTabIndex !== -1 ) {
$(nButton)
.attr( 'tabindex', this.s.dt.iTabIndex )
.attr( 'aria-controls', this.s.dt.sTableId )
.on( 'keyup.DTTT', function (e) {
// Trigger the click event on return key when focused.
// Note that for Flash buttons this has no effect since we
// can't programmatically trigger the Flash export
if ( e.keyCode === 13 ) {
e.stopPropagation();
$(this).trigger( 'click' );
}
} )
.on( 'mousedown.DTTT', function (e) {
// On mousedown we want to stop the focus occurring on the
// button, focus is used only for the keyboard navigation.
// But using preventDefault for the flash buttons stops the
// flash action. However, it is not the button that gets
// focused but the flash element for flash buttons, so this
// works
if ( ! oConfig.sAction.match(/flash/) ) {
e.preventDefault();
}
} );
}
return nButton;
},
/**
* Create the DOM needed for the button and apply some base properties. All buttons start here
* @method _fnButtonBase
* @param {o} oConfig Button configuration object
* @returns {Node} DIV element for the button
* @private
*/
"_fnButtonBase": function ( o, bCollectionButton )
{
var sTag, sLiner, sClass;
if ( bCollectionButton )
{
sTag = o.sTag && o.sTag !== "default" ? o.sTag : this.s.tags.collection.button;
sLiner = o.sLinerTag && o.sLinerTag !== "default" ? o.sLiner : this.s.tags.collection.liner;
sClass = this.classes.collection.buttons.normal;
}
else
{
sTag = o.sTag && o.sTag !== "default" ? o.sTag : this.s.tags.button;
sLiner = o.sLinerTag && o.sLinerTag !== "default" ? o.sLiner : this.s.tags.liner;
sClass = this.classes.buttons.normal;
}
var
nButton = document.createElement( sTag ),
nSpan = document.createElement( sLiner ),
masterS = this._fnGetMasterSettings();
nButton.className = sClass+" "+o.sButtonClass;
nButton.setAttribute('id', "ToolTables_"+this.s.dt.sInstance+"_"+masterS.buttonCounter );
nButton.appendChild( nSpan );
nSpan.innerHTML = o.sButtonText;
masterS.buttonCounter++;
return nButton;
},
/**
* Get the settings object for the master instance. When more than one TableTools instance is
* assigned to a DataTable, only one of them can be the 'master' (for the select rows). As such,
* we will typically want to interact with that master for global properties.
* @method _fnGetMasterSettings
* @returns {Object} TableTools settings object
* @private
*/
"_fnGetMasterSettings": function ()
{
if ( this.s.master )
{
return this.s;
}
else
{
/* Look for the master which has the same DT as this one */
var instances = TableTools._aInstances;
for ( var i=0, iLen=instances.length ; i<iLen ; i++ )
{
if ( this.dom.table == instances[i].s.dt.nTable )
{
return instances[i].s;
}
}
}
},
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Button collection functions
*/
/**
* Create a collection button, when activated will present a drop down list of other buttons
* @param {Node} nButton Button to use for the collection activation
* @param {Object} oConfig Button configuration object
* @returns void
* @private
*/
"_fnCollectionConfig": function ( nButton, oConfig )
{
var nHidden = document.createElement( this.s.tags.collection.container );
nHidden.style.display = "none";
nHidden.className = this.classes.collection.container;
oConfig._collection = nHidden;
document.body.appendChild( nHidden );
this._fnButtonDefinations( oConfig.aButtons, nHidden );
},
/**
 * Show a button collection: position the hidden sub-button container under
 * the activating button, add a full-page background overlay, fade both in,
 * and bind a click on the background to dismiss the collection.
 * @param {Node} nButton Button to use for the collection
 * @param {Object} oConfig Button configuration object
 * @returns void
 * @private
 */
"_fnCollectionShow": function ( nButton, oConfig )
{
var
that = this,
oPos = $(nButton).offset(),
nHidden = oConfig._collection,
iDivX = oPos.left,
iDivY = oPos.top + $(nButton).outerHeight(),
iWinHeight = $(window).height(), iDocHeight = $(document).height(),
iWinWidth = $(window).width(), iDocWidth = $(document).width();
// Position the collection directly below the button, initially transparent
nHidden.style.position = "absolute";
nHidden.style.left = iDivX+"px";
nHidden.style.top = iDivY+"px";
nHidden.style.display = "block";
$(nHidden).css('opacity',0);
// Background overlay covering the larger of the window / document extents
var nBackground = document.createElement('div');
nBackground.style.position = "absolute";
nBackground.style.left = "0px";
nBackground.style.top = "0px";
nBackground.style.height = ((iWinHeight>iDocHeight)? iWinHeight : iDocHeight) +"px";
nBackground.style.width = ((iWinWidth>iDocWidth)? iWinWidth : iDocWidth) +"px";
nBackground.className = this.classes.collection.background;
$(nBackground).css('opacity',0);
// Order matters: background first so the collection stacks above it
document.body.appendChild( nBackground );
document.body.appendChild( nHidden );
/* Visual corrections to try and keep the collection visible */
var iDivWidth = $(nHidden).outerWidth();
var iDivHeight = $(nHidden).outerHeight();
if ( iDivX + iDivWidth > iDocWidth )
{
nHidden.style.left = (iDocWidth-iDivWidth)+"px";
}
if ( iDivY + iDivHeight > iDocHeight )
{
// Flip the collection above the button when it would overflow the bottom
nHidden.style.top = (iDivY-iDivHeight-$(nButton).outerHeight())+"px";
}
this.dom.collection.collection = nHidden;
this.dom.collection.background = nBackground;
/* This results in a very small delay for the end user but it allows the animation to be
* much smoother. If you don't want the animation, then the setTimeout can be removed
*/
setTimeout( function () {
$(nHidden).animate({"opacity": 1}, 500);
$(nBackground).animate({"opacity": 0.25}, 500);
}, 10 );
/* Resize the buttons to the Flash contents fit */
this.fnResizeButtons();
/* Event handler to remove the collection display */
$(nBackground).click( function () {
that._fnCollectionHide.call( that, null, null );
} );
},
/**
* Hide a button collection
* @param {Node} nButton Button to use for the collection
* @param {Object} oConfig Button configuration object
* @returns void
* @private
*/
"_fnCollectionHide": function ( nButton, oConfig )
{
if ( oConfig !== null && oConfig.sExtends == 'collection' )
{
return;
}
if ( this.dom.collection.collection !== null )
{
$(this.dom.collection.collection).animate({"opacity": 0}, 500, function (e) {
this.style.display = "none";
} );
$(this.dom.collection.background).animate({"opacity": 0}, 500, function (e) {
this.parentNode.removeChild( this );
} );
this.dom.collection.collection = null;
this.dom.collection.background = null;
}
},
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Row selection functions
*/
/**
 * Add event handlers to a table to allow for row selection. Only the master
 * instance binds handlers (so multiple TableTools on one table don't double
 * up). Implements 'os' (ctrl/shift click, file-manager style), 'single' and
 * 'multi' selection modes.
 * @method _fnRowSelectConfig
 * @returns void
 * @private
 */
"_fnRowSelectConfig": function ()
{
if ( this.s.master )
{
var
that = this,
i, iLen,
dt = this.s.dt,
aoOpenRows = this.s.dt.aoOpenRows;
$(dt.nTable).addClass( this.classes.select.table );
// When using OS style selection, we want to cancel the shift text
// selection, but only when the shift key is used (so you can
// actually still select text in the table)
if ( this.s.select.type === 'os' ) {
$(dt.nTBody).on( 'mousedown.DTTT_Select', 'tr', function(e) {
if ( e.shiftKey ) {
$(dt.nTBody)
.css( '-moz-user-select', 'none' )
.one('selectstart.DTTT_Select', 'tr', function () {
return false;
} );
}
} );
$(dt.nTBody).on( 'mouseup.DTTT_Select', 'tr', function(e) {
$(dt.nTBody).css( '-moz-user-select', '' );
} );
}
// Row selection
$(dt.nTBody).on( 'click.DTTT_Select', this.s.custom.sRowSelector, function(e) {
// The selector may match a child of the row - walk up to the TR
var row = this.nodeName.toLowerCase() === 'tr' ?
this :
$(this).parents('tr')[0];
var select = that.s.select;
// NOTE(review): fnGetPosition runs before the validity checks below;
// appears harmless since pos is only used after those checks - confirm
var pos = that.s.dt.oInstance.fnGetPosition( row );
/* Sub-table must be ignored (odd that the selector won't do this with >) */
if ( row.parentNode != dt.nTBody ) {
return;
}
/* Check that we are actually working with a DataTables controlled row */
if ( dt.oInstance.fnGetData(row) === null ) {
return;
}
// Shift click, ctrl click and simple click handling to make
// row selection a lot like a file system in desktop OSs
if ( select.type == 'os' ) {
if ( e.ctrlKey || e.metaKey ) {
// Add or remove from the selection
if ( that.fnIsSelected( row ) ) {
that._fnRowDeselect( row, e );
}
else {
that._fnRowSelect( row, e );
}
}
else if ( e.shiftKey ) {
// Add a range of rows, from the last selected row to
// this one
var rowIdxs = that.s.dt.aiDisplay.slice(); // visible rows
var idx1 = $.inArray( select.lastRow, rowIdxs );
var idx2 = $.inArray( pos, rowIdxs );
if ( that.fnGetSelected().length === 0 || idx1 === -1 ) {
// select from top to here - slightly odd, but both
// Windows and Mac OS do this
rowIdxs.splice( $.inArray( pos, rowIdxs )+1, rowIdxs.length );
}
else {
// reverse so we can shift click 'up' as well as down
if ( idx1 > idx2 ) {
var tmp = idx2;
idx2 = idx1;
idx1 = tmp;
}
// Trim rowIdxs down to the inclusive [idx1, idx2] range
rowIdxs.splice( idx2+1, rowIdxs.length );
rowIdxs.splice( 0, idx1 );
}
if ( ! that.fnIsSelected( row ) ) {
// Select range
that._fnRowSelect( rowIdxs, e );
}
else {
// Deselect range - need to keep the clicked on row selected
rowIdxs.splice( $.inArray( pos, rowIdxs ), 1 );
that._fnRowDeselect( rowIdxs, e );
}
}
else {
// No cmd or shift click. Deselect current if selected,
// or select this row only
if ( that.fnIsSelected( row ) && that.fnGetSelected().length === 1 ) {
that._fnRowDeselect( row, e );
}
else {
that.fnSelectNone();
that._fnRowSelect( row, e );
}
}
}
else if ( that.fnIsSelected( row ) ) {
// 'single' / 'multi' modes: a click on a selected row toggles it off
that._fnRowDeselect( row, e );
}
else if ( select.type == "single" ) {
that.fnSelectNone();
that._fnRowSelect( row, e );
}
else if ( select.type == "multi" ) {
that._fnRowSelect( row, e );
}
// Remember the anchor row for subsequent shift-click range selection
select.lastRow = pos;
} );//.on('selectstart', function () { return false; } );
// Bind a listener to the DataTable for when new rows are created.
// This allows rows to be visually selected when they should be and
// deferred rendering is used.
dt.oApi._fnCallbackReg( dt, 'aoRowCreatedCallback', function (tr, data, index) {
if ( dt.aoData[index]._DTTT_selected ) {
$(tr).addClass( that.classes.select.row );
}
}, 'TableTools-SelectAll' );
}
},
/**
* Select rows
* @param {*} src Rows to select - see _fnSelectData for a description of valid inputs
* @private
*/
"_fnRowSelect": function ( src, e )
{
var
that = this,
data = this._fnSelectData( src ),
firstTr = data.length===0 ? null : data[0].nTr,
anSelected = [],
i, len;
// Get all the rows that will be selected
for ( i=0, len=data.length ; i<len ; i++ )
{
if ( data[i].nTr )
{
anSelected.push( data[i].nTr );
}
}
// User defined pre-selection function
if ( this.s.select.preRowSelect !== null && !this.s.select.preRowSelect.call(this, e, anSelected, true) )
{
return;
}
// Mark them as selected
for ( i=0, len=data.length ; i<len ; i++ )
{
data[i]._DTTT_selected = true;
if ( data[i].nTr )
{
$(data[i].nTr).addClass( that.classes.select.row );
}
}
// Post-selection function
if ( this.s.select.postSelected !== null )
{
this.s.select.postSelected.call( this, anSelected );
}
TableTools._fnEventDispatch( this, 'select', anSelected, true );
},
/**
* Deselect rows
* @param {*} src Rows to deselect - see _fnSelectData for a description of valid inputs
* @private
*/
"_fnRowDeselect": function ( src, e )
{
var
that = this,
data = this._fnSelectData( src ),
firstTr = data.length===0 ? null : data[0].nTr,
anDeselectedTrs = [],
i, len;
// Get all the rows that will be deselected
for ( i=0, len=data.length ; i<len ; i++ )
{
if ( data[i].nTr )
{
anDeselectedTrs.push( data[i].nTr );
}
}
// User defined pre-selection function
if ( this.s.select.preRowSelect !== null && !this.s.select.preRowSelect.call(this, e, anDeselectedTrs, false) )
{
return;
}
// Mark them as deselected
for ( i=0, len=data.length ; i<len ; i++ )
{
data[i]._DTTT_selected = false;
if ( data[i].nTr )
{
$(data[i].nTr).removeClass( that.classes.select.row );
}
}
// Post-deselection function
if ( this.s.select.postDeselected !== null )
{
this.s.select.postDeselected.call( this, anDeselectedTrs );
}
TableTools._fnEventDispatch( this, 'select', anDeselectedTrs, false );
},
/**
* Take a data source for row selection and convert it into aoData points for the DT
* @param {*} src Can be a single DOM TR node, an array of TR nodes (including a
* a jQuery object), a single aoData point from DataTables, an array of aoData
* points or an array of aoData indexes
* @returns {array} An array of aoData points
*/
"_fnSelectData": function ( src )
{
var out = [], pos, i, iLen;
if ( src.nodeName )
{
// Single node
pos = this.s.dt.oInstance.fnGetPosition( src );
out.push( this.s.dt.aoData[pos] );
}
else if ( typeof src.length !== 'undefined' )
{
// jQuery object or an array of nodes, or aoData points
for ( i=0, iLen=src.length ; i<iLen ; i++ )
{
if ( src[i].nodeName )
{
pos = this.s.dt.oInstance.fnGetPosition( src[i] );
out.push( this.s.dt.aoData[pos] );
}
else if ( typeof src[i] === 'number' )
{
out.push( this.s.dt.aoData[ src[i] ] );
}
else
{
out.push( src[i] );
}
}
return out;
}
else
{
// A single aoData point
out.push( src );
}
return out;
},
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Text button functions
*/
/**
* Configure a text based button for interaction events
* @method _fnTextConfig
* @param {Node} nButton Button element which is being considered
* @param {Object} oConfig Button configuration object
* @returns void
* @private
*/
"_fnTextConfig": function ( nButton, oConfig )
{
var that = this;
if ( oConfig.fnInit !== null )
{
oConfig.fnInit.call( this, nButton, oConfig );
}
if ( oConfig.sToolTip !== "" )
{
nButton.title = oConfig.sToolTip;
}
$(nButton).hover( function () {
if ( oConfig.fnMouseover !== null )
{
oConfig.fnMouseover.call( this, nButton, oConfig, null );
}
}, function () {
if ( oConfig.fnMouseout !== null )
{
oConfig.fnMouseout.call( this, nButton, oConfig, null );
}
} );
if ( oConfig.fnSelect !== null )
{
TableTools._fnEventListen( this, 'select', function (n) {
oConfig.fnSelect.call( that, nButton, oConfig, n );
} );
}
$(nButton).click( function (e) {
//e.preventDefault();
if ( oConfig.fnClick !== null )
{
oConfig.fnClick.call( that, nButton, oConfig, null, e );
}
/* Provide a complete function to match the behaviour of the flash elements */
if ( oConfig.fnComplete !== null )
{
oConfig.fnComplete.call( that, nButton, oConfig, null, null );
}
that._fnCollectionHide( nButton, oConfig );
} );
},
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Flash button functions
*/
/**
 * Check if the Flash plug-in is available
 * @method _fnHasFlash
 * @returns {boolean} `true` if Flash available, `false` otherwise
 * @private
 */
"_fnHasFlash": function ()
{
// IE exposes Flash via ActiveX; the constructor throws when it is absent
try {
var fo = new ActiveXObject('ShockwaveFlash.ShockwaveFlash');
if (fo) {
return true;
}
}
catch (e) {
// Other browsers advertise the plug-in through navigator.mimeTypes
if (
navigator.mimeTypes &&
navigator.mimeTypes['application/x-shockwave-flash'] !== undefined &&
navigator.mimeTypes['application/x-shockwave-flash'].enabledPlugin
) {
return true;
}
}
return false;
},
/**
* Configure a flash based button for interaction events
* @method _fnFlashConfig
* @param {Node} nButton Button element which is being considered
* @param {o} oConfig Button configuration object
* @returns void
* @private
*/
"_fnFlashConfig": function ( nButton, oConfig )
{
var that = this;
var flash = new ZeroClipboard_TableTools.Client();
if ( oConfig.fnInit !== null )
{
oConfig.fnInit.call( this, nButton, oConfig );
}
flash.setHandCursor( true );
if ( oConfig.sAction == "flash_save" )
{
flash.setAction( 'save' );
flash.setCharSet( (oConfig.sCharSet=="utf16le") ? 'UTF16LE' : 'UTF8' );
flash.setBomInc( oConfig.bBomInc );
flash.setFileName( oConfig.sFileName.replace('*', this.fnGetTitle(oConfig)) );
}
else if ( oConfig.sAction == "flash_pdf" )
{
flash.setAction( 'pdf' );
flash.setFileName( oConfig.sFileName.replace('*', this.fnGetTitle(oConfig)) );
}
else
{
flash.setAction( 'copy' );
}
flash.addEventListener('mouseOver', function(client) {
if ( oConfig.fnMouseover !== null )
{
oConfig.fnMouseover.call( that, nButton, oConfig, flash );
}
} );
flash.addEventListener('mouseOut', function(client) {
if ( oConfig.fnMouseout !== null )
{
oConfig.fnMouseout.call( that, nButton, oConfig, flash );
}
} );
flash.addEventListener('mouseDown', function(client) {
if ( oConfig.fnClick !== null )
{
oConfig.fnClick.call( that, nButton, oConfig, flash );
}
} );
flash.addEventListener('complete', function (client, text) {
if ( oConfig.fnComplete !== null )
{
oConfig.fnComplete.call( that, nButton, oConfig, flash, text );
}
that._fnCollectionHide( nButton, oConfig );
} );
this._fnFlashGlue( flash, nButton, oConfig.sToolTip );
},
/**
* Wait until the id is in the DOM before we "glue" the swf. Note that this function will call
* itself (using setTimeout) until it completes successfully
* @method _fnFlashGlue
* @param {Object} clip Zero clipboard object
* @param {Node} node node to glue swf to
* @param {String} text title of the flash movie
* @returns void
* @private
*/
"_fnFlashGlue": function ( flash, node, text )
{
var that = this;
var id = node.getAttribute('id');
if ( document.getElementById(id) )
{
flash.glue( node, text );
}
else
{
setTimeout( function () {
that._fnFlashGlue( flash, node, text );
}, 100 );
}
},
/**
* Set the text for the flash clip to deal with
*
* This function is required for large information sets. There is a limit on the
* amount of data that can be transferred between Javascript and Flash in a single call, so
* we use this method to build up the text in Flash by sending over chunks. It is estimated
* that the data limit is around 64k, although it is undocumented, and appears to be different
* between different flash versions. We chunk at 8KiB.
* @method _fnFlashSetText
* @param {Object} clip the ZeroClipboard object
* @param {String} sData the data to be set
* @returns void
* @private
*/
"_fnFlashSetText": function ( clip, sData )
{
var asData = this._fnChunkData( sData, 8192 );
clip.clearText();
for ( var i=0, iLen=asData.length ; i<iLen ; i++ )
{
clip.appendText( asData[i] );
}
},
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Data retrieval functions
*/
/**
* Convert the mixed columns variable into a boolean array the same size as the columns, which
* indicates which columns we want to include
* @method _fnColumnTargets
* @param {String|Array} mColumns The columns to be included in data retrieval. If a string
* then it can take the value of "visible" or "hidden" (to include all visible or
* hidden columns respectively). Or an array of column indexes
* @returns {Array} A boolean array the length of the columns of the table, which each value
* indicating if the column is to be included or not
* @private
*/
"_fnColumnTargets": function ( mColumns )
{
var aColumns = [];
var dt = this.s.dt;
var i, iLen;
var columns = dt.aoColumns;
var columnCount = columns.length;
if ( typeof mColumns == "function" )
{
var a = mColumns.call( this, dt );
for ( i=0, iLen=columnCount ; i<iLen ; i++ )
{
aColumns.push( $.inArray( i, a ) !== -1 ? true : false );
}
}
else if ( typeof mColumns == "object" )
{
for ( i=0, iLen=columnCount ; i<iLen ; i++ )
{
aColumns.push( false );
}
for ( i=0, iLen=mColumns.length ; i<iLen ; i++ )
{
aColumns[ mColumns[i] ] = true;
}
}
else if ( mColumns == "visible" )
{
for ( i=0, iLen=columnCount ; i<iLen ; i++ )
{
aColumns.push( columns[i].bVisible ? true : false );
}
}
else if ( mColumns == "hidden" )
{
for ( i=0, iLen=columnCount ; i<iLen ; i++ )
{
aColumns.push( columns[i].bVisible ? false : true );
}
}
else if ( mColumns == "sortable" )
{
for ( i=0, iLen=columnCount ; i<iLen ; i++ )
{
aColumns.push( columns[i].bSortable ? true : false );
}
}
else /* all */
{
for ( i=0, iLen=columnCount ; i<iLen ; i++ )
{
aColumns.push( true );
}
}
return aColumns;
},
/**
* New line character(s) depend on the platforms
* @method method
* @param {Object} oConfig Button configuration object - only interested in oConfig.sNewLine
* @returns {String} Newline character
*/
"_fnNewline": function ( oConfig )
{
if ( oConfig.sNewLine == "auto" )
{
return navigator.userAgent.match(/Windows/) ? "\r\n" : "\n";
}
else
{
return oConfig.sNewLine;
}
},
/**
 * Get data from DataTables' internals and format it for output
 * @method _fnGetDataTablesData
 * @param {Object} oConfig Button configuration object
 * @param {String} oConfig.sFieldBoundary Field boundary for the data cells in the string
 * @param {String} oConfig.sFieldSeperator Field separator for the data cells
 * @param {String} oConfig.sNewline New line options
 * @param {Mixed} oConfig.mColumns Which columns should be included in the output
 * @param {Boolean} oConfig.bHeader Include the header
 * @param {Boolean} oConfig.bFooter Include the footer
 * @param {Boolean} oConfig.bSelectedOnly Include only the selected rows in the output
 * @returns {String} Concatenated string of data
 * @private
 */
"_fnGetDataTablesData": function ( oConfig )
{
var i, iLen, j, jLen;
var aRow, aData=[], sLoopData='', arr;
var dt = this.s.dt, tr, child;
var regex = new RegExp(oConfig.sFieldBoundary, "g"); /* Do it here for speed */
var aColumnsInc = this._fnColumnTargets( oConfig.mColumns );
var bSelectedOnly = (typeof oConfig.bSelectedOnly != 'undefined') ? oConfig.bSelectedOnly : false;
/*
* Header
*/
if ( oConfig.bHeader )
{
aRow = [];
for ( i=0, iLen=dt.aoColumns.length ; i<iLen ; i++ )
{
if ( aColumnsInc[i] )
{
// Column title with newlines collapsed, HTML tags stripped and whitespace trimmed
sLoopData = dt.aoColumns[i].sTitle.replace(/\n/g," ").replace( /<.*?>/g, "" ).replace(/^\s+|\s+$/g,"");
sLoopData = this._fnHtmlDecode( sLoopData );
aRow.push( this._fnBoundData( sLoopData, oConfig.sFieldBoundary, regex ) );
}
}
aData.push( aRow.join(oConfig.sFieldSeperator) );
}
// NOTE(review): this unconditionally overrides the oConfig.bSelectedOnly
// value computed above, making that option effectively dead - whenever any
// rows are selected the export is restricted to them. Confirm this is the
// intended behaviour before relying on bSelectedOnly=false.
bSelectedOnly = true;
/*
* Body
*/
var aDataIndex;
var aSelected = this.fnGetSelectedIndexes();
bSelectedOnly = this.s.select.type !== "none" && bSelectedOnly && aSelected.length !== 0;
if ( bSelectedOnly ) {
// Use the selected indexes
aDataIndex = aSelected;
}
else if ( DataTable.Api ) {
// 1.10+ style
aDataIndex = new DataTable.Api( dt )
.rows( oConfig.oSelectorOpts )
.indexes()
.flatten()
.toArray();
}
else {
// 1.9- style
aDataIndex = dt.oInstance
.$('tr', oConfig.oSelectorOpts)
.map( function (id, row) {
return dt.oInstance.fnGetPosition( row );
} )
.get();
}
for ( j=0, jLen=aDataIndex.length ; j<jLen ; j++ )
{
tr = dt.aoData[ aDataIndex[j] ].nTr;
aRow = [];
/* Columns */
for ( i=0, iLen=dt.aoColumns.length ; i<iLen ; i++ )
{
if ( aColumnsInc[i] )
{
/* Convert to strings (with small optimisation) */
var mTypeData = dt.oApi._fnGetCellData( dt, aDataIndex[j], i, 'display' );
if ( oConfig.fnCellRender )
{
// Custom cell renderer takes precedence over the default stripping
sLoopData = oConfig.fnCellRender( mTypeData, i, tr, aDataIndex[j] )+"";
}
else if ( typeof mTypeData == "string" )
{
/* Strip newlines, replace img tags with alt attr. and finally strip html... */
sLoopData = mTypeData.replace(/\n/g," ");
sLoopData =
sLoopData.replace(/<img.*?\s+alt\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s>]+)).*?>/gi,
'$1$2$3');
sLoopData = sLoopData.replace( /<.*?>/g, "" );
}
else
{
sLoopData = mTypeData+"";
}
/* Trim and clean the data */
sLoopData = sLoopData.replace(/^\s+/, '').replace(/\s+$/, '');
sLoopData = this._fnHtmlDecode( sLoopData );
/* Bound it and add it to the total data */
aRow.push( this._fnBoundData( sLoopData, oConfig.sFieldBoundary, regex ) );
}
}
aData.push( aRow.join(oConfig.sFieldSeperator) );
/* Details rows from fnOpen */
if ( oConfig.bOpenRows )
{
// Include the content of an open child row (fnOpen) directly below its parent
arr = $.grep(dt.aoOpenRows, function(o) { return o.nParent === tr; });
if ( arr.length === 1 )
{
sLoopData = this._fnBoundData( $('td', arr[0].nTr).html(), oConfig.sFieldBoundary, regex );
aData.push( sLoopData );
}
}
}
/*
* Footer
*/
if ( oConfig.bFooter && dt.nTFoot !== null )
{
aRow = [];
for ( i=0, iLen=dt.aoColumns.length ; i<iLen ; i++ )
{
if ( aColumnsInc[i] && dt.aoColumns[i].nTf !== null )
{
sLoopData = dt.aoColumns[i].nTf.innerHTML.replace(/\n/g," ").replace( /<.*?>/g, "" );
sLoopData = this._fnHtmlDecode( sLoopData );
aRow.push( this._fnBoundData( sLoopData, oConfig.sFieldBoundary, regex ) );
}
}
aData.push( aRow.join(oConfig.sFieldSeperator) );
}
var _sLastData = aData.join( this._fnNewline(oConfig) );
return _sLastData;
},
/**
* Wrap data up with a boundary string
* @method _fnBoundData
* @param {String} sData data to bound
* @param {String} sBoundary bounding char(s)
* @param {RegExp} regex search for the bounding chars - constructed outside for efficiency
* in the loop
* @returns {String} bound data
* @private
*/
"_fnBoundData": function ( sData, sBoundary, regex )
{
if ( sBoundary === "" )
{
return sData;
}
else
{
return sBoundary + sData.replace(regex, sBoundary+sBoundary) + sBoundary;
}
},
/**
* Break a string up into an array of smaller strings
* @method _fnChunkData
* @param {String} sData data to be broken up
* @param {Int} iSize chunk size
* @returns {Array} String array of broken up text
* @private
*/
"_fnChunkData": function ( sData, iSize )
{
var asReturn = [];
var iStrlen = sData.length;
for ( var i=0 ; i<iStrlen ; i+=iSize )
{
if ( i+iSize < iStrlen )
{
asReturn.push( sData.substring( i, i+iSize ) );
}
else
{
asReturn.push( sData.substring( i, iStrlen ) );
}
}
return asReturn;
},
/**
* Decode HTML entities
* @method _fnHtmlDecode
* @param {String} sData encoded string
* @returns {String} decoded string
* @private
*/
"_fnHtmlDecode": function ( sData )
{
if ( sData.indexOf('&') === -1 )
{
return sData;
}
var n = document.createElement('div');
return sData.replace( /&([^\s]*?);/g, function( match, match2 ) {
if ( match.substr(1, 1) === '#' )
{
return String.fromCharCode( Number(match2.substr(1)) );
}
else
{
n.innerHTML = match;
return n.childNodes[0].nodeValue;
}
} );
},
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Printing functions
*/
/**
 * Show print display: hide everything on the page except the table, expand
 * the table to show all rows if requested, undo DataTables scrolling, hide
 * the other DataTables feature controls, and bind Esc to exit the view.
 * @method _fnPrintStart
 * @param {Object} oConfig Button configuration object
 * @returns void
 * @private
 */
"_fnPrintStart": function ( oConfig )
{
var that = this;
var oSetDT = this.s.dt;
/* Parse through the DOM hiding everything that isn't needed for the table */
this._fnPrintHideNodes( oSetDT.nTable );
/* Show the whole table */
// Saved so _fnPrintEnd can restore the original paging
this.s.print.saveStart = oSetDT._iDisplayStart;
this.s.print.saveLength = oSetDT._iDisplayLength;
if ( oConfig.bShowAll )
{
oSetDT._iDisplayStart = 0;
oSetDT._iDisplayLength = -1;
// _fnCalculateEnd exists in DT 1.9 and earlier only
if ( oSetDT.oApi._fnCalculateEnd ) {
oSetDT.oApi._fnCalculateEnd( oSetDT );
}
oSetDT.oApi._fnDraw( oSetDT );
}
/* Adjust the display for scrolling which might be done by DataTables */
if ( oSetDT.oScroll.sX !== "" || oSetDT.oScroll.sY !== "" )
{
this._fnPrintScrollStart( oSetDT );
// If the table redraws while in print view, the DataTables scrolling
// setup would hide the header, so we need to readd it on draw
$(this.s.dt.nTable).bind('draw.DTTT_Print', function () {
that._fnPrintScrollStart( oSetDT );
} );
}
/* Remove the other DataTables feature nodes - but leave the table! and info div */
var anFeature = oSetDT.aanFeatures;
for ( var cFeature in anFeature )
{
if ( cFeature != 'i' && cFeature != 't' && cFeature.length == 1 )
{
for ( var i=0, iLen=anFeature[cFeature].length ; i<iLen ; i++ )
{
// Remember each hidden node so _fnPrintShowNodes can restore it
this.dom.print.hidden.push( {
"node": anFeature[cFeature][i],
"display": "block"
} );
anFeature[cFeature][i].style.display = "none";
}
}
}
/* Print class can be used for styling */
$(document.body).addClass( this.classes.print.body );
/* Show information message to let the user know what is happening */
if ( oConfig.sInfo !== "" )
{
this.fnInfo( oConfig.sInfo, 3000 );
}
/* Add a message at the top of the page */
if ( oConfig.sMessage )
{
$('<div/>')
.addClass( this.classes.print.message )
.html( oConfig.sMessage )
.prependTo( 'body' );
}
/* Cache the scrolling and the jump to the top of the page */
this.s.print.saveScroll = $(window).scrollTop();
window.scrollTo( 0, 0 );
/* Bind a key event listener to the document for the escape key -
* it is removed in the callback
*/
$(document).bind( "keydown.DTTT", function(e) {
/* Only interested in the escape key */
if ( e.keyCode == 27 )
{
e.preventDefault();
that._fnPrintEnd.call( that, e );
}
} );
},
/**
* Printing is finished, resume normal display
* @method _fnPrintEnd
* @param {Event} e Event object
* @returns void
* @private
*/
"_fnPrintEnd": function ( e )
{
var that = this;
var oSetDT = this.s.dt;
var oSetPrint = this.s.print;
var oDomPrint = this.dom.print;
/* Show all hidden nodes */
this._fnPrintShowNodes();
/* Restore DataTables' scrolling */
if ( oSetDT.oScroll.sX !== "" || oSetDT.oScroll.sY !== "" )
{
$(this.s.dt.nTable).unbind('draw.DTTT_Print');
this._fnPrintScrollEnd();
}
/* Restore the scroll */
window.scrollTo( 0, oSetPrint.saveScroll );
/* Drop the print message */
$('div.'+this.classes.print.message).remove();
/* Styling class */
$(document.body).removeClass( 'DTTT_Print' );
/* Restore the table length */
oSetDT._iDisplayStart = oSetPrint.saveStart;
oSetDT._iDisplayLength = oSetPrint.saveLength;
if ( oSetDT.oApi._fnCalculateEnd ) {
oSetDT.oApi._fnCalculateEnd( oSetDT );
}
oSetDT.oApi._fnDraw( oSetDT );
$(document).unbind( "keydown.DTTT" );
},
/**
 * Take account of scrolling in DataTables by showing the full table.
 * NOTE(review): callers pass the DataTables settings object as an argument,
 * but this method declares no parameters and reads this.s.dt directly.
 * @returns void
 * @private
 */
"_fnPrintScrollStart": function ()
{
	var
		oSetDT = this.s.dt,
		nScrollHeadInner = oSetDT.nScrollHead.getElementsByTagName('div')[0],
		nScrollHeadTable = nScrollHeadInner.getElementsByTagName('table')[0], // currently unused
		nScrollBody = oSetDT.nTable.parentNode,
		nTheadSize, nTfootSize;

	/* Copy the header in the thead in the body table, this way we show one single table when
	 * in print view. Note that this section of code is more or less verbatim from DT 1.7.0
	 */
	nTheadSize = oSetDT.nTable.getElementsByTagName('thead');
	if ( nTheadSize.length > 0 )
	{
		oSetDT.nTable.removeChild( nTheadSize[0] );
	}

	if ( oSetDT.nTFoot !== null )
	{
		nTfootSize = oSetDT.nTable.getElementsByTagName('tfoot');
		if ( nTfootSize.length > 0 )
		{
			oSetDT.nTable.removeChild( nTfootSize[0] );
		}
	}

	/* Clone the live header (and footer) into the body table */
	nTheadSize = oSetDT.nTHead.cloneNode(true);
	oSetDT.nTable.insertBefore( nTheadSize, oSetDT.nTable.childNodes[0] );

	if ( oSetDT.nTFoot !== null )
	{
		nTfootSize = oSetDT.nTFoot.cloneNode(true);
		oSetDT.nTable.insertBefore( nTfootSize, oSetDT.nTable.childNodes[1] );
	}

	/* Now adjust the table's viewport so we can actually see it */
	if ( oSetDT.oScroll.sX !== "" )
	{
		oSetDT.nTable.style.width = $(oSetDT.nTable).outerWidth()+"px";
		nScrollBody.style.width = $(oSetDT.nTable).outerWidth()+"px";
		nScrollBody.style.overflow = "visible";
	}

	if ( oSetDT.oScroll.sY !== "" )
	{
		nScrollBody.style.height = $(oSetDT.nTable).outerHeight()+"px";
		nScrollBody.style.overflow = "visible";
	}
},
/**
* Take account of scrolling in DataTables by showing the full table. Note that the redraw of
* the DataTable that we do will actually deal with the majority of the hard work here
* @returns void
* @private
*/
"_fnPrintScrollEnd": function ()
{
var
oSetDT = this.s.dt,
nScrollBody = oSetDT.nTable.parentNode;
if ( oSetDT.oScroll.sX !== "" )
{
nScrollBody.style.width = oSetDT.oApi._fnStringToCss( oSetDT.oScroll.sX );
nScrollBody.style.overflow = "auto";
}
if ( oSetDT.oScroll.sY !== "" )
{
nScrollBody.style.height = oSetDT.oApi._fnStringToCss( oSetDT.oScroll.sY );
nScrollBody.style.overflow = "auto";
}
},
/**
* Resume the display of all TableTools hidden nodes
* @method _fnPrintShowNodes
* @returns void
* @private
*/
"_fnPrintShowNodes": function ( )
{
var anHidden = this.dom.print.hidden;
for ( var i=0, iLen=anHidden.length ; i<iLen ; i++ )
{
anHidden[i].node.style.display = anHidden[i].display;
}
anHidden.splice( 0, anHidden.length );
},
/**
* Hide nodes which are not needed in order to display the table. Note that this function is
* recursive
* @method _fnPrintHideNodes
* @param {Node} nNode Element which should be showing in a 'print' display
* @returns void
* @private
*/
"_fnPrintHideNodes": function ( nNode )
{
var anHidden = this.dom.print.hidden;
var nParent = nNode.parentNode;
var nChildren = nParent.childNodes;
for ( var i=0, iLen=nChildren.length ; i<iLen ; i++ )
{
if ( nChildren[i] != nNode && nChildren[i].nodeType == 1 )
{
/* If our node is shown (don't want to show nodes which were previously hidden) */
var sDisplay = $(nChildren[i]).css("display");
if ( sDisplay != "none" )
{
/* Cache the node and it's previous state so we can restore it */
anHidden.push( {
"node": nChildren[i],
"display": sDisplay
} );
nChildren[i].style.display = "none";
}
}
}
if ( nParent.nodeName.toUpperCase() != "BODY" )
{
this._fnPrintHideNodes( nParent );
}
}
};
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Static variables
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/**
 * Store of all instances that have been created of TableTools, so one can look up other (when
 * there is need of a master)
 * @property _aInstances
 * @type Array
 * @default []
 * @private
 */
TableTools._aInstances = [];

/**
 * Store of all listeners and their callback functions (populated by _fnEventListen)
 * @property _aListeners
 * @type Array
 * @default []
 */
TableTools._aListeners = [];
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Static methods
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/**
 * Get an array of all the master instances
 * @method fnGetMasters
 * @returns {Array} List of master TableTools instances
 * @static
 */
TableTools.fnGetMasters = function ()
{
	var masters = [];
	var instances = TableTools._aInstances;

	for ( var i = 0 ; i < instances.length ; i++ ) {
		if ( instances[i].s.master ) {
			masters.push( instances[i] );
		}
	}

	return masters;
};
/**
 * Get the master instance for a table node (or id if a string is given)
 * @method fnGetInstance
 * @param {Object|String} node Table node, or the id of a table
 * @returns {Object} Master TableTools instance for the table, or null
 * @static
 */
TableTools.fnGetInstance = function ( node )
{
	// Resolve an id string to its DOM node
	if ( typeof node != 'object' )
	{
		node = document.getElementById( node );
	}

	var instances = TableTools._aInstances;
	for ( var i = 0 ; i < instances.length ; i++ ) {
		var inst = instances[i];
		if ( inst.s.master && inst.dom.table == node ) {
			return inst;
		}
	}

	return null;
};
/**
 * Add a listener for a specific event
 * @method _fnEventListen
 * @param {Object} that Scope of the listening function (i.e. 'this' in the caller)
 * @param {String} type Event type
 * @param {Function} fn Function
 * @returns void
 * @private
 * @static
 */
TableTools._fnEventListen = function ( that, type, fn )
{
	TableTools._aListeners.push( { "that": that, "type": type, "fn": fn } );
};
/**
 * An event has occurred - look up every listener and fire it off. We check that the event we are
 * going to fire is attached to the same table (using the table node as reference) before firing
 * @method _fnEventDispatch
 * @param {Object} that Scope of the listening function (i.e. 'this' in the caller)
 * @param {String} type Event type
 * @param {Node} node Element that the event occurred on (may be null)
 * @param {boolean} [selected] Indicate if the node was selected (true) or deselected (false)
 * @returns void
 * @private
 * @static
 */
TableTools._fnEventDispatch = function ( that, type, node, selected )
{
	var listeners = TableTools._aListeners;

	for ( var i = 0 ; i < listeners.length ; i++ ) {
		var listener = listeners[i];

		// Only fire listeners registered for this table and this event type
		if ( listener.that.dom.table == that.dom.table && listener.type == type ) {
			listener.fn( node, selected );
		}
	}
};
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Constants
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/**
 * Base configuration object which every entry in TableTools.BUTTONS extends.
 * Note: "sFieldSeperator" is a historic misspelling kept for API compatibility.
 */
TableTools.buttonBase = {
	// Button base
	"sAction": "text",
	"sTag": "default",
	"sLinerTag": "default",
	"sButtonClass": "DTTT_button_text",
	"sButtonText": "Button text",
	"sTitle": "",
	"sToolTip": "",

	// Common button specific options
	"sCharSet": "utf8",
	"bBomInc": false,
	"sFileName": "*.csv",
	"sFieldBoundary": "",
	"sFieldSeperator": "\t",
	"sNewLine": "auto",
	"mColumns": "all", /* "all", "visible", "hidden" or array of column integers */
	"bHeader": true,
	"bFooter": true,
	"bOpenRows": false,
	"bSelectedOnly": false,
	"oSelectorOpts": undefined, // See http://datatables.net/docs/DataTables/1.9.4/#$ for full options

	// Callbacks
	"fnMouseover": null,
	"fnMouseout": null,
	"fnClick": null,
	"fnSelect": null,
	"fnComplete": null,
	"fnInit": null,
	"fnCellRender": null
};
/**
 * @namespace Default button configurations. Each entry extends
 * TableTools.buttonBase; "flash_*" actions rely on the ZeroClipboard SWF.
 */
TableTools.BUTTONS = {
	// Save table data as a CSV file (via Flash)
	"csv": $.extend( {}, TableTools.buttonBase, {
		"sAction": "flash_save",
		"sButtonClass": "DTTT_button_csv",
		"sButtonText": "CSV",
		"sFieldBoundary": '"',
		"sFieldSeperator": ",",
		"fnClick": function( nButton, oConfig, flash ) {
			this.fnSetText( flash, this.fnGetTableData(oConfig) );
		}
	} ),

	// Save table data as an Excel-compatible file (tab separated, UTF-16LE + BOM)
	"xls": $.extend( {}, TableTools.buttonBase, {
		"sAction": "flash_save",
		"sCharSet": "utf16le",
		"bBomInc": true,
		"sButtonClass": "DTTT_button_xls",
		"sButtonText": "Excel",
		"fnClick": function( nButton, oConfig, flash ) {
			this.fnSetText( flash, this.fnGetTableData(oConfig) );
		}
	} ),

	// Copy table data to the clipboard, with an information message on completion
	"copy": $.extend( {}, TableTools.buttonBase, {
		"sAction": "flash_copy",
		"sButtonClass": "DTTT_button_copy",
		"sButtonText": "Copy",
		"fnClick": function( nButton, oConfig, flash ) {
			this.fnSetText( flash, this.fnGetTableData(oConfig) );
		},
		"fnComplete": function(nButton, oConfig, flash, text) {
			// Count data rows only - discount the header / footer lines if present
			var lines = text.split('\n').length;
			if (oConfig.bHeader) lines--;
			if (this.s.dt.nTFoot !== null && oConfig.bFooter) lines--;
			var plural = (lines==1) ? "" : "s";
			this.fnInfo( '<h6>Table copied</h6>'+
				'<p>Copied '+lines+' row'+plural+' to the clipboard.</p>',
				1500
			);
		}
	} ),

	// Save table data as a PDF; options are serialised into the text sent to Flash
	"pdf": $.extend( {}, TableTools.buttonBase, {
		"sAction": "flash_pdf",
		"sNewLine": "\n",
		"sFileName": "*.pdf",
		"sButtonClass": "DTTT_button_pdf",
		"sButtonText": "PDF",
		"sPdfOrientation": "portrait",
		"sPdfSize": "A4",
		"sPdfMessage": "",
		"fnClick": function( nButton, oConfig, flash ) {
			this.fnSetText( flash,
				"title:"+ this.fnGetTitle(oConfig) +"\n"+
				"message:"+ oConfig.sPdfMessage +"\n"+
				"colWidth:"+ this.fnCalcColRatios(oConfig) +"\n"+
				"orientation:"+ oConfig.sPdfOrientation +"\n"+
				"size:"+ oConfig.sPdfSize +"\n"+
				"--/TableToolsOpts--\n" +
				this.fnGetTableData(oConfig)
			);
		}
	} ),

	// Enter the print view (escape key exits)
	"print": $.extend( {}, TableTools.buttonBase, {
		"sInfo": "<h6>Print view</h6><p>Please use your browser's print function to "+
			"print this table. Press escape when finished.</p>",
		"sMessage": null,
		"bShowAll": true,
		"sToolTip": "View print view",
		"sButtonClass": "DTTT_button_print",
		"sButtonText": "Print",
		"fnClick": function ( nButton, oConfig ) {
			this.fnPrint( true, oConfig );
		}
	} ),

	// Plain text button with no default behaviour
	"text": $.extend( {}, TableTools.buttonBase ),

	// Enabled only while at least one row is selected
	"select": $.extend( {}, TableTools.buttonBase, {
		"sButtonText": "Select button",
		"fnSelect": function( nButton, oConfig ) {
			if ( this.fnGetSelected().length !== 0 ) {
				$(nButton).removeClass( this.classes.buttons.disabled );
			} else {
				$(nButton).addClass( this.classes.buttons.disabled );
			}
		},
		"fnInit": function( nButton, oConfig ) {
			$(nButton).addClass( this.classes.buttons.disabled );
		}
	} ),

	// Enabled only while exactly one row is selected
	"select_single": $.extend( {}, TableTools.buttonBase, {
		"sButtonText": "Select button",
		"fnSelect": function( nButton, oConfig ) {
			var iSelected = this.fnGetSelected().length;
			if ( iSelected == 1 ) {
				$(nButton).removeClass( this.classes.buttons.disabled );
			} else {
				$(nButton).addClass( this.classes.buttons.disabled );
			}
		},
		"fnInit": function( nButton, oConfig ) {
			$(nButton).addClass( this.classes.buttons.disabled );
		}
	} ),

	// Select every row; disabled once everything is selected
	"select_all": $.extend( {}, TableTools.buttonBase, {
		"sButtonText": "Select all",
		"fnClick": function( nButton, oConfig ) {
			this.fnSelectAll();
		},
		"fnSelect": function( nButton, oConfig ) {
			if ( this.fnGetSelected().length == this.s.dt.fnRecordsDisplay() ) {
				$(nButton).addClass( this.classes.buttons.disabled );
			} else {
				$(nButton).removeClass( this.classes.buttons.disabled );
			}
		}
	} ),

	// Deselect every row; enabled only while something is selected
	"select_none": $.extend( {}, TableTools.buttonBase, {
		"sButtonText": "Deselect all",
		"fnClick": function( nButton, oConfig ) {
			this.fnSelectNone();
		},
		"fnSelect": function( nButton, oConfig ) {
			if ( this.fnGetSelected().length !== 0 ) {
				$(nButton).removeClass( this.classes.buttons.disabled );
			} else {
				$(nButton).addClass( this.classes.buttons.disabled );
			}
		},
		"fnInit": function( nButton, oConfig ) {
			$(nButton).addClass( this.classes.buttons.disabled );
		}
	} ),

	// POST the table data to a server-side script
	"ajax": $.extend( {}, TableTools.buttonBase, {
		"sAjaxUrl": "/xhr.php",
		"sButtonText": "Ajax button",
		"fnClick": function( nButton, oConfig ) {
			var sData = this.fnGetTableData(oConfig);
			$.ajax( {
				"url": oConfig.sAjaxUrl,
				"data": [
					{ "name": "tableData", "value": sData }
				],
				"success": oConfig.fnAjaxComplete,
				"dataType": "json",
				"type": "POST",
				"cache": false,
				"error": function () {
					alert( "Error detected when sending table data to server" );
				}
			} );
		},
		"fnAjaxComplete": function( json ) {
			alert( 'Ajax complete' );
		}
	} ),

	// Non-button text container
	"div": $.extend( {}, TableTools.buttonBase, {
		"sAction": "div",
		"sTag": "div",
		"sButtonClass": "DTTT_nonbutton",
		"sButtonText": "Text button"
	} ),

	// Drop-down collection of further buttons
	"collection": $.extend( {}, TableTools.buttonBase, {
		"sAction": "collection",
		"sButtonClass": "DTTT_button_collection",
		"sButtonText": "Collection",
		"fnClick": function( nButton, oConfig ) {
			this._fnCollectionShow(nButton, oConfig);
		}
	} )
};

/*
 * on* callback parameters:
 *  1. node - button element
 *  2. object - configuration object for this button
 *  3. object - ZeroClipboard reference (flash button only)
 *  4. string - Returned string from Flash (flash button only - and only on 'complete')
 */

// Alias to match the other plug-ins styling
TableTools.buttons = TableTools.BUTTONS;
/**
 * @namespace Classes used by TableTools - allows the styles to be override easily.
 * Note that when TableTools initialises it will take a copy of the classes object
 * and will use its internal copy for the remainder of its run time.
 */
TableTools.classes = {
	"container": "DTTT_container",
	"buttons": {
		"normal": "DTTT_button",
		"disabled": "DTTT_disabled"
	},
	"collection": {
		"container": "DTTT_collection",
		"background": "DTTT_collection_background",
		"buttons": {
			"normal": "DTTT_button",
			"disabled": "DTTT_disabled"
		}
	},
	"select": {
		"table": "DTTT_selectable",
		"row": "DTTT_selected selected"
	},
	// Classes applied while the print view is active
	"print": {
		"body": "DTTT_Print",
		"info": "DTTT_print_info",
		"message": "DTTT_PrintMessage"
	}
};
/**
 * @namespace ThemeRoller classes - built in for compatibility with DataTables'
 * bJQueryUI option. Keys not given here fall back to TableTools.classes.
 */
TableTools.classes_themeroller = {
	"container": "DTTT_container ui-buttonset ui-buttonset-multi",
	"buttons": {
		"normal": "DTTT_button ui-button ui-state-default"
	},
	"collection": {
		"container": "DTTT_collection ui-buttonset ui-buttonset-multi"
	}
};
/**
 * @namespace TableTools default settings for initialisation
 */
TableTools.DEFAULTS = {
	"sSwfPath":        "../swf/copy_csv_xls_pdf.swf",
	"sRowSelect":      "none",
	"sRowSelector":    "tr",
	"sSelectedClass":  null,
	"fnPreRowSelect":  null,
	"fnRowSelected":   null,
	"fnRowDeselected": null,
	"aButtons":        [ "copy", "csv", "xls", "pdf", "print" ],
	"oTags": {
		"container": "div",
		"button": "a", // We really want to use buttons here, but Firefox and IE ignore the
		               // click on the Flash element in the button (but not mouse[in|out]).
		"liner": "span",
		"collection": {
			"container": "div",
			"button": "a",
			"liner": "span"
		}
	}
};

// Alias to match the other plug-ins
TableTools.defaults = TableTools.DEFAULTS;
/**
 * Name of this class
 * @constant CLASS
 * @type String
 * @default TableTools
 */
TableTools.prototype.CLASS = "TableTools";

/**
 * TableTools version
 * @constant VERSION
 * @type String
 * @default See code
 */
TableTools.version = "2.2.3";
// DataTables 1.10 API
//
// This will be extended in a big way in in TableTools 3 to provide API methods
// such as rows().select() and rows.selected() etc, but for the moment the
// tabletools() method simply returns the instance.
if ( $.fn.dataTable.Api ) {
	$.fn.dataTable.Api.register( 'tabletools()', function () {
		// Resolve the TableTools instance for the first table in the context
		if ( this.context.length > 0 ) {
			return TableTools.fnGetInstance( this.context[0].nTable );
		}
		return null;
	} );
}
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Initialisation
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/*
 * Register a new feature with DataTables (the "T" option of sDom)
 */
if ( typeof $.fn.dataTable == "function" &&
	 typeof $.fn.dataTableExt.fnVersionCheck == "function" &&
	 $.fn.dataTableExt.fnVersionCheck('1.9.0') )
{
	$.fn.dataTableExt.aoFeatures.push( {
		"fnInit": function( oDTSettings ) {
			// Accept configuration under either 'tableTools' or 'oTableTools'
			var init = oDTSettings.oInit;
			var opts = init ?
				init.tableTools || init.oTableTools || {} :
				{};

			return new TableTools( oDTSettings.oInstance, opts ).dom.container;
		},
		"cFeature": "T",
		"sFeature": "TableTools"
	} );
}
else
{
	alert( "Warning: TableTools requires DataTables 1.9.0 or newer - www.datatables.net/download");
}

$.fn.DataTable.TableTools = TableTools;
})(jQuery, window, document);
/*
 * Register a new feature with DataTables
 * NOTE(review): this appears to duplicate the "T" feature registration made a
 * few lines above (same cFeature) and also constructs a second TableTools
 * instance per table - looks like a merge/concatenation artefact; verify
 * against the upstream TableTools 2.2.3 source before removing.
 */
if ( typeof $.fn.dataTable == "function" &&
	 typeof $.fn.dataTableExt.fnVersionCheck == "function" &&
	 $.fn.dataTableExt.fnVersionCheck('1.9.0') )
{
	$.fn.dataTableExt.aoFeatures.push( {
		"fnInit": function( oDTSettings ) {
			var oOpts = typeof oDTSettings.oInit.oTableTools != 'undefined' ?
				oDTSettings.oInit.oTableTools : {};

			var oTT = new TableTools( oDTSettings.oInstance, oOpts );
			TableTools._aInstances.push( oTT );

			return oTT.dom.container;
		},
		"cFeature": "T",
		"sFeature": "TableTools"
	} );
}
else
{
	alert( "Warning: TableTools 2 requires DataTables 1.9.0 or newer - www.datatables.net/download");
}

$.fn.dataTable.TableTools = TableTools;
$.fn.DataTable.TableTools = TableTools;

return TableTools;
}; // /factory
// Define as an AMD module if possible
if ( typeof define === 'function' && define.amd ) {
	define( ['jquery', 'datatables'], factory );
}
else if ( typeof exports === 'object' ) {
	// Node/CommonJS
	factory( require('jquery'), require('datatables') );
}
else if ( jQuery && !jQuery.fn.dataTable.TableTools ) {
	// Otherwise simply initialise as normal, stopping multiple evaluation
	factory( jQuery, jQuery.fn.dataTable );
}
})(window, document); | PypiClean |
/IVisual-alt-0.2.4.tar.gz/IVisual-alt-0.2.4/ivisual/visual.py | from __future__ import print_function, division, absolute_import
import six
import ivisual.crayola as color
import ivisual.materials
import numpy as np
from array import array
from ivisual.rate_control import *
from IPython.display import HTML
from IPython.display import display, display_html, display_javascript
from IPython.display import Javascript
from ipykernel.comm import Comm
from IPython.core.getipython import get_ipython
import time
import math
import uuid
import inspect
from time import clock
import os
from notebook.nbextensions import install_nbextension
import datetime, threading
import collections
import copy
import sys
import weakref
import IPython
from numpy import zeros, random
#import wx
import platform
# Guards baseObj.cmds / the shared command queue between the user thread,
# the timer-driven commsend() and the rate() callback.
glowlock = threading.Lock()

class RateKeeper2(RateKeeper):
    """RateKeeper that also flushes pending GlowScript commands to the
    front end each time the user's rate() loop yields."""

    def __init__(self, interactPeriod=INTERACT_PERIOD, interactFunc=simulateDelay):
        self.active = False   # True once rate() has started being called
        self.send = False     # True when commsend() deferred a batch to us
        self.sz = 0           # number of valid entries in commcmds to send
        self.sendcnt = 0      # watchdog counter; see commsend()
        self.rval = 1         # most recent maxRate requested by the user
        super(RateKeeper2, self).__init__(interactPeriod=interactPeriod, interactFunc=interactFunc)

    def sendtofrontend(self):
        """Flush queued construction commands and the pending attribute-update
        batch (commcmds[:self.sz]) over the comm, then give the kernel a
        chance to process front-end events."""
        self.active = True
        if self.send:
            with glowlock:
                try:
                    if (len(baseObj.cmds) > 0):
                        a = copy.copy(baseObj.cmds)
                        l = len(a)
                        baseObj.glow.comm.send(list(a))
                        a.clear()
                        # Drop exactly the entries we copied; new ones may have
                        # been appended concurrently before the lock was taken.
                        while l > 0:
                            del baseObj.cmds[0]
                            l -= 1
                    l = self.sz
                    req = commcmds[:l]
                    baseObj.glow.comm.send(req)
                finally:
                    self.send = False
                    self.sendcnt = 0
                    self.sz = 0

        # Check if events to process from front end
        if IPython.__version__ >= '3.0.0' :
            kernel = get_ipython().kernel
            parent = kernel._parent_header
            ident = kernel._parent_ident
            kernel.do_one_iteration()
            # do_one_iteration() can clobber the parent header; restore it so
            # output continues to route to the right cell.
            kernel.set_parent(ident, parent)

    def __call__(self, maxRate = 100):
        # Record a changed rate under the lock; commsend() reads rval.
        if (self.rval != maxRate) and (maxRate >= 1.0):
            with glowlock:
                self.rval = maxRate
        super(RateKeeper2, self).__call__(maxRate)
# Python 3 has no 'long'; alias it so isinstance checks below work on both.
if sys.version > '3':
    long = int

ifunc = simulateDelay(delayAvg = 0.001)
rate = RateKeeper2(interactFunc = ifunc)

# Placeholder div that the GlowScript front end renders into.
display(HTML("""<div id="scene0"><div id="glowscript" class="glowscript"></div></div>"""))

# Install the JavaScript support files as notebook extensions.
package_dir = os.path.dirname(__file__)
install_nbextension(path = package_dir+"/data/jquery-ui.custom.min.js",overwrite = True,user = True,verbose = 0)
install_nbextension(path = package_dir+"/data/glow.1.1.min.js",overwrite = True,user = True,verbose = 0)
install_nbextension(path = package_dir+"/data/glowcomm.js",overwrite = True,user = True,verbose = 0)

object_registry = {}     # GUID -> Instance
callback_registry = {}   # GUID -> Callback
# Weak mapping from id(obj) to the object itself; entries vanish when the
# object is garbage collected.
_id2obj_dict = weakref.WeakValueDictionary()

def remember(obj):
    """Register obj in the weak registry and return its integer key."""
    key = id(obj)
    _id2obj_dict[key] = obj
    return key

def id2obj(oid):
    """Look up a previously remembered object by the key remember() returned."""
    return _id2obj_dict[oid]
class baseObj(object):
    """Base class for every displayable object. Tracks instances by GUID,
    assigns each a front-end index (idx), and queues commands for delivery
    to the GlowScript front end via the comm channel."""

    txtime = 0.0
    idx = 0
    qSize = 500              # max attribute updates per comm message
    qTime = 0.034            # nominal send period in seconds
    glow = None              # GlowWidget once the front end connects
    cmds = collections.deque()   # construction commands queued before/behind sends
    updtobjs = set()             # oids with pending attribute updates
    objCnt = 0                   # total objects created; next idx

    def __init__(self, **kwargs):
        guid = str(uuid.uuid4())
        object_registry[guid] = self
        # object.__setattr__ is used throughout to bypass subclass
        # __setattr__ hooks that would queue front-end updates.
        object.__setattr__(self, 'guid', guid)
        object.__setattr__(self, 'idx', baseObj.objCnt)
        object.__setattr__(self, 'attrsupdt', set())
        object.__setattr__(self, 'oid', remember(self))
        if kwargs is not None:
            for key, value in six.iteritems(kwargs):
                object.__setattr__(self, key, value)
        baseObj.incrObjCnt()
        # New objects attach to the currently selected canvas, if any.
        if(canvas.get_selected() != None):
            canvas.get_selected().objects.append(self)

    def delete(self):
        """Explicitly remove this object from the front end."""
        baseObj.decrObjCnt()
        cmd = {"cmd": "delete", "idx": self.idx}
        if (baseObj.glow != None):
            baseObj.glow.comm.send([cmd])
        else:
            self.appendcmd(cmd)

    def appendcmd(self,cmd):
        # Send immediately when connected; otherwise queue until the
        # front end comes up (commsend()/sendtofrontend() drain the queue).
        if (baseObj.glow != None):
            baseObj.glow.comm.send([cmd])
        else:
            baseObj.cmds.append(cmd)

    def addattr(self, name):
        """Mark attribute `name` as dirty so commsend() ships its new value."""
        with glowlock:
            self.attrsupdt.add(name)
            baseObj.updtobjs.add(self.oid)

    @classmethod
    def incrObjCnt(cls):
        cls.objCnt += 1

    @classmethod
    def decrObjCnt(cls):
        cls.objCnt -= 1

    def __del__(self):
        # Best-effort front-end cleanup when the Python object is collected.
        cmd = {"cmd": "delete", "idx": self.idx}
        if (baseObj.glow != None):
            baseObj.glow.comm.send([cmd])
        else:
            self.appendcmd(cmd)
# Pre-allocated ring of update messages reused every send cycle to avoid
# allocating qSize dicts 30 times a second.
commcmds = []
for i in range(baseObj.qSize):
    commcmds.append({"idx": -1, "attr": 'dummy', "val": 0})
updtobjs2 = set()   # working copy of baseObj.updtobjs being drained

next_call = time.time()
def commsend():
    """Timer callback (~30 Hz): batch dirty attributes into commcmds and send
    them to the front end, or hand them to rate() when it is active.
    Reschedules itself in the finally block, so it keeps running even if a
    send raises."""
    global next_call, commcmds, updtobjs2, glowlock, rate
    with glowlock:
        try:
            if (baseObj.glow != None):
                # Flush queued construction commands first (unless rate()
                # owns sending while active).
                if (len(baseObj.cmds) > 0) and (not rate.active):
                    a = copy.copy(baseObj.cmds)
                    l = len(a)
                    baseObj.glow.comm.send(list(a))
                    a.clear()
                    while l > 0:
                        del baseObj.cmds[0]
                        l -= 1

                l = rate.sz if (rate.send == True) else 0
                if (l > 0):
                    # A batch is parked with rate(); watchdog: if rate() has
                    # not picked it up for ~2 periods, assume the user's rate
                    # loop stopped and reclaim sending.
                    rate.sendcnt += 1
                    thresh = math.ceil(30.0/rate.rval) * 2 + 1
                    if (rate.sendcnt > thresh ):
                        rate.send = False
                        rate.sz = 0
                        rate.active = False  # rate fnc no longer appears to be being called
                else:
                    rate.sendcnt = 0
                    # Refill the working set from the dirty-object registry.
                    if(len(updtobjs2) == 0):
                        updtobjs2 = baseObj.updtobjs.copy()
                        baseObj.updtobjs.clear()
                    if l < baseObj.qSize:
                        while updtobjs2:
                            oid = updtobjs2.pop()
                            ob = id2obj(oid)
                            if (ob is not None) and (hasattr(ob,'attrsupdt')) and (len(ob.attrsupdt) > 0 ):
                                while ob.attrsupdt:
                                    attr = ob.attrsupdt.pop()
                                    if attr is not None:
                                        attrval = getattr(ob,attr)
                                        if attrval is not None:
                                            # Vector-like attributes are shipped as [x,y,z] lists.
                                            if attr in ['axis','pos','up','axis_and_length','center','forward','origin']:
                                                attrvalues = attrval.values()
                                                if attrvalues is not None:
                                                    commcmds[l]['idx'] = ob.idx
                                                    commcmds[l]['attr'] = attr
                                                    commcmds[l]['val'] = attrvalues
                                            elif attr == 'size':
                                                # 'size' is scalar for objects with size_units
                                                # (e.g. labels), vector otherwise.
                                                if hasattr(ob,'size_units'):
                                                    commcmds[l]['idx'] = ob.idx
                                                    commcmds[l]['attr'] = attr
                                                    commcmds[l]['val'] = attrval
                                                else:
                                                    attrvalues = attrval.values()
                                                    if attrvalues is not None:
                                                        commcmds[l]['idx'] = ob.idx
                                                        commcmds[l]['attr'] = attr
                                                        commcmds[l]['val'] = attrvalues
                                            elif attr in ['range','scale']:
                                                # Only the first component is meaningful.
                                                attrvalues = attrval[0]
                                                if attrvalues is not None:
                                                    commcmds[l]['idx'] = ob.idx
                                                    commcmds[l]['attr'] = attr
                                                    commcmds[l]['val'] = attrvalues
                                            else:
                                                commcmds[l]['idx'] = ob.idx
                                                commcmds[l]['attr'] = attr
                                                commcmds[l]['val'] = attrval
                                            l += 1
                                            if l >= baseObj.qSize:
                                                # Batch full: remember this object still
                                                # has pending attrs for the next cycle.
                                                if (len(ob.attrsupdt) > 0):
                                                    updtobjs2.add(ob.oid)
                                                """
                                                if not rate.active:
                                                    req = commcmds[:l]
                                                    baseObj.glow.comm.send(req)
                                                else:
                                                    rate.sz = l
                                                    rate.send = True
                                                """
                                                break
                            if l >= baseObj.qSize:
                                break
                    if l > 0:
                        if not rate.active:
                            l = l if (l <= baseObj.qSize) else baseObj.qSize
                            baseObj.glow.comm.send(commcmds[:l])
                        else:
                            # rate() is driving: park the batch for sendtofrontend().
                            rate.sz = l if (l <= baseObj.qSize) else baseObj.qSize
                            rate.send = True
        finally:
            # Re-arm the ~33 ms timer, resynchronising if we fell behind.
            next_call = next_call+0.03333
            tmr = next_call - time.time()
            if tmr < 0.0:
                tmr = 0.03333
                next_call = time.time()+tmr
            threading.Timer(tmr, commsend ).start()
commsend()
class AllMyFields(object):
    """Lightweight record: exposes each key of `dictionary` as an attribute."""

    def __init__(self, dictionary):
        for key in dictionary:
            setattr(self, key, dictionary[key])
class GlowWidget(object):
    """Python side of the 'glow' comm channel. Receives mouse/keyboard events
    from the GlowScript front end and dispatches them to user callbacks
    registered in callback_registry."""

    def __init__(self, comm, msg):
        self.comm = comm
        self.comm.on_msg(self.handle_msg)
        self.comm.on_close(self.handle_close)
        # Publishing ourselves unblocks all queued sends (see baseObj).
        baseObj.glow = self

    def handle_msg(self, msg):
        """Decode a front-end event message and invoke the user callback with
        whatever argument shape its signature can accept."""
        data = msg['content']['data']
        if 'callback' in data:
            guid = data['callback']
            callback = callback_registry[guid]
            args = data['arguments']
            # args[0] is the event descriptor from the browser.
            evt = {}
            evt['pos'] = tuple(args[0]['pos'])
            evt['type'] = args[0]['type']
            evt['which'] = args[0]['which']
            evt['event'] = args[0]['type']
            evt['button'] = 'left' if evt['which'] == 1 else 'right' if evt['which'] == 2 else 'middle'
            evt['pickpos'] = args[0]['mouse']['pickpos']
            if 'pickguid' in args[0]['mouse']:
                pickguid = args[0]['mouse']['pickguid']
                evt['pick'] = object_registry[pickguid]
            else:
                evt['pick'] = None
            """
            mouse = {}
            if 'pickguid' in args[0]['mouse']:
                pickguid = args[0]['mouse']['pickguid']
                mouse['pick'] = object_registry[pickguid]
            else:
                mouse['pick'] = None
            #mouse['pickpos'] = args[0]['mouse']['pickpos']
            #mouse['ray'] = args[0]['mouse']['ray']
            #mouse['alt'] = args[0]['mouse']['alt']
            mouse['ctrl'] = args[0]['mouse']['ctrl']
            mouse['shift'] = args[0]['mouse']['shift']
            evt['mouse'] = AllMyFields(mouse)
            """
            mouse = Mouse(pos = evt['pos'], pick = evt['pick'], pickpos = args[0]['mouse']['pickpos'], alt = args[0]['mouse']['alt'], ctrl = args[0]['mouse']['ctrl'],
                          shift = args[0]['mouse']['shift'])
            evt['mouse'] = mouse
            # Keep the originating scene's mouse object current.
            if 'scene' in data:
                sguid = data['scene']
                object_registry[sguid].mouse = mouse
            # Adapt the call to the callback's signature:
            # (), (evt), (evt, extra...), (evt, *args) or (*args).
            tp = inspect.getargspec(callback)  # named tuple (args, varargs, keywords, defaults)
            if (len(tp.args) == 0) and (tp.varargs == None) and (tp.keywords == None) and (tp.defaults == None):
                callback()
            elif (len(tp.args) == 1) and (tp.varargs == None) and (tp.keywords == None) and (tp.defaults == None):
                callback(AllMyFields(evt))
            elif (len(tp.args) >= 2) and (tp.varargs == None) and (tp.keywords == None) and (tp.defaults == None):
                addArgs = self.parse_object(args[1])
                if type(addArgs) is list:
                    callback(AllMyFields(evt),*addArgs)
                else:
                    callback(AllMyFields(evt), addArgs)
            elif (len(tp.args) == 1) and (tp.varargs != None) and (tp.keywords == None) and (tp.defaults == None):
                if len(data['arguments']) > 1:
                    addArgs = self.parse_object(args[1])
                    if type(addArgs) is list:
                        callback(AllMyFields(evt),*addArgs)
                    else:
                        ta = [addArgs]
                        callback(AllMyFields(evt),*ta)
                else:
                    callback(AllMyFields(evt))
            elif (len(tp.args) == 0) and (tp.varargs != None) and (tp.keywords == None) and (tp.defaults == None):
                if len(data['arguments']) > 1:
                    addArgs = self.parse_object(args[1])
                    ta = [AllMyFields(evt),addArgs]
                    callback(*ta)
                else:
                    ta = [AllMyFields(evt)]
                    callback(*ta)
        else:
            # Message without a callback reference - nothing to dispatch.
            pass

    def handle_close(self, data):
        print ("Comm closed")

    def get_execution_count(self):
        return get_ipython().execution_count

    def parse_object(self, obj):
        """Rehydrate callback arguments: objects serialised as {'guido': guid}
        are replaced with the live registered instance; 3-number lists become
        tuples; primitives pass through unchanged."""
        if type(obj) in [str, int, long, bool, float, tuple, complex]:
            return obj
        elif isinstance(obj, collections.Sequence):
            if type(obj) is list:
                lst = []
                for itm in obj:
                    if isinstance(itm, collections.Sequence) and ('guido' in itm):
                        lst.append(object_registry[itm['guido']])
                    else:
                        lst.append(itm)
                if (len(lst) == 3) and (type(lst[0]) in [int, long, float]) and (type(lst[1]) in [int, long, float]) and (type(lst[2]) in [int, long, float]):
                    return tuple(lst)
                return lst
        elif 'guido' in obj:
            return object_registry[obj['guido']]
        return obj
# Register the 'glow' comm target so the front end can open the channel;
# the registration point moved to the kernel in IPython 3.
if IPython.__version__ >= '3.0.0' :
    get_ipython().kernel.comm_manager.register_target('glow', GlowWidget)
else:
    get_ipython().comm_manager.register_target('glow', GlowWidget)

# Force fresh loads of the support scripts, then load glowcomm to open the comm.
display(Javascript("""require.undef("nbextensions/glow.1.0.min");"""))
display(Javascript("""require.undef("nbextensions/jquery-ui.custom.min");"""))
display(Javascript("""require.undef("nbextensions/glow.1.1.min");"""))
display(Javascript("""require.undef("nbextensions/glowcomm");"""))
display(Javascript("""require(["nbextensions/glowcomm"], function(){console.log("glowcomm loaded");})"""))
get_ipython().kernel.do_one_iteration()
class vector(object):
'vector class'
__change_notifications = None
def __init__(self, x=(0.,0.,0.), y=0., z=0.):
if isinstance(x, (int, long, float)) and isinstance(y, (int, long, float)) and isinstance(z, (int, long, float)):
self.__dict__['x'] = x
self.__dict__['y'] = y
self.__dict__['z'] = z
elif isinstance(x, (complex)) or isinstance(y, (complex)) or isinstance(z, (complex)):
raise Exception("ArgumentError: complex argument not supported for vector(arg0,arg1,arg2)")
else:
self.__dict__['x'] = x[0]
self.__dict__['y'] = x[1]
self.__dict__['z'] = x[2]
#self.__dict__['shape'] = (3L,) # python 2
self.__dict__['shape'] = (3,) # python 3K
self.__change_notifications = set()
def __str__(self):
return '<%f, %f, %f>' % (self.x, self.y, self.z)
def __repr__(self):
return '<%f, %f, %f>' % (self.x, self.y, self.z)
def __array__(self, dtypes = [None]):
return np.array((self.x, self.y, self.z), dtype = dtypes[0])
def __add__(self,other):
if type(other) in [np.ndarray, tuple, list]:
return vector(self.x + other[0], self.y + other[1], self.z + other[2])
else:
return vector(self.x + other.x, self.y + other.y, self.z + other.z)
def __sub__(self,other):
if type(other) in [np.ndarray, tuple, list]:
return vector(self.x - other[0], self.y - other[1], self.z - other[2])
else:
return vector(self.x - other.x, self.y - other.y, self.z - other.z)
def __mul__(self, other):
if isinstance(other, (int, long, float)):
return vector(self.x * other, self.y * other, self.z * other)
elif isinstance(other, (complex)):
raise Exception("TypeError: unsupported operand type(s) for *: 'complex' and 'vector'")
return self
def __rmul__(self, other):
if isinstance(other, (int, long, float)):
return vector(self.x * other, self.y * other, self.z * other)
elif isinstance(other, (complex)):
raise Exception("TypeError: unsupported operand type(s) for *: 'complex' and 'vector'")
return self
def __div__(self, other):
if isinstance(other, (int, long, float)):
return vector(self.x / other, self.y / other, self.z / other)
elif isinstance(other, (complex)):
raise Exception("TypeError: unsupported operand type(s) for /: 'complex' and 'vector'")
return self
def __truediv__(self, other):
if isinstance(other, (int, long, float)):
return vector(self.x / other, self.y / other, self.z / other)
elif isinstance(other, (complex)):
raise Exception("TypeError: unsupported operand type(s) for /: 'complex' and 'vector'")
return self
def __neg__(self):
return vector(-1.*self.x, -1.*self.y, -1.*self.z)
def __getitem__(self,key):
if key == 0:
return self.x
elif key == 1:
return self.y
elif key == 2:
return self.z
else:
return
def __setitem__(self,key,value):
if key == 0:
self.x = value
elif key == 1:
self.y = value
elif key == 2:
self.z = value
def __del__(self):
self.__change_notifications.clear()
def mag(self):
return np.linalg.norm(np.array([self.x,self.y,self.z]))
def mag2(self):
return self.mag()*self.mag()
def norm(self):
smag = self.mag()
if (smag > 0.):
return self / self.mag()
else:
return vector(0.,0.,0.)
def dot(self,other):
if type(other) is np.ndarray:
return np.dot(np.array([self.x,self.y,self.z]),other)
else:
return np.dot(np.array([self.x,self.y,self.z]),np.array([other.x,other.y,other.z]))
def cross(self,other):
if type(other) is np.ndarray:
return vector(np.cross(np.array([self.x,self.y,self.z]),other))
elif (type(other) is tuple) or (type(other) is list):
return vector(np.cross(np.array([self.x,self.y,self.z]),np.array(other)))
else:
return vector(np.cross(np.array([self.x,self.y,self.z]),np.array([other.x,other.y,other.z])))
def proj(self,other):
normB = other.norm()
return self.dot(normB) * normB
def comp(self,other):
normB = other.norm()
return self.dot(normB) * normB
def diff_angle(self, other):
return np.arccos(np.clip(self.norm().dot(other.norm()),-1.,1.))
    def rotate(self,angle = 0.,axis = (0,0,1)):
        # Rotate this vector in place by `angle` radians about `axis`
        # (right-hand rule). `axis` may be a vector, numpy array, tuple or
        # list; it is normalized to a unit numpy array first.
        if type(axis) is np.ndarray:
            axis = axis/math.sqrt(np.dot(axis,axis))
        elif (type(axis) is tuple) or (type(axis) is list):
            axis = np.array(axis)
            axis = axis/math.sqrt(np.dot(axis,axis))
        else:
            axis = axis/math.sqrt(axis.dot(axis))
            axis = np.array([axis.x,axis.y,axis.z])
        # Build the rotation matrix from the Euler-Rodrigues parameters
        # (a, b, c, d) of the half-angle quaternion for this rotation.
        a = math.cos(angle/2)
        b,c,d = -axis*math.sin(angle/2)
        mat = np.array([[a*a+b*b-c*c-d*d, 2*(b*c-a*d), 2*(b*d+a*c)],
                [2*(b*c+a*d), a*a+c*c-b*b-d*d, 2*(c*d-a*b)],
                [2*(b*d-a*c), 2*(c*d+a*b), a*a+d*d-b*b-c*c]])
        v = np.array([self.x,self.y,self.z])
        res = np.dot(mat,v)
        # Assign through the attributes (not __dict__) so that __setattr__
        # fires change notifications for each component.
        self.x = res[0]
        self.y = res[1]
        self.z = res[2]
def astuple(self):
return (self.x,self.y,self.z)
def keys(self):
return [0,1,2]
def values(self):
return [self.x,self.y,self.z]
    def __setattr__(self, name, value):
        # Intercept writes: 'mag'/'mag2' rescale the vector keeping its
        # direction; 'x'/'y'/'z' fire change notifications; everything else
        # is stored normally.
        if name in ['mag','mag2']:
            normA = self.norm()
            if name == 'mag':
                # Write through __dict__ to avoid recursing into this hook.
                self.__dict__['x'] = value * normA.x
                self.__dict__['y'] = value * normA.y
                self.__dict__['z'] = value * normA.z
                self.on_change()
            elif name == 'mag2':
                # mag2 is the squared length, so scale by sqrt(value).
                self.__dict__['x'] = math.sqrt(value) * normA.x
                self.__dict__['y'] = math.sqrt(value) * normA.y
                self.__dict__['z'] = math.sqrt(value) * normA.z
                self.on_change()
        elif name in ['x','y','z']:
            # NOTE(review): on_change() fires *before* the new value is
            # stored, so observers reading the vector inside their callback
            # still see the old component — confirm this ordering is intended.
            if getattr(self, name) != value:
                self.on_change()
            self.__dict__[name] = value
        else:
            super(vector, self).__setattr__(name, value)
    def add_notification(self, tup):
        # Register a (callback, argument) pair invoked whenever a component
        # of this vector changes (see on_change).
        self.__change_notifications.add(tup)
    def remove_notification(self, tup):
        # Unregister a previously added (callback, argument) pair.
        # Uses set.remove, so an unknown pair raises KeyError.
        self.__change_notifications.remove(tup)
def on_change(self):
for tup in self.__change_notifications:
tup[0](tup[1])
def mag(A):
    """Magnitude of A; A may be a vector or an array/tuple/list of components."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).mag()
    return A.mag()
def mag2(A):
    """Squared magnitude of A; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).mag2()
    return A.mag2()
def norm(A):
    """Unit vector in the direction of A; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).norm()
    return A.norm()
def dot(A,B):
    """Dot product of A and B; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).dot(B)
    return A.dot(B)
def cross(A,B):
    """Cross product of A and B; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).cross(B)
    return A.cross(B)
def proj(A,B):
    """Vector projection of A onto B; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).proj(B)
    return A.proj(B)
def comp(A,B):
    """Component of A along B; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).comp(B)
    return A.comp(B)
def diff_angle(A,B):
    """Angle between A and B; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).diff_angle(B)
    return A.diff_angle(B)
def rotate(A, angle=0., axis=(0,0,1)):
    """Rotate A by `angle` about `axis`; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).rotate(angle,axis)
    return A.rotate(angle,axis)
def astuple(A):
    """A's components as a tuple; A may be a vector or array/tuple/list."""
    if type(A) in (np.ndarray, tuple, list):
        return vector(A).astuple()
    return A.astuple()
class baseAttrs(baseObj):
    """Base geometric/color attributes shared by all drawable objects.

    pos/axis/size/up are stored as `vector`s wired with change notifications,
    so in-place mutation (e.g. obj.pos.x = 1) pushes an attribute update via
    self.addattr. x/y/z are views onto pos; red/green/blue onto color.
    """
    # Class-level defaults; instances shadow these in __init__.
    pos = vector(0.,0.,0.)
    x = 0.
    y = 0.
    z = 0.
    size = vector(1.,1.,1.)
    axis = vector(1.,0.,0.)
    up = vector(0.,1.,0.)
    red = 1.
    green = 1.
    blue = 1.
    visible = False
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), size=(1.,1.,1.), visible=True,
                 up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., frame=None, display=None, **kwargs):
        super(baseAttrs, self).__init__(**kwargs)
        # Position may be given either as pos or as separate x/y/z;
        # whichever was explicitly supplied wins, the other is derived.
        if (x != 0.) or (y != 0.) or (z != 0.):
            pos = vector(x,y,z) if type(pos) is tuple else pos
        else:
            x = pos[0]
            y = pos[1]
            z = pos[2]
        # Likewise color vs separate red/green/blue channels.
        if (red != 1.) or (green != 1.) or (blue != 1.):
            color = (red,green,blue)
        else:
            red = color[0]
            green = color[1]
            blue = color[2]
        # Write through object.__setattr__ to bypass this class's
        # __setattr__ hook (no update commands should fire during init).
        object.__setattr__(self, 'pos', vector(pos) if type(pos) in [tuple, list, np.ndarray] else pos )
        object.__setattr__(self, 'x', x)
        object.__setattr__(self, 'y', y)
        object.__setattr__(self, 'z', z)
        object.__setattr__(self, 'axis', vector(axis) if type(axis) in [tuple, list, np.ndarray] else axis)
        object.__setattr__(self, 'size', vector(size) if type(size) in [tuple, list, np.ndarray] else size)
        object.__setattr__(self, 'up', vector(up) if type(up) in [tuple, list, np.ndarray] else up)
        object.__setattr__(self, 'color', color)
        object.__setattr__(self, 'red', red)
        object.__setattr__(self, 'green', green)
        object.__setattr__(self, 'blue', blue)
        object.__setattr__(self, 'visible', visible)
        object.__setattr__(self, 'display', display)
        object.__setattr__(self, 'frame', frame)
        # In-place mutation of these vectors re-sends the whole attribute.
        object.__getattribute__(self, 'pos').add_notification((self.addattr,'pos'))
        object.__getattribute__(self, 'axis').add_notification((self.addattr,'axis'))
        object.__getattribute__(self, 'size').add_notification((self.addattr,'size'))
        object.__getattribute__(self, 'up').add_notification((self.addattr,'up'))
    def __setattr__(self, name, value):
        # Route writes to tracked attributes through addattr so the display
        # side sees the change; anything else is stored normally.
        if name in ['pos','size','axis','up','visible','x','y','z','red','green','blue']:
            if name in ['pos','axis','size','up']:
                # Detach the notification from the vector being replaced.
                self.__dict__[name].remove_notification((self.addattr,name))
            self.__dict__[name] = value if type(value) is vector else vector(value) if type(value) in [tuple, list, np.ndarray] else value
            if name == 'x':
                self.__dict__['pos'][0] = value
                self.addattr('pos')
            elif name == 'y':
                self.__dict__['pos'][1] = value
                self.addattr('pos')
            elif name == 'z':
                self.__dict__['pos'][2] = value
                self.addattr('pos')
            elif name == 'pos':
                self.__dict__[name].add_notification((self.addattr,name))
                self.addattr(name)
            elif name == 'axis':
                self.__dict__[name].add_notification((self.addattr,name))
                self.addattr(name)
            elif name == 'size':
                self.__dict__[name].add_notification((self.addattr,name))
                self.addattr(name)
            elif name == 'up':
                self.__dict__[name].add_notification((self.addattr,name))
                self.addattr(name)
            elif name == 'visible':
                self.addattr(name)
            elif name == 'red':
                self.__dict__['color'] = (value,self.green,self.blue)
                self.addattr('color')
            elif name == 'green':
                self.__dict__['color'] = (self.red,value,self.blue)
                self.addattr('color')
            elif name == 'blue':
                self.__dict__['color'] = (self.red,self.green,value)
                self.addattr('color')
        elif name == 'color':
            self.__dict__[name] = value
            self.addattr(name)
        else:
            super(baseAttrs, self).__setattr__(name, value)
    def __getattribute__(self, name):
        # x/y/z and red/green/blue are computed views onto pos and color.
        if name in ['x','y','z','red','green','blue']:
            if name == 'x':
                return object.__getattribute__(self, 'pos')[0]
            elif name == 'y':
                return object.__getattribute__(self, 'pos')[1]
            elif name == 'z':
                return object.__getattribute__(self, 'pos')[2]
            if name == 'red':
                return object.__getattribute__(self, 'color')[0]
            elif name == 'green':
                return object.__getattribute__(self, 'color')[1]
            elif name == 'blue':
                return object.__getattribute__(self, 'color')[2]
        else:
            return super(baseAttrs, self).__getattribute__(name)
    def rotate(self, angle = math.pi/4, axis = axis, origin = pos):
        # NOTE: the defaults bind the *class-level* axis/pos vectors at class
        # creation time, not the instance's current values.
        axis = vector(axis) if type(axis) in [tuple, list, np.ndarray] else axis
        origin = vector(origin) if type(origin) in [tuple, list, np.ndarray] else origin
        cmd = {"cmd": "rotate", "idx": self.idx,
               "attrs": [{"attr": "pos", "value": origin.values()},
                         {"attr": "axis", "value": axis.values()},
                         {"attr": "angle", "value": angle}]}
        # Send immediately when connected, otherwise queue the command.
        if (baseObj.glow != None):
            baseObj.glow.comm.send([cmd])
        else:
            self.appendcmd(cmd)
        #baseObj.cmds.append(cmd)
    def __del__(self):
        # Detach vector notifications so the dying object is not called back.
        for attr in ['pos','axis','size','up']:
            object.__getattribute__(self, attr).remove_notification((self.addattr,attr))
        super(baseAttrs, self).__del__()
class baseAttrs2(baseAttrs):
    """baseAttrs plus surface appearance: texture, opacity, shininess, emissive."""
    texture = None
    opacity = 1.0
    shininess = 0.6
    emissive = False
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), size=(1.,1.,1.), visible=True,
                 up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., frame=None, display=None, material=None,
                 opacity=1.0, **kwargs):
        super(baseAttrs2, self).__init__(pos=pos, axis=axis, size=size, up=up, color=color, red=red, green=green, blue=blue,
                                         x=x, y=y, z=z, frame=frame, display=display, visible=visible, **kwargs)
        object.__setattr__(self, 'texture', None )
        object.__setattr__(self, 'opacity', opacity )
        object.__setattr__(self, 'shininess', 0.6)
        object.__setattr__(self, 'emissive', False)
        # Only the legacy emissive/plastic materials are mapped to the
        # emissive flag; any other material is silently ignored.
        if (material != None):
            if (material == materials.emissive):
                object.__setattr__(self, 'emissive', True)
            elif (material == materials.plastic):
                object.__setattr__(self, 'emissive', False)
            else:
                pass
    def __setattr__(self, name, value):
        if name in ['material','opacity']:
            if name == 'material':
                # Translate a legacy material object into the emissive flag.
                if (value == materials.emissive):
                    object.__setattr__(self, 'emissive', True)
                    self.addattr('emissive')
                elif (value == materials.plastic):
                    object.__setattr__(self, 'emissive', False)
                    self.addattr('emissive')
                else:
                    object.__setattr__(self, 'emissive', False)
                    self.addattr('emissive')
            elif name == 'opacity':
                self.__dict__[name] = value
                self.addattr(name)
        else:
            super(baseAttrs2, self).__setattr__(name, value)
    def __del__(self):
        super(baseAttrs2, self).__del__()
class trailAttrs(baseAttrs2):
    """baseAttrs2 plus trail settings: make_trail, trail_type, interval, retain."""
    make_trail = False
    trail_type = "curve"
    interval = 10
    retain = 50
    trail_object = None
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), size=(1.,1.,1.), visible=True,
                 up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., frame=None, display=None, material=None,
                 opacity=1.0, make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
        super(trailAttrs, self).__init__(pos=pos, axis=axis, size=size, up=up, color=color, red=red, green=green,
                                         blue=blue, x=x, y=y, z=z, frame=frame, display=display, visible=visible,
                                         material=material, opacity=opacity, **kwargs)
        object.__setattr__(self, 'make_trail', make_trail )
        object.__setattr__(self, 'trail_type', trail_type )
        object.__setattr__(self, 'interval', interval)
        object.__setattr__(self, 'retain', retain)
        #object.__setattr__(self, 'trail_object', curve() if self.trail_type == "curve" else pnts())
    def __setattr__(self, name, value):
        # Trail settings just store the value and forward it to the display.
        if name in ['make_trail','trail_type','interval','retain']:
            self.__dict__[name] = value
            self.addattr(name)
        else:
            super(trailAttrs, self).__setattr__(name, value)
    def __del__(self):
        super(trailAttrs, self).__del__()
class box(trailAttrs):
    """see box documentation at http://vpython.org/contents/docs/box.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), size=(1.,1.,1.),
                 length=-1., width=1., height=1., up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1.,
                 frame=None, material=None, opacity=1.0, display=None, visible=True,
                 make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
        axis = vector(axis) if type(axis) in [tuple, list, np.ndarray] else axis
        size = vector(size) if type(size) in [tuple, list, np.ndarray] else size
        # Reconcile the equivalent ways of giving the dimensions: size,
        # (length, height, width), or the axis magnitude.
        if (length == -1.):
            if size[0] == 1. and size[1] == 1. and size[2] == 1.:
                length = axis.mag()
                size[0] = length
            else:
                length = size[0]
                height = size[1]
                width = size[2]
            if (length != 1.0) or (width != 1.0) or (height != 1.0):
                size = vector(length,height,width)
        else:
            # NOTE(review): an explicitly passed length is overwritten from
            # size here rather than applied to size — confirm this is the
            # intended precedence.
            length = size[0]
            height = size[1]
            width = size[2]
        # axis always carries the length as its magnitude.
        axis = axis.norm() * length
        super(box, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, size=size, up=up, color=color, red=red, green=green, blue=blue,
                                  material=material, opacity=opacity, frame=frame, display=display, visible=visible,
                                  make_trail=make_trail, trail_type=trail_type, interval=interval, retain=retain, **kwargs)
        object.__setattr__(self, 'length', length)
        object.__setattr__(self, 'width', width)
        object.__setattr__(self, 'height', height)
        # Initial draw command with the full attribute set.
        cmd = {"cmd": "box", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "axis", "value": self.axis.values()},
                         {"attr": "size", "value": self.size.values()},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "opacity", "value": self.opacity},
                         {"attr": "shininess", "value": self.shininess},
                         {"attr": "emissive", "value": self.emissive},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1},
                         {"attr": "make_trail", "value": self.make_trail},
                         {"attr": "type", "value": 'curve' if self.trail_type == 'curve' else 'spheres'},
                         {"attr": "interval", "value": self.interval},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "retain", "value": self.retain}]}
        self.appendcmd(cmd)
        if (frame != None):
            frame.objects.append(self)
            frame.update_obj_list()
    def __setattr__(self, name, value):
        # Dimension writes keep size, length/height/width and axis in sync.
        if name in ['size','length','width','height']:
            if name in ['size']:
                self.__dict__[name].remove_notification((self.addattr,name))
            val = value if type(value) is vector else vector(value) if type(value) in [tuple, list, np.ndarray] else value
            self.__dict__[name] = val
            if name == 'size':
                self.__dict__['axis'] = self.axis.norm() * value[0]
                self.__dict__['height'] = value[1]
                self.__dict__['width'] = value[2]
                self.__dict__[name].add_notification((self.addattr,name))
                self.addattr(name)
            elif name == 'length':
                self.__dict__['axis'] = self.axis.norm() * value
                self.addattr('axis')
            elif name == 'height':
                self.addattr('size')
            elif name == 'width':
                self.addattr('size')
        else:
            super(box, self).__setattr__(name, value)
    def __getattribute__(self, name):
        # size and length are derived from axis plus height/width.
        if name in ['size','length']:
            if name == 'length':
                return object.__getattribute__(self, 'axis').mag()
            elif name == 'size':
                return vector(object.__getattribute__(self, 'axis').mag(), object.__getattribute__(self, 'height'), object.__getattribute__(self, 'width'))
        else:
            return super(box, self).__getattribute__(name)
    def __del__(self):
        # Remove the drawn box from the display before tearing down.
        cmd = {"cmd": "delete", "idx": self.idx}
        self.appendcmd(cmd)
        #baseObj.cmds.append(cmd)
        super(box, self).__del__()
class cone(trailAttrs):
    """see cone documentation at http://vpython.org/contents/docs/cone.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), length=-1., radius=1.,
                 frame=None, up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., material=None, opacity=1.0,
                 display=None, visible=True, make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
        axis = vector(axis) if type(axis) in [tuple, list, np.ndarray] else axis
        # length defaults to the axis magnitude; an explicit length rescales
        # the axis along its direction instead.
        if (length == -1.):
            length = axis.mag()
        else:
            axis = axis.norm() * length
        # size encodes (length, diameter, diameter).
        size = vector(length,radius*2,radius*2)
        super(cone, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, size=size, up=up, color=color, red=red, green=green, blue=blue,
                                   material=material, opacity=opacity, frame=frame, display=display, visible=visible,
                                   make_trail=make_trail, trail_type=trail_type, interval=interval, retain=retain, **kwargs)
        object.__setattr__(self, 'length', length)
        object.__setattr__(self, 'radius', radius)
        # Initial draw command with the full attribute set.
        cmd = {"cmd": "cone", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "axis", "value": self.axis.values()},
                         {"attr": "size", "value": self.size.values()},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "opacity", "value": self.opacity},
                         {"attr": "shininess", "value": self.shininess},
                         {"attr": "emissive", "value": self.emissive},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1},
                         {"attr": "make_trail", "value": self.make_trail},
                         {"attr": "type", "value": 'curve' if self.trail_type == 'curve' else 'spheres'},
                         {"attr": "interval", "value": self.interval},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "retain", "value": self.retain}]}
        self.appendcmd(cmd)
        if (frame != None):
            frame.objects.append(self)
            frame.update_obj_list()
    def __setattr__(self, name, value):
        # length and radius re-derive axis/size and push the change.
        if name in ['length','radius']:
            val = value if type(value) is vector else vector(value) if type(value) in [tuple, list, np.ndarray] else value
            self.__dict__[name] = val
            if name == 'length':
                self.__dict__['axis'] = self.axis.norm() * value
                self.addattr('axis')
            elif name == 'radius':
                self.addattr('size')
        else:
            super(cone, self).__setattr__(name, value)
    def __getattribute__(self, name):
        # size and length are computed from axis and radius.
        if name in ['size','length']:
            if name == 'length':
                return object.__getattribute__(self, 'axis').mag()
            elif name == 'size':
                return vector(object.__getattribute__(self, 'axis').mag(), object.__getattribute__(self, 'radius')*2.0, object.__getattribute__(self, 'radius')*2.0)
        else:
            return super(cone, self).__getattribute__(name)
    def __del__(self):
        # Remove the drawn cone from the display before tearing down.
        cmd = {"cmd": "delete", "idx": self.idx}
        self.appendcmd(cmd)
        super(cone, self).__del__()
class curve(baseAttrs2):
    """see curve documentation at http://vpython.org/contents/docs/curve.html"""
    # Class-level defaults for the per-point coordinate arrays.
    xs = np.array([],float)
    ys = np.array([],float)
    zs = np.array([],float)
    def __init__(self, pos=[], x=[], y=[], z=[], axis=(1.,0.,0.), radius=0., display=None, visible=True,
                 up=(0.,1.,0.), color=[], red=[], green=[], blue=[], frame=None, material=None, **kwargs):
        # NOTE(review): pos/x/y/z/color defaults are shared mutable lists,
        # and the loop below also mutates the caller's pos list in place.
        if type(pos) is list:
            for idx, val in enumerate(pos):
                if type(val) is not tuple:
                    pos[idx] = astuple(val)
        # Points are kept as a numpy structured array: 'f4,f4,f4' for 3-D
        # points, 'f4,f4' when 2-D points were supplied.
        posns = np.array(pos, dtype=('f4,f4,f4')) if type(pos) is list and (len(pos) == 0 or len(pos[0]) == 3) else np.array(pos, dtype=('f4,f4')) if type(pos) is list and len(pos[0]) == 2 else pos
        if len(posns) > 0:
            xs = posns['f0']
            ys = posns['f1']
            if(len(posns[0]) == 3):
                zs = posns['f2']
            else:
                zs = np.zeros(len(posns))
        elif (len(x) > 0) or (len(y) > 0) or (len(z) > 0):
            # Points given as separate x/y/z sequences: pad the short ones
            # with zeros to a common length, then interleave.
            # NOTE(review): 'type(x) is list or tuple' parses as
            # '(type(x) is list) or tuple' (always truthy), and
            # np.concatenate(a,b) passes b as the axis argument — this
            # padding path looks broken; confirm against upstream VPython.
            lsz = max(len(x),len(y),len(z))
            if len(x) < lsz:
                if len(x) > 0:
                    a = np.array(x, float) if type(x) is list or tuple else np.array([x], float) if type(x) is float or int else x
                    b = np.zeros(lsz-len(a))
                    x = np.concatenate(a,b)
                else:
                    x = np.zeros(lsz)
            if len(y) < lsz:
                if len(y) > 0:
                    a = np.array(y, float) if type(y) is list or tuple else np.array([y], float) if type(y) is float or int else y
                    b = np.zeros(lsz-len(a))
                    y = np.concatenate(a,b)
                else:
                    y = np.zeros(lsz)
            if len(z) < lsz:
                if len(z) > 0:
                    a = np.array(z, float) if type(z) is list or tuple else np.array([z], float) if type(z) is float or int else z
                    b = np.zeros(lsz-len(a))
                    z = np.concatenate(a,b)
                else:
                    z = np.zeros(lsz)
            posns = np.zeros(lsz, dtype=('f4,f4,f4'))
            posns['f0'] = x
            posns['f1'] = y
            posns['f2'] = z
            xs = np.array(x, float) if type(x) is list or tuple else np.array([x], float) if type(x) is float or int else x
            ys = np.array(y, float) if type(y) is list or tuple else np.array([y], float) if type(y) is float or int else y
            zs = np.array(z, float) if type(z) is list or tuple else np.array([z], float) if type(z) is float or int else z
        # NOTE(review): if pos and x/y/z are all empty, xs/ys/zs are never
        # bound locally, so the attribute assignments below would raise
        # NameError — confirm whether a bare curve() is supported.
        # Per-point colors use the same structured-array layout.
        colors = np.array(color, dtype=('f4,f4,f4')) if type(color) is list else np.array([color], dtype=('f4,f4,f4')) if type(color) is tuple else color
        if len(colors) > 0:
            reds = colors['f0']
            greens = colors['f1']
            blues = colors['f2']
        elif (len(red) > 0) or (len(green) > 0) or (len(blue) > 0):
            # Separate channel sequences, padded like x/y/z above (same
            # caveats apply).
            lsz = max(len(red),len(green),len(blue))
            if len(red) < lsz:
                if len(red) > 0:
                    a = np.array(red, float) if type(red) is list or tuple else np.array([red], float) if type(red) is float or int else red
                    b = np.zeros(lsz-len(a))
                    red = np.concatenate(a,b)
                else:
                    red = np.zeros(lsz)
            if len(green) < lsz:
                if len(green) > 0:
                    a = np.array(green, float) if type(green) is list or tuple else np.array([green], float) if type(green) is float or int else green
                    b = np.zeros(lsz-len(a))
                    green = np.concatenate(a,b)
                else:
                    green = np.zeros(lsz)
            if len(blue) < lsz:
                if len(blue) > 0:
                    a = np.array(blue, float) if type(blue) is list or tuple else np.array([blue], float) if type(blue) is float or int else blue
                    b = np.zeros(lsz-len(a))
                    blue = np.concatenate(a,b)
                else:
                    blue = np.zeros(lsz)
            colors = np.zeros(lsz, dtype = ('f4,f4,f4'))
            colors['f0'] = red
            colors['f1'] = green
            colors['f2'] = blue
        else:
            # No color information at all: default to a single white entry.
            colors = np.ones(1, dtype = ('f4,f4,f4'))
            reds = colors['f0']
            greens = colors['f1']
            blues = colors['f2']
            reds = np.array(red, float) if type(red) is list or tuple else np.array([red], float) if type(red) is float or int else red
            greens = np.array(green, float) if type(green) is list or tuple else np.array([green], float) if type(green) is float or int else green
            blues = np.array(blue, float) if type(blue) is list or tuple else np.array([blue], float) if type(blue) is float or int else blue
        # Build the point payload for the display: one {pos, color} dict per
        # point, reusing the last color when there are fewer colors than
        # points (those reused colors are appended to `colors` afterwards).
        pnts = []
        cols = []
        if len(posns) > 0:
            i = 0
            col = colors[-1]
            for posn in posns:
                col = colors[i] if len(colors) > i else colors[-1]
                if i >= len(colors):
                    cols.append(col)
                if (len(posn) == 3):
                    pnts.append({"pos": posn.tolist(), "color": col.tolist()})
                elif(len(posn) == 2):
                    # 2-D point: pad with z = 0 before sending.
                    p3 = list(posn)
                    p3.append(0.0)
                    p3a = np.array([tuple(p3)], dtype=('f4,f4,f4'))
                    pnts.append({"pos": p3a[0].tolist(), "color": col.tolist()})
                i += 1
        if len(cols) > 0:
            colors = np.append(colors, np.array(cols, dtype=colors.dtype))
        super(curve, self).__init__(axis=axis, up=up, material=material, frame=frame, display=display, visible=visible, **kwargs)
        # pos becomes a numpy array here, not a vector: drop the vector
        # change-notification installed by baseAttrs.__init__.
        object.__getattribute__(self, 'pos').remove_notification((self.addattr,'pos'))
        object.__setattr__(self, 'radius', radius)
        object.__setattr__(self, 'color', colors)
        object.__setattr__(self, 'pos', posns)
        object.__setattr__(self, 'x', xs)
        object.__setattr__(self, 'y', ys)
        object.__setattr__(self, 'z', zs)
        object.__setattr__(self, 'red', reds)
        object.__setattr__(self, 'green', greens)
        object.__setattr__(self, 'blue', blues)
        # Initial draw command for the curve.
        cmd = {"cmd": "curve", "idx": self.idx, "guid": self.guid,
               "attrs": [#{"attr": "pos", "value": self.pos.values()},
                         #{"attr": "axis", "value": self.axis.values()},
                         #{"attr": "size", "value": self.size.values()},
                         #{"attr": "up", "value": self.up.values()},
                         #{"attr": "color", "value": list(self.color)},
                         #{"attr": "shininess", "value": self.shininess},
                         #{"attr": "emissive", "value": self.emissive},
                         #{"attr": "pnts", "value": [{"pos": [0, 0, 0]}, {"pos": [1, 0, 0]}]},
                         #{"attr": "pnts", "value": pntsa.tolist()},
                         {"attr": "pnts", "value": pnts},
                         {"attr": "radius", "value": self.radius},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}]}
        self.appendcmd(cmd)
    def __setattr__(self, name, value):
        # Per-point attributes are stored locally and mirrored to the
        # display with explicit "modify" commands instead of addattr.
        if name in ['pos','color','x','y','z','red','green','blue','radius']:
            if name == 'radius':
                self.__dict__[name] = vector(value) if type(value) is tuple else value
                self.addattr(name)
            elif name == 'pos':
                if type(value) is list:
                    for idx, val in enumerate(value):
                        if type(val) is not tuple:
                            value[idx] = astuple(val)
                self.__dict__[name] = np.array(value, dtype = ('f4,f4,f4')) if type(value) is list and (len(value) == 0 or len(value[0]) == 3) else np.array(value, dtype=('f4,f4')) if type(value) is list and len(value[0]) == 2 else value
                self.__dict__['x'] = self.pos['f0']
                self.__dict__['y'] = self.pos['f1']
                if len(value[0]) == 3:
                    self.__dict__['z'] = self.pos['f2']
                    cmd = {"cmd": "modify", "idx": self.idx,
                           "attrs":[{"attr": 'posns', "value": self.pos.tolist()}]}
                    baseObj.cmds.append(cmd)
                else:
                    # 2-D points: pad each with z = 0 before sending.
                    posns = []
                    if len(self.pos) > 0:
                        for posn in self.pos:
                            p3 = list(posn)
                            p3.append(0.0)
                            posns.append(tuple(p3))
                    posns2 = np.array(posns, dtype = ('f4,f4,f4'))
                    cmd = {"cmd": "modify", "idx": self.idx,
                           "attrs":[{"attr": 'posns', "value": posns2.tolist()}]}
                    baseObj.cmds.append(cmd)
            elif name == 'x':
                self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
                self.__dict__['pos']['f0'] = self.x
                cmd = {"cmd": "modify", "idx": self.idx,
                       "attrs":[{"attr": name, "value": self.x.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'y':
                self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
                self.__dict__['pos']['f1'] = self.y
                cmd = {"cmd": "modify", "idx": self.idx,
                       "attrs":[{"attr": name, "value": self.y.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'z':
                self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
                self.__dict__['pos']['f2'] = self.z
                cmd = {"cmd": "modify", "idx": self.idx,
                       "attrs":[{"attr": name, "value": self.z.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'red':
                self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
                self.__dict__['color']['f0'] = self.red
                cmd = {"cmd": "modify", "idx": self.idx,
                       "attrs":[{"attr": name, "value": self.red.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'green':
                self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
                self.__dict__['color']['f1'] = self.green
                cmd = {"cmd": "modify", "idx": self.idx,
                       "attrs":[{"attr": name, "value": self.green.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'blue':
                self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
                self.__dict__['color']['f2'] = self.blue
                cmd = {"cmd": "modify", "idx": self.idx,
                       "attrs":[{"attr": name, "value": self.blue.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'color':
                self.__dict__[name] = np.array(value, dtype=('f4,f4,f4')) if type(value) is list and (len(value) == 0 or len(value[0]) == 3) else np.array(value, dtype=('f4,f4')) if type(value) is list and len(value[0]) == 2 else value
                self.__dict__['red'] = self.color['f0']
                self.__dict__['green'] = self.color['f1']
                self.__dict__['blue'] = self.color['f2']
                cmd = {"cmd": "modify", "idx": self.idx,
                       "attrs":[{"attr": 'colors', "value": self.color.tolist()}]}
                baseObj.cmds.append(cmd)
        else:
            super(curve, self).__setattr__(name, value)
    def append(self, pos = None, color = None, red = None, green = None, blue = None):
        # Append one point (and optionally its color) to the curve and push
        # it to the display with a "push" command.
        if (red is not None) and (green is not None) and (blue is not None):
            color = (red,green,blue)
        if (pos is not None) and (color is not None):
            if type(pos) is not tuple:
                pos = astuple(pos)
            self.__dict__['pos'] = np.append(self.pos, np.array([pos], dtype=self.pos.dtype))
            self.__dict__['color'] = np.append(self.color, np.array([color], dtype=self.color.dtype))
            pos = list(pos)
            if len(pos) == 2:
                pos.append(0.0)
            cmd = {"cmd": "push", "idx": self.idx,
                   "attrs":[{"attr": "pos", "value": pos},{"attr": "color", "value": list(color)}]}
            baseObj.cmds.append(cmd)
        elif (pos is not None):
            # No color supplied: reuse the last point's color.
            if type(pos) is not tuple:
                pos = astuple(pos)
            self.__dict__['pos'] = np.append(self.pos, np.array([pos], dtype=self.pos.dtype))
            color = self.color[-1]
            self.__dict__['color'] = np.append(self.color, np.array([color], dtype=self.color.dtype))
            pos = list(pos)
            if len(pos) == 2:
                pos.append(0.0)
            cmd = {"cmd": "push", "idx": self.idx,
                   "attrs":[{"attr": "pos", "value": pos},{"attr": "color", "value": self.color[-1].tolist()}]}
            baseObj.cmds.append(cmd)
    def __del__(self):
        # NOTE(review): unlike box/cone, no "delete" command is sent here,
        # so the drawn curve outlives the Python object — confirm intended.
        pass
class points(baseAttrs2):
"""see points documentation at http://vpython.org/contents/docs/points.html"""
xs = np.array([],float)
ys = np.array([],float)
zs = np.array([],float)
    def __init__(self, pos=[], x=[], y=[], z=[], size=5, size_units="pixels", shape="round",
                 display=None, visible=True, color=[], red=[], green=[], blue=[], frame=None, **kwargs):
        # NOTE(review): pos/x/y/z/color defaults are shared mutable lists,
        # and the loop below also mutates the caller's pos list in place.
        if type(pos) is list:
            for idx, val in enumerate(pos):
                if type(val) is not tuple:
                    pos[idx] = astuple(val)
        # Points are kept as a numpy structured array: 'f4,f4,f4' for 3-D
        # points, 'f4,f4' when 2-D points were supplied.
        posns = np.array(pos, dtype=('f4,f4,f4')) if type(pos) is list and (len(pos) == 0 or len(pos[0]) == 3) else np.array(pos, dtype=('f4,f4')) if type(pos) is list and len(pos[0]) == 2 else pos
        if len(posns) > 0:
            xs = posns['f0']
            ys = posns['f1']
            if(len(posns[0]) == 3):
                zs = posns['f2']
            else:
                zs = np.zeros(len(posns))
        elif (len(x) > 0) or (len(y) > 0) or (len(z) > 0):
            # Points given as separate x/y/z sequences: pad the short ones
            # with zeros to a common length, then interleave.
            # NOTE(review): 'type(x) is list or tuple' parses as
            # '(type(x) is list) or tuple' (always truthy), and
            # np.concatenate(a,b) passes b as the axis argument — this
            # padding path looks broken; confirm against upstream VPython.
            lsz = max(len(x),len(y),len(z))
            if len(x) < lsz:
                if len(x) > 0:
                    a = np.array(x, float) if type(x) is list or tuple else np.array([x], float) if type(x) is float or int else x
                    b = np.zeros(lsz-len(a))
                    x = np.concatenate(a,b)
                else:
                    x = np.zeros(lsz)
            if len(y) < lsz:
                if len(y) > 0:
                    a = np.array(y, float) if type(y) is list or tuple else np.array([y], float) if type(y) is float or int else y
                    b = np.zeros(lsz-len(a))
                    y = np.concatenate(a,b)
                else:
                    y = np.zeros(lsz)
            if len(z) < lsz:
                if len(z) > 0:
                    a = np.array(z, float) if type(z) is list or tuple else np.array([z], float) if type(z) is float or int else z
                    b = np.zeros(lsz-len(a))
                    z = np.concatenate(a,b)
                else:
                    z = np.zeros(lsz)
            posns = np.zeros(lsz, dtype=('f4,f4,f4'))
            posns['f0'] = x
            posns['f1'] = y
            posns['f2'] = z
            xs = np.array(x, float) if type(x) is list or tuple else np.array([x], float) if type(x) is float or int else x
            ys = np.array(y, float) if type(y) is list or tuple else np.array([y], float) if type(y) is float or int else y
            zs = np.array(z, float) if type(z) is list or tuple else np.array([z], float) if type(z) is float or int else z
        # Per-point colors use the same structured-array layout.
        colors = np.array(color, dtype=('f4,f4,f4')) if type(color) is list else np.array([color], dtype=('f4,f4,f4')) if type(color) is tuple else color
        if len(colors) > 0:
            reds = colors['f0']
            greens = colors['f1']
            blues = colors['f2']
        elif (len(red) > 0) or (len(green) > 0) or (len(blue) > 0):
            # Separate channel sequences, padded like x/y/z above (same
            # caveats apply).
            lsz = max(len(red),len(green),len(blue))
            if len(red) < lsz:
                if len(red) > 0:
                    a = np.array(red, float) if type(red) is list or tuple else np.array([red], float) if type(red) is float or int else red
                    b = np.zeros(lsz-len(a))
                    red = np.concatenate(a,b)
                else:
                    red = np.zeros(lsz)
            if len(green) < lsz:
                if len(green) > 0:
                    a = np.array(green, float) if type(green) is list or tuple else np.array([green], float) if type(green) is float or int else green
                    b = np.zeros(lsz-len(a))
                    green = np.concatenate(a,b)
                else:
                    green = np.zeros(lsz)
            if len(blue) < lsz:
                if len(blue) > 0:
                    a = np.array(blue, float) if type(blue) is list or tuple else np.array([blue], float) if type(blue) is float or int else blue
                    b = np.zeros(lsz-len(a))
                    blue = np.concatenate(a,b)
                else:
                    blue = np.zeros(lsz)
            colors = np.zeros(lsz, dtype=('f4,f4,f4'))
            colors['f0'] = red
            colors['f1'] = green
            colors['f2'] = blue
        else:
            # No color information at all: default to a single white entry.
            colors = np.ones(1, dtype=('f4,f4,f4'))
            reds = colors['f0']
            greens = colors['f1']
            blues = colors['f2']
            reds = np.array(red, float) if type(red) is list or tuple else np.array([red], float) if type(red) is float or int else red
            greens = np.array(green, float) if type(green) is list or tuple else np.array([green], float) if type(green) is float or int else green
            blues = np.array(blue, float) if type(blue) is list or tuple else np.array([blue], float) if type(blue) is float or int else blue
        # Build the point payload for the display: one {pos, color} dict per
        # point, reusing the last color when colors run short.
        pnts = []
        cols = []
        if len(posns) > 0:
            i = 0
            col = colors[-1]
            for posn in posns:
                col = colors[i] if len(colors) > i else colors[-1]
                if i >= len(colors):
                    cols.append(col)
                if (len(posn) == 3):
                    pnts.append({"pos": posn.tolist(), "color": col.tolist()})
                elif(len(posn) == 2):
                    # 2-D point: pad with z = 0 before sending.
                    p3 = list(posn)
                    p3.append(0.0)
                    p3a = np.array([tuple(p3)], dtype=('f4,f4,f4'))
                    pnts.append({"pos": p3a[0].tolist(), "color": col.tolist()})
                i += 1
        if len(cols) > 0:
            colors = np.append(colors, np.array(cols, dtype=colors.dtype))
        super(points, self).__init__(frame=frame, display=display, visible=visible, **kwargs)
        # pos/size are plain values here, not notifying vectors: detach the
        # notifications installed by baseAttrs.__init__.
        object.__getattribute__(self, 'pos').remove_notification((self.addattr,'pos'))
        object.__getattribute__(self, 'size').remove_notification((self.addattr,'size'))
        object.__setattr__(self, 'size', size)
        object.__setattr__(self, 'size_units', size_units)
        object.__setattr__(self, 'shape', shape)
        object.__setattr__(self, 'color', colors)
        object.__setattr__(self, 'pos', posns)
        object.__setattr__(self, 'x', xs)
        object.__setattr__(self, 'y', ys)
        object.__setattr__(self, 'z', zs)
        object.__setattr__(self, 'red', reds)
        object.__setattr__(self, 'green', greens)
        object.__setattr__(self, 'blue', blues)
        # Initial draw command for the points object.
        cmd = {"cmd": "points", "idx": self.idx, "guid": self.guid,
               "attrs": [#{"attr": "pos", "value": self.pos.values()},
                         #{"attr": "axis", "value": self.axis.values()},
                         #{"attr": "size", "value": self.size.values()},
                         #{"attr": "up", "value": self.up.values()},
                         #{"attr": "color", "value": list(self.color)},
                         #{"attr": "shininess", "value": self.shininess},
                         #{"attr": "emissive", "value": self.emissive},
                         #{"attr": "pnts", "value": [{"pos": [0, 0, 0]}, {"pos": [1, 0, 0]}]},
                         #{"attr": "pnts", "value": pntsa.tolist()},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "pnts", "value": pnts},
                         {"attr": "size", "value": self.size},
                         {"attr": "size_units", "value": self.size_units},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}]}
        self.appendcmd(cmd)
def __setattr__(self, name, value):
if name in ['pos','color','x','y','z','red','green','blue','size','size_units','shape']:
if name == 'size':
self.__dict__[name] = value
self.addattr(name)
elif name == 'size_units':
if value in ['pixels','world']:
self.__dict__[name] = value
self.addattr(name)
elif name == 'shape':
if value in ['round','square']:
self.__dict__[name] = value
self.addattr(name)
elif name == 'pos':
if type(value) is list:
for idx, val in enumerate(value):
if type(val) is not tuple:
value[idx] = astuple(val)
self.__dict__[name] = np.array(value, dtype=('f4,f4,f4')) if type(value) is list and (len(value) == 0 or len(value[0]) == 3) else np.array(value, dtype=('f4,f4')) if type(value) is list and len(value[0]) == 2 else value
self.__dict__['x'] = self.pos['f0']
self.__dict__['y'] = self.pos['f1']
if len(value[0]) == 3:
self.__dict__['z'] = self.pos['f2']
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": 'posns', "value": self.pos.tolist()}]}
baseObj.cmds.append(cmd)
else:
posns = []
if len(self.pos) > 0:
for posn in self.pos:
p3 = list(posn)
p3.append(0.0)
posns.append(tuple(p3))
posns2 = np.array(posns, dtype=('f4,f4,f4'))
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": 'posns', "value": posns2.tolist()}]}
baseObj.cmds.append(cmd)
elif name == 'x':
self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
self.__dict__['pos']['f0'] = self.x
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": name, "value": self.x.tolist()}]}
baseObj.cmds.append(cmd)
elif name == 'y':
self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
self.__dict__['pos']['f1'] = self.y
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": name, "value": self.y.tolist()}]}
baseObj.cmds.append(cmd)
elif name == 'z':
self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
self.__dict__['pos']['f2'] = self.z
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": name, "value": self.z.tolist()}]}
baseObj.cmds.append(cmd)
elif name == 'red':
self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
self.__dict__['color']['f0'] = self.red
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": name, "value": self.red.tolist()}]}
baseObj.cmds.append(cmd)
elif name == 'green':
self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
self.__dict__['color']['f1'] = self.green
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": name, "value": self.green.tolist()}]}
baseObj.cmds.append(cmd)
elif name == 'blue':
self.__dict__[name] = np.array(value, float) if type(value) is list or tuple else np.array([value], float) if type(value) is float or int else value
self.__dict__['color']['f2'] = self.blue
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": name, "value": self.blue.tolist()}]}
baseObj.cmds.append(cmd)
elif name == 'color':
self.__dict__[name] = np.array(value, dtype=('f4,f4,f4')) if type(value) is list and (len(value) == 0 or len(value[0]) == 3) else np.array(value, dtype=('f4,f4')) if type(value) is list and len(value[0]) == 2 else value
self.__dict__['red'] = self.color['f0']
self.__dict__['green'] = self.color['f1']
self.__dict__['blue'] = self.color['f2']
cmd = {"cmd": "modify", "idx": self.idx,
"attrs":[{"attr": 'colors', "value": self.color.tolist()}]}
baseObj.cmds.append(cmd)
else:
super(points, self).__setattr__(name, value)
    def __del__(self):
        # Intentionally a no-op: points does not queue a "delete" command on
        # teardown (unlike helix/arrow/cylinder below, which do).
        pass
def append(self, pos = None, color = None, red = None, green = None, blue = None):
if (red is not None) and (green is not None) and (blue is not None):
color = (red,green,blue)
if (pos is not None) and (color is not None):
if type(pos) is not tuple:
pos = astuple(pos)
self.__dict__['pos'] = np.append(self.pos, np.array([pos], dtype=self.pos.dtype))
self.__dict__['color'] = np.append(self.color, np.array([color], dtype=self.color.dtype))
pos = list(pos)
if len(pos) == 2:
pos.append(0.0)
cmd = {"cmd": "push", "idx": self.idx,
"attrs":[{"attr": "pos", "value": pos},{"attr": "color", "value": list(color)}]}
baseObj.cmds.append(cmd)
elif (pos is not None):
if type(pos) is not tuple:
pos = astuple(pos)
self.__dict__['pos'] = np.append(self.pos, np.array([pos], dtype=self.pos.dtype))
color = self.color[-1]
self.__dict__['color'] = np.append(self.color, np.array([color], dtype=self.color.dtype))
pos = list(pos)
if len(pos) == 2:
pos.append(0.0)
cmd = {"cmd": "push", "idx": self.idx,
"attrs":[{"attr": "pos", "value": pos},{"attr": "color", "value": self.color[-1].tolist()}]}
baseObj.cmds.append(cmd)
class faces(baseAttrs2):
    """see faces documentation at http://vpython.org/contents/docs/faces.html

    Vertex data may be supplied either as lists of 2-/3-tuples (pos, color,
    normal) or as separate component arrays (x/y/z, red/green/blue); the
    constructor normalizes both forms into numpy record arrays.
    """
    # Class-level fallbacks (the constructor shadows these with locals).
    xs = np.array([],float)
    ys = np.array([],float)
    zs = np.array([],float)
    def __init__(self, pos=[], x=[], y=[], z=[], axis=(1.,0.,0.), radius=0., display=None, visible=True,
                 up=(0.,1.,0.), color=[], red=[], green=[], blue=[], normal=[], frame=None, material=None, **kwargs):
        if type(pos) is list:
            for idx, val in enumerate(pos):
                if type(val) is not tuple:
                    pos[idx] = tuple(val)
        # 3-component (or empty) lists -> 'f4,f4,f4' records; 2-component -> 'f4,f4'.
        posns = np.array(pos, dtype=('f4,f4,f4')) if type(pos) is list and (len(pos) == 0 or len(pos[0]) == 3) else np.array(pos, dtype=('f4,f4')) if type(pos) is list and len(pos[0]) == 2 else pos
        if len(posns) > 0:
            xs = posns['f0']
            ys = posns['f1']
            if(len(posns[0]) == 3):
                zs = posns['f2']
            else:
                zs = np.zeros(len(posns))
        elif (len(x) > 0) or (len(y) > 0) or (len(z) > 0):
            # Components given separately: pad the shorter arrays with zeros.
            # Fixes from the original: 'type(x) is list or tuple' was always
            # true (precedence), and np.concatenate(a,b) passed b as the
            # axis argument; arrays must be wrapped in one tuple.
            lsz = max(len(x),len(y),len(z))
            if len(x) < lsz:
                if len(x) > 0:
                    a = np.array(x, float) if type(x) in (list, tuple) else np.array([x], float) if type(x) in (float, int) else x
                    b = np.zeros(lsz-len(a))
                    x = np.concatenate((a,b))
                else:
                    x = np.zeros(lsz)
            if len(y) < lsz:
                if len(y) > 0:
                    a = np.array(y, float) if type(y) in (list, tuple) else np.array([y], float) if type(y) in (float, int) else y
                    b = np.zeros(lsz-len(a))
                    y = np.concatenate((a,b))
                else:
                    y = np.zeros(lsz)
            if len(z) < lsz:
                if len(z) > 0:
                    a = np.array(z, float) if type(z) in (list, tuple) else np.array([z], float) if type(z) in (float, int) else z
                    b = np.zeros(lsz-len(a))
                    z = np.concatenate((a,b))
                else:
                    z = np.zeros(lsz)
            posns = np.zeros(lsz, dtype=('f4,f4,f4'))
            posns['f0'] = x
            posns['f1'] = y
            posns['f2'] = z
            xs = np.array(x, float) if type(x) in (list, tuple) else np.array([x], float) if type(x) in (float, int) else x
            ys = np.array(y, float) if type(y) in (list, tuple) else np.array([y], float) if type(y) in (float, int) else y
            zs = np.array(z, float) if type(z) in (list, tuple) else np.array([z], float) if type(z) in (float, int) else z
        else:
            # No positions at all: previously xs/ys/zs stayed unbound locals
            # here and the attribute assignments below raised
            # UnboundLocalError (the class-level fallbacks are not in scope).
            xs = np.array([], float)
            ys = np.array([], float)
            zs = np.array([], float)
        colors = np.array(color, dtype=('f4,f4,f4')) if type(color) is list else np.array([color], dtype=('f4,f4,f4')) if type(color) is tuple else color
        if len(colors) > 0:
            reds = colors['f0']
            greens = colors['f1']
            blues = colors['f2']
        elif (len(red) > 0) or (len(green) > 0) or (len(blue) > 0):
            # Per-channel arrays given separately: pad to a common length.
            lsz = max(len(red),len(green),len(blue))
            if len(red) < lsz:
                if len(red) > 0:
                    a = np.array(red, float) if type(red) in (list, tuple) else np.array([red], float) if type(red) in (float, int) else red
                    b = np.zeros(lsz-len(a))
                    red = np.concatenate((a,b))
                else:
                    red = np.zeros(lsz)
            if len(green) < lsz:
                if len(green) > 0:
                    a = np.array(green, float) if type(green) in (list, tuple) else np.array([green], float) if type(green) in (float, int) else green
                    b = np.zeros(lsz-len(a))
                    green = np.concatenate((a,b))
                else:
                    green = np.zeros(lsz)
            if len(blue) < lsz:
                if len(blue) > 0:
                    a = np.array(blue, float) if type(blue) in (list, tuple) else np.array([blue], float) if type(blue) in (float, int) else blue
                    b = np.zeros(lsz-len(a))
                    blue = np.concatenate((a,b))
                else:
                    blue = np.zeros(lsz)
            colors = np.zeros(lsz, dtype=('f4,f4,f4'))
            colors['f0'] = red
            colors['f1'] = green
            colors['f2'] = blue
        else:
            # Default: a single white color record.
            colors = np.ones(1, dtype=('f4,f4,f4'))
            reds = colors['f0']
            greens = colors['f1']
            blues = colors['f2']
        # NOTE(review): these three lines unconditionally overwrite the
        # reds/greens/blues derived from 'colors' above with arrays built
        # from the (usually empty) red/green/blue arguments; kept verbatim
        # pending confirmation of the intended behavior.
        reds = np.array(red, float) if type(red) in (list, tuple) else np.array([red], float) if type(red) in (float, int) else red
        greens = np.array(green, float) if type(green) in (list, tuple) else np.array([green], float) if type(green) in (float, int) else green
        blues = np.array(blue, float) if type(blue) in (list, tuple) else np.array([blue], float) if type(blue) in (float, int) else blue
        normals = np.array(normal, dtype=('f4,f4,f4')) if type(normal) is list and (len(normal) == 0 or len(normal[0]) == 3) else np.array(normal, dtype=('f4,f4')) if type(normal) is list and len(normal[0]) == 2 else normal
        pnts = []
        cols = []
        if len(posns) > 0:
            i = 0
            col = colors[-1]
            for posn in posns:
                # Use the matching color when supplied, otherwise repeat the
                # last color; remember repeats so the color array can be
                # extended to one entry per vertex below.
                col = colors[i] if len(colors) > i else colors[-1]
                if i >= len(colors):
                    cols.append(col)
                if (len(posn) == 3):
                    pnts.append({"pos": posn.tolist(), "color": col.tolist()})
                elif(len(posn) == 2):
                    # Pad 2-D vertices with z = 0 before serializing.
                    p3 = list(posn)
                    p3.append(0.0)
                    p3a = np.array([tuple(p3)], dtype=('f4,f4,f4'))
                    pnts.append({"pos": p3a[0].tolist(), "color": col.tolist()})
                i += 1
        if len(cols) > 0:
            colors = np.append(colors, np.array(cols, dtype=colors.dtype))
        super(faces, self).__init__(axis=axis, up=up, material=material, display=display, visible=visible, **kwargs)
        # pos is managed manually below; stop change notifications on it.
        object.__getattribute__(self, 'pos').remove_notification((self.addattr,'pos'))
        object.__setattr__(self, 'radius', radius)
        object.__setattr__(self, 'color', colors)
        object.__setattr__(self, 'pos', posns)
        # Was 'posns' in the original, which silently discarded the computed
        # per-vertex normals; store the normals array instead.
        object.__setattr__(self, 'normal', normals)
        object.__setattr__(self, 'x', xs)
        object.__setattr__(self, 'y', ys)
        object.__setattr__(self, 'z', zs)
        object.__setattr__(self, 'red', reds)
        object.__setattr__(self, 'green', greens)
        object.__setattr__(self, 'blue', blues)
        object.__setattr__(self, 'frame', frame)
        # NOTE(review): this creation command is built but never dispatched —
        # the send/append logic below was disabled by wrapping it in a string
        # literal in the original. Kept as-is pending confirmation that faces
        # creation is intentionally inactive.
        cmd = {"cmd": "faces", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pnts", "value": pnts},
                         {"attr": "radius", "value": self.radius},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}]}
        """
        if (baseObj.glow != None):
            baseObj.glow.comm.send([cmd])
        else:
            self.appendcmd(cmd)
        #self.appendcmd(cmd)
        """
    def __setattr__(self, name, value):
        """Mirror writes to the vertex-array attributes into self.__dict__
        and queue the matching "modify" command; other names are delegated
        to the base class."""
        if name in ['pos','color','x','y','z','red','green','blue','radius']:
            if name == 'radius':
                self.__dict__[name] = vector(value) if type(value) is tuple else value
                cmd = {"idx": self.idx, "attr": "radius", "val": self.radius}
                baseObj.cmds.append(cmd)
            elif name == 'pos':
                self.__dict__[name] = np.array(value, dtype=('f4,f4,f4')) if type(value) is list and (len(value) == 0 or len(value[0]) == 3) else np.array(value, dtype=('f4,f4')) if type(value) is list and len(value[0]) == 2 else value
                self.__dict__['x'] = self.pos['f0']
                self.__dict__['y'] = self.pos['f1']
                # Guard the empty case before probing value[0]; the original
                # raised IndexError on an empty position list.
                if len(value) == 0 or len(value[0]) == 3:
                    self.__dict__['z'] = self.pos['f2']
                    cmd = {"cmd": "modify", "idx": self.idx,
                          "attrs":[{"attr": 'posns', "value": self.pos.tolist()}]}
                    baseObj.cmds.append(cmd)
                else:
                    # 2-D vertices: pad a zero z component before sending.
                    posns = []
                    if len(self.pos) > 0:
                        for posn in self.pos:
                            p3 = list(posn)
                            p3.append(0.0)
                            posns.append(tuple(p3))
                    posns2 = np.array(posns, dtype=('f4,f4,f4'))
                    cmd = {"cmd": "modify", "idx": self.idx,
                          "attrs":[{"attr": 'posns', "value": posns2.tolist()}]}
                    baseObj.cmds.append(cmd)
            elif name == 'x':
                # Membership test replaces the always-true 'is list or tuple'
                # precedence bug so scalars are wrapped as intended.
                self.__dict__[name] = np.array(value, float) if type(value) in (list, tuple) else np.array([value], float) if type(value) in (float, int) else value
                self.__dict__['pos']['f0'] = self.x
                cmd = {"cmd": "modify", "idx": self.idx,
                      "attrs":[{"attr": name, "value": self.x.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'y':
                self.__dict__[name] = np.array(value, float) if type(value) in (list, tuple) else np.array([value], float) if type(value) in (float, int) else value
                self.__dict__['pos']['f1'] = self.y
                cmd = {"cmd": "modify", "idx": self.idx,
                      "attrs":[{"attr": name, "value": self.y.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'z':
                self.__dict__[name] = np.array(value, float) if type(value) in (list, tuple) else np.array([value], float) if type(value) in (float, int) else value
                self.__dict__['pos']['f2'] = self.z
                cmd = {"cmd": "modify", "idx": self.idx,
                      "attrs":[{"attr": name, "value": self.z.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'red':
                self.__dict__[name] = np.array(value, float) if type(value) in (list, tuple) else np.array([value], float) if type(value) in (float, int) else value
                self.__dict__['color']['f0'] = self.red
                cmd = {"cmd": "modify", "idx": self.idx,
                      "attrs":[{"attr": name, "value": self.red.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'green':
                self.__dict__[name] = np.array(value, float) if type(value) in (list, tuple) else np.array([value], float) if type(value) in (float, int) else value
                self.__dict__['color']['f1'] = self.green
                cmd = {"cmd": "modify", "idx": self.idx,
                      "attrs":[{"attr": name, "value": self.green.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'blue':
                self.__dict__[name] = np.array(value, float) if type(value) in (list, tuple) else np.array([value], float) if type(value) in (float, int) else value
                self.__dict__['color']['f2'] = self.blue
                cmd = {"cmd": "modify", "idx": self.idx,
                      "attrs":[{"attr": name, "value": self.blue.tolist()}]}
                baseObj.cmds.append(cmd)
            elif name == 'color':
                self.__dict__[name] = np.array(value, dtype=('f4,f4,f4')) if type(value) is list and (len(value) == 0 or len(value[0]) == 3) else np.array(value, dtype=('f4,f4')) if type(value) is list and len(value[0]) == 2 else value
                self.__dict__['red'] = self.color['f0']
                self.__dict__['green'] = self.color['f1']
                self.__dict__['blue'] = self.color['f2']
                cmd = {"cmd": "modify", "idx": self.idx,
                      "attrs":[{"attr": 'colors', "value": self.color.tolist()}]}
                baseObj.cmds.append(cmd)
        else:
            super(faces, self).__setattr__(name, value)
    def append(self, pos = None, normal = None, color = None, red = None, green = None, blue = None):
        """
        Usage:
        f.append(pos=(x,y,z))
        f.append(pos=(x,y,z), normal=(nx,ny,nz))
        f.append(pos=(x,y,z), normal=(nx,ny,nz), color=(r,g,b))
        f.append(pos=(x,y,z), normal=(nx,ny,nz), red=r, green=g, blue=b)
        """
        # A full (red, green, blue) triple overrides an explicit color.
        if (red is not None) and (green is not None) and (blue is not None):
            color = (red,green,blue)
        if (pos is not None) and (normal is not None) and (color is not None):
            self.__dict__['pos'] = np.append(self.pos, np.array([pos], dtype=self.pos.dtype))
            self.__dict__['normal'] = np.append(self.normal, np.array([normal], dtype=self.normal.dtype))
            self.__dict__['color'] = np.append(self.color, np.array([color], dtype=self.color.dtype))
        elif (pos is not None) and (normal is not None):
            self.__dict__['pos'] = np.append(self.pos, np.array([pos], dtype=self.pos.dtype))
            self.__dict__['normal'] = np.append(self.normal, np.array([normal], dtype=self.normal.dtype))
        elif (pos is not None):
            # Position only: no normal or color is recorded for this vertex
            # (matches the original behavior).
            self.__dict__['pos'] = np.append(self.pos, np.array([pos], dtype=self.pos.dtype))
    def make_normals(self):
        # for triangle with vertices abc, (b-a).cross(c-b).norm() will be perpendicular to triangle
        # Not implemented yet.
        pass
    def make_twosided(self):
        # Not implemented yet.
        pass
    def smooth(self, angle = 0.95):
        # Not implemented yet.
        pass
    def __del__(self):
        # Intentionally a no-op; no delete command is sent for faces.
        pass
class faces2(baseAttrs2):
    """Alternate faces implementation that serializes all vertices in the
    creation command and pushes later vertices via append()."""
    def __init__(self, pos=[], color=[], normal=[], red=[1.], green=[1.], blue=[1.], material=None, frame=None, visible=True, display=None, **kwargs):
        posns = np.array(pos, dtype=('f4,f4,f4')) if type(pos) is list else pos
        # Was gated on type(pos) in the original — the normals array must be
        # gated on the type of 'normal'.
        normals = np.array(normal, dtype=('f4,f4,f4')) if type(normal) is list else normal
        colors = np.array(color, dtype=('f4,f4,f4')) if type(color) is list else np.array([color], dtype=('f4,f4,f4')) if type(color) is tuple else color
        # Membership tests replace the always-true 'is float or int'
        # precedence bug in the original.
        reds = np.array(red, float) if type(red) is list else np.array([red], float) if type(red) in (float, int) else red
        greens = np.array(green, float) if type(green) is list else np.array([green], float) if type(green) in (float, int) else green
        blues = np.array(blue, float) if type(blue) is list else np.array([blue], float) if type(blue) in (float, int) else blue
        # Was super(faces, self) in the original, which raises TypeError
        # because self is not an instance of the sibling class 'faces'.
        super(faces2, self).__init__(**kwargs)
        object.__setattr__(self, 'frame', frame)
        object.__setattr__(self, 'display', display)
        # radius and colors are read below (cmd dict) and in append(); the
        # original never set them, so those accesses raised AttributeError.
        # radius defaults to 0. to match the faces class.
        object.__setattr__(self, 'radius', 0.)
        object.__setattr__(self, 'colors', colors)
        pnts = []
        if len(posns) > 0:
            i = 0
            col = colors[-1]
            for posn in posns:
                # Use the matching color when supplied, else repeat the last.
                col = colors[i] if len(colors) > i else col
                pnts.append({"pos": posn.tolist(), "color": col.tolist()})
                i += 1
        cmd = {"cmd": "faces", "idx": self.idx,
               "attrs": [{"attr": "pnts", "value": pnts},
                         {"attr": "radius", "value": self.radius},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}]}
        # Send immediately over a live connection when one exists, otherwise
        # queue for the next flush.
        if (baseObj.glow != None):
            baseObj.glow.comm.send([cmd])
        else:
            self.appendcmd(cmd)
    def __setattr__(self, name, value):
        if name in ['frame','display']:
            self.__dict__[name] = vector(value) if type(value) is tuple else value
            # NOTE(review): the radius/axis branches below are unreachable —
            # neither name can be in ['frame','display']; preserved verbatim
            # pending confirmation of the intended dispatch.
            if name == 'radius':
                cmd = {"idx": self.idx, "attr": "radius", "val": self.radius}
                baseObj.cmds.append(cmd)
            elif name == 'axis':
                cmd = {"idx": self.idx, "attr": name, "val": self.axis.values()}
                baseObj.cmds.append(cmd)
        else:
            # Was super(faces, self) in the original (TypeError at runtime).
            super(faces2, self).__setattr__(name, value)
    def append(self, pos = None, color = None, normal = None, red = None, green = None, blue = None):
        """Push one vertex to the renderer; a full (red, green, blue) triple
        overrides an explicit color, and a missing color repeats the last."""
        if (red is not None) and (green is not None) and (blue is not None):
            color = (red,green,blue)
        if (pos is not None) and (color is not None):
            self.colors = np.append(self.colors, np.array([color], dtype=self.colors.dtype))
            cmd = {"cmd": "push", "idx": self.idx,
                  "attrs":[{"attr": "pos", "value": list(pos)},{"attr": "color", "value": list(color)}]}
            baseObj.cmds.append(cmd)
        elif (pos is not None):
            cmd = {"cmd": "push", "idx": self.idx,
                  "attrs":[{"attr": "pos", "value": list(pos)},{"attr": "color", "value": self.colors[-1].tolist()}]}
            baseObj.cmds.append(cmd)
class helix(baseAttrs2):
    """see helix documentation at http://vpython.org/contents/docs/helix.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), length=-1., radius=1., thickness=0., coils=5,
                 up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., frame=None, visible=True, display=None, material=None, **kwargs):
        # Normalize axis to a vector so .mag()/.norm() are available.
        axis = vector(axis) if type(axis) in [tuple, list, np.ndarray] else axis
        # length == -1 means "derive from the axis"; otherwise the axis is
        # rescaled to the requested length.
        if (length == -1.):
            length = axis.mag()
        else:
            axis = axis.norm() * length
        # Default wire thickness is proportional to the helix radius.
        if (thickness == 0.):
            thickness = radius/20.
        # GlowScript expresses helix geometry as (length, diameter, diameter).
        size = vector(length,radius*2,radius*2)
        super(helix, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, size=size, up=up, color=color, red=red, green=green,
                                    blue=blue, material=material, frame=frame, display=display, visible=visible, **kwargs)
        # size is derived (see __getattribute__ below); stop notifying on it.
        object.__getattribute__(self, 'size').remove_notification((self.addattr,'size'))
        # Bypass __setattr__ so setup does not queue 'modify' commands.
        object.__setattr__(self, 'length', length)
        object.__setattr__(self, 'radius', radius)
        object.__setattr__(self, 'thickness', thickness)
        object.__setattr__(self, 'coils', coils)
        # Describe the new helix to the renderer.
        cmd = {"cmd": "helix", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "axis", "value": self.axis.values()},
                         {"attr": "size", "value": self.size.values()},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "thickness", "value": self.thickness},
                         {"attr": "coils", "value": self.coils},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}]}
        self.appendcmd(cmd)
    def __setattr__(self, name, value):
        # Geometry shorthands are handled here; all other attributes are
        # delegated to baseAttrs2.
        if name in ['length','radius','thickness','coils','size']:
            val = value if type(value) is vector else vector(value) if type(value) in [tuple, list, np.ndarray] else value
            self.__dict__[name] = val
            if name == 'length':
                # Keep the axis direction, change its magnitude.
                self.__dict__['axis'] = self.axis.norm() * value
                self.addattr('axis')
            elif name == 'radius':
                # Radius reaches the renderer through the derived size.
                self.addattr('size')
            elif name == 'thickness':
                self.addattr(name)
            elif name == 'coils':
                self.addattr(name)
            elif name == 'size':
                """VPython helix does not have a size attribute but Glowscript helix does"""
                pass
        else:
            super(helix, self).__setattr__(name, value)
    def __getattribute__(self, name):
        # length and size are derived from axis/radius rather than stored.
        if name in ['size','length']:
            if name == 'length':
                return object.__getattribute__(self, 'axis').mag()
            elif name == 'size':
                return vector(object.__getattribute__(self, 'axis').mag(), object.__getattribute__(self, 'radius')*2.0, object.__getattribute__(self, 'radius')*2.0)
        else:
            return super(helix, self).__getattribute__(name)
    def __del__(self):
        # Ask the renderer to remove this helix, then run base teardown.
        cmd = {"cmd": "delete", "idx": self.idx}
        self.appendcmd(cmd)
        #baseObj.cmds.append(cmd)
        super(helix, self).__del__()
class arrow(trailAttrs):
    """see arrow documentation at http://vpython.org/contents/docs/arrow.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), length=-1., shaftwidth=0., headwidth=0., headlength=0., fixedwidth=False,
                 frame=None, up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., material=None, opacity=1.0,
                 display=None, visible=True, make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
        # Normalize axis to a vector so .mag()/.norm() are available.
        axis = vector(axis) if type(axis) in [tuple, list, np.ndarray] else axis
        # Track which width/length arguments the caller supplied; a value of
        # zero is treated as "not provided" and gets a proportional default.
        # NOTE(review): the 'fixedwidth' argument is accepted but never
        # stored here — confirm whether it should be.
        shaftwidth_provided = headwidth_provided = headlength_provided = True
        if (length == -1.):
            length = axis.mag()
        else:
            axis = axis.norm() * length
        if (shaftwidth == 0.):
            shaftwidth_provided = False
            shaftwidth = 0.1*length
        if (headwidth == 0.):
            headwidth_provided = False
            headwidth = 2.*shaftwidth
        if (headlength == 0.):
            headlength_provided = False
            headlength = 3.*shaftwidth
        super(arrow, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, up=up, color=color, red=red, green=green, blue=blue,
                                    material=material, opacity=opacity, frame=frame, display=display, visible=visible,
                                    make_trail=make_trail, trail_type=trail_type, interval=interval, retain=retain, **kwargs)
        # Bypass __setattr__ so setup does not queue 'modify' commands.
        object.__setattr__(self, 'length', length)
        object.__setattr__(self, 'shaftwidth', shaftwidth)
        object.__setattr__(self, 'headwidth', headwidth)
        object.__setattr__(self, 'headlength', headlength)
        # If any width/length was supplied explicitly, include all three
        # width attributes in the creation command; otherwise omit them and
        # let the renderer apply its own proportional defaults.
        if ((shaftwidth_provided == True) or (headwidth_provided == True) or (headlength_provided == True)):
            cmd = {"cmd": "arrow", "idx": self.idx, "guid": self.guid,
                   "attrs": [{"attr": "pos", "value": self.pos.values()},
                             {"attr": "axis_and_length", "value": self.axis.values()},
                             {"attr": "up", "value": self.up.values()},
                             {"attr": "color", "value": list(self.color)},
                             {"attr": "opacity", "value": self.opacity},
                             {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1},
                             {"attr": "visible", "value": self.visible},
                             {"attr": "make_trail", "value": self.make_trail},
                             {"attr": "type", "value": 'curve' if self.trail_type == 'curve' else 'spheres'},
                             {"attr": "interval", "value": self.interval},
                             {"attr": "shaftwidth", "value": self.shaftwidth},
                             {"attr": "headwidth", "value": self.headwidth},
                             {"attr": "headlength", "value": self.headlength},
                             {"attr": "retain", "value": self.retain}]}
            self.appendcmd(cmd)
        else:
            cmd = {"cmd": "arrow", "idx": self.idx, "guid": self.guid,
                   "attrs": [{"attr": "pos", "value": self.pos.values()},
                             {"attr": "axis_and_length", "value": self.axis.values()},
                             {"attr": "up", "value": self.up.values()},
                             {"attr": "color", "value": list(self.color)},
                             {"attr": "opacity", "value": self.opacity},
                             {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1},
                             {"attr": "visible", "value": self.visible},
                             {"attr": "make_trail", "value": self.make_trail},
                             {"attr": "type", "value": 'curve' if self.trail_type == 'curve' else 'spheres'},
                             {"attr": "interval", "value": self.interval},
                             {"attr": "retain", "value": self.retain}]}
            self.appendcmd(cmd)
        """
        if (shaftwidth_provided == True):
            self.shaftwidth = shaftwidth
        if (headwidth_provided == True):
            self.headwidth = headwidth
        if (headlength_provided == True):
            self.headlength = headlength
        """
        # Arrows report axis changes to the renderer under the name
        # 'axis_and_length' instead of 'axis'; rewire the notification.
        object.__getattribute__(self, 'axis').remove_notification((self.addattr,'axis'))
        object.__getattribute__(self, 'axis').add_notification((self.addattr,'axis_and_length'))
        if (frame != None):
            frame.objects.append(self)
            frame.update_obj_list()
    def __setattr__(self, name, value):
        # Geometry shorthands handled here; everything else delegates to
        # trailAttrs.
        if name in ['length','axis','shaftwidth','headwidth','headlength','fixedwidth']:
            if name == 'axis':
                # Detach notifications from the old axis vector before it is
                # replaced below.
                self.__dict__['axis'].remove_notification((self.addattr,'axis_and_length'))
            val = value if type(value) is vector else vector(value) if type(value) in [tuple, list, np.ndarray] else value
            self.__dict__[name] = val
            if name == 'length':
                # Keep direction, change magnitude.
                self.__dict__['axis'] = self.axis.norm() * value
                self.addattr('axis_and_length')
            elif name == 'axis':
                # Re-attach notifications on the new axis vector.
                self.__dict__['axis'].add_notification((self.addattr,'axis_and_length'))
                self.addattr('axis_and_length')
            elif name == 'shaftwidth':
                self.addattr(name)
            elif name == 'headwidth':
                self.addattr(name)
            elif name == 'headlength':
                self.addattr(name)
        else:
            super(arrow, self).__setattr__(name, value)
    def __getattribute__(self, name):
        # length and axis_and_length are both derived from the axis vector.
        if name in ['length', 'axis_and_length']:
            if name == 'length':
                return object.__getattribute__(self, 'axis').mag()
            elif name == 'axis_and_length':
                return object.__getattribute__(self, 'axis')
        else:
            return super(arrow, self).__getattribute__(name)
    def __del__(self):
        # Ask the renderer to remove the arrow, detach the axis notification,
        # then run base teardown.
        cmd = {"cmd": "delete", "idx": self.idx}
        self.appendcmd(cmd)
        object.__getattribute__(self, 'axis').remove_notification((self.addattr,'axis_and_length'))
        super(arrow, self).__del__()
class cylinder(trailAttrs):
    """see cylinder documentation at http://vpython.org/contents/docs/cylinder.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), length=-1., radius=1.,
                 frame=None, up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., material=None, opacity=1.0,
                 display=None, visible=True, make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
        # Accept tuple/list/ndarray axes by converting them to vectors first.
        if type(axis) in [tuple, list, np.ndarray]:
            axis = vector(axis)
        # Either derive the length from the axis (length == -1 sentinel), or
        # rescale the axis to the requested length.
        if length == -1.:
            length = axis.mag()
        else:
            axis = axis.norm() * length
        size = vector(length, radius*2, radius*2)
        super(cylinder, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, size=size, up=up, color=color, red=red, green=green, blue=blue,
                                       material=material, opacity=opacity, frame=frame, display=display, visible=visible,
                                       make_trail=make_trail, trail_type=trail_type, interval=interval, retain=retain, **kwargs)
        # Bypass __setattr__ so setup does not queue 'modify' commands.
        object.__setattr__(self, 'length', length)
        object.__setattr__(self, 'radius', radius)
        # Describe the new cylinder to the renderer.
        canvas_idx = self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1
        attr_pairs = [("pos", self.pos.values()),
                      ("axis", self.axis.values()),
                      ("size", self.size.values()),
                      ("up", self.up.values()),
                      ("color", list(self.color)),
                      ("opacity", self.opacity),
                      ("shininess", self.shininess),
                      ("emissive", self.emissive),
                      ("canvas", canvas_idx),
                      ("make_trail", self.make_trail),
                      ("type", 'curve' if self.trail_type == 'curve' else 'spheres'),
                      ("interval", self.interval),
                      ("visible", self.visible),
                      ("retain", self.retain)]
        cmd = {"cmd": "cylinder", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": a, "value": v} for (a, v) in attr_pairs]}
        self.appendcmd(cmd)
        if frame != None:
            frame.objects.append(self)
            frame.update_obj_list()
    def __setattr__(self, name, value):
        # length/radius are geometry shorthands handled locally; everything
        # else is delegated to trailAttrs.
        if name == 'length' or name == 'radius':
            if type(value) in [tuple, list, np.ndarray]:
                self.__dict__[name] = vector(value)
            else:
                self.__dict__[name] = value
            if name == 'length':
                # Keep the axis direction, change its magnitude.
                self.__dict__['axis'] = self.axis.norm() * value
                self.addattr('axis')
            else:
                # Radius reaches the renderer through the derived size.
                self.addattr('size')
        else:
            super(cylinder, self).__setattr__(name, value)
    def __getattribute__(self, name):
        # length and size are derived from axis/radius rather than stored.
        if name == 'length':
            return object.__getattribute__(self, 'axis').mag()
        if name == 'size':
            diameter = object.__getattribute__(self, 'radius') * 2.0
            return vector(object.__getattribute__(self, 'axis').mag(), diameter, diameter)
        return super(cylinder, self).__getattribute__(name)
    def __del__(self):
        # Ask the renderer to remove this cylinder, then run base teardown.
        self.appendcmd({"cmd": "delete", "idx": self.idx})
        super(cylinder, self).__del__()
class pyramid(trailAttrs):
"""see pyramid documentation at http://vpython.org/contents/docs/pyramid.html"""
def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), size=(1.,1.,1.),
length=-1., width=1., height=1., up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1.,
frame=None, material=None, opacity=1.0, display=None, visible=True,
make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
axis = vector(axis) if type(axis) in [tuple, list, np.ndarray] else axis
size = vector(size) if type(size) in [tuple, list, np.ndarray] else size
if (length == -1.):
if size[0] == 1. and size[1] == 1. and size[2] == 1.:
length = axis.mag()
size[0] = length
else:
length = size[0]
height = size[1]
width = size[2]
if (length != 1.0) or (width != 1.0) or (height != 1.0):
size = vector(length,height,width)
else:
length = size[0]
height = size[1]
width = size[2]
axis = axis.norm() * length
super(pyramid, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, size=size, up=up, color=color, red=red, green=green, blue=blue,
material=material, opacity=opacity, frame=frame, display=display, visible=visible,
make_trail=make_trail, trail_type=trail_type, interval=interval, retain=retain, **kwargs)
object.__setattr__(self, 'length', length)
object.__setattr__(self, 'width', width)
object.__setattr__(self, 'height', height)
cmd = {"cmd": "pyramid", "idx": self.idx, "guid": self.guid,
"attrs": [{"attr": "pos", "value": self.pos.values()},
{"attr": "axis", "value": self.axis.values()},
{"attr": "size", "value": self.size.values()},
{"attr": "up", "value": self.up.values()},
{"attr": "color", "value": list(self.color)},
{"attr": "opacity", "value": self.opacity},
{"attr": "shininess", "value": self.shininess},
{"attr": "emissive", "value": self.emissive},
{"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1},
{"attr": "make_trail", "value": self.make_trail},
{"attr": "type", "value": 'curve' if self.trail_type == 'curve' else 'spheres'},
{"attr": "interval", "value": self.interval},
{"attr": "visible", "value": self.visible},
{"attr": "retain", "value": self.retain}]}
self.appendcmd(cmd)
if (frame != None):
frame.objects.append(self)
frame.update_obj_list()
    def __setattr__(self, name, value):
        # Intercept geometry attributes: 'length' is carried by the axis
        # vector's magnitude, and 'size' fans out to axis/height/width
        # (mirroring the derived reads in __getattribute__). All other
        # attributes go through the base-class machinery.
        if name in ['length','width','height','size']:
            # Coerce sequence values to vector; scalars pass through as-is.
            val = value if type(value) is vector else vector(value) if type(value) in [tuple, list, np.ndarray] else value
            self.__dict__[name] = val
            if name == 'length':
                # Keep axis direction, rescale its magnitude to the new length.
                self.__dict__['axis'] = self.axis.norm() * val
                self.addattr('axis')
            elif name == 'height':
                # size is derived from height/width on read; just resync it.
                self.addattr('size')
            elif name == 'width':
                self.addattr('size')
            elif name == 'size':
                # size is (length, height, width): split into backing fields.
                self.__dict__['axis'] = self.axis.norm() * val[0]
                self.__dict__['height'] = val[1]
                self.__dict__['width'] = val[2]
                self.addattr(name)
        else:
            super(pyramid, self).__setattr__(name, value)
    def __getattribute__(self, name):
        # 'length' and 'size' are derived views over axis/height/width
        # rather than stored values, so compute them on every read.
        if name in ['size','length']:
            if name == 'length':
                return object.__getattribute__(self, 'axis').mag()
            elif name == 'size':
                # size is reported as (length, height, width).
                return vector(object.__getattribute__(self, 'axis').mag(), object.__getattribute__(self, 'height'), object.__getattribute__(self, 'width'))
        else:
            return super(pyramid, self).__getattribute__(name)
def __del__(self):
cmd = {"cmd": "delete", "idx": self.idx}
self.appendcmd(cmd)
super(pyramid, self).__del__()
class sphere(trailAttrs):
    """see sphere documentation at http://vpython.org/contents/docs/sphere.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), radius=1.0,
                 frame=None, up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., material=None, opacity=1.0,
                 display=None, visible=True, make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
        # A sphere's bounding size is its diameter along every axis.
        size = vector(radius*2,radius*2,radius*2)
        super(sphere, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, size=size, up=up, color=color, red=red, green=green, blue=blue,
                                     material=material, opacity=opacity, frame=frame, display=display, visible=visible,
                                     make_trail=make_trail, trail_type=trail_type, interval=interval, retain=retain, **kwargs)
        # object.__setattr__ bypasses our __setattr__ so no update commands
        # are emitted while the object is still being constructed.
        object.__setattr__(self, 'radius', radius )
        object.__setattr__(self, 'display', display )
        # Initial state pushed to the glowscript front end.
        cmd = {"cmd": "sphere", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "axis", "value": self.axis.values()},
                         {"attr": "size", "value": self.size.values()},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "opacity", "value": self.opacity},
                         {"attr": "shininess", "value": self.shininess},
                         {"attr": "emissive", "value": self.emissive},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1},
                         {"attr": "make_trail", "value": self.make_trail},
                         {"attr": "type", "value": 'curve' if self.trail_type == 'curve' else 'spheres'},
                         {"attr": "interval", "value": self.interval},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "retain", "value": self.retain}]}
        self.appendcmd(cmd)
        if (frame != None):
            frame.objects.append(self)
            frame.update_obj_list()
    def __setattr__(self, name, value):
        # Keep the stored (diameter-based) size in lockstep with radius;
        # everything else uses the trailAttrs machinery.
        if name == 'radius':
            self.__dict__[name] = value
            self.__dict__['size'][0] = value*2
            self.__dict__['size'][1] = value*2
            self.__dict__['size'][2] = value*2
            self.addattr('size')
        else:
            super(sphere, self).__setattr__(name, value)
    def __del__(self):
        # Ask the front end to remove this object before base cleanup.
        cmd = {"cmd": "delete", "idx": self.idx}
        self.appendcmd(cmd)
        super(sphere, self).__del__()
class ellipsoid(trailAttrs):
    """see ellipsoid documentation at http://vpython.org/contents/docs/ellipsoid.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), size=(1.,1.,1.),
                 length=-1., width=1., height=1., up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1.,
                 frame=None, material=None, opacity=1.0, display=None, visible=True,
                 make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
        # Accept tuples/lists/arrays for axis and size by coercing to vector.
        axis = vector(axis) if type(axis) in [tuple, list, np.ndarray] else axis
        size = vector(size) if type(size) in [tuple, list, np.ndarray] else size
        # Reconcile the two ways of specifying geometry: explicit
        # length/width/height versus a size triple (length, height, width).
        # NOTE(review): when length is given (!= -1) it is overwritten from
        # size below, same as the sibling shapes -- confirm that is intended.
        if (length == -1.):
            if size[0] == 1. and size[1] == 1. and size[2] == 1.:
                # Nothing explicit: derive length from the axis magnitude.
                length = axis.mag()
                size[0] = length
            else:
                length = size[0]
                height = size[1]
                width = size[2]
            if (length != 1.0) or (width != 1.0) or (height != 1.0):
                size = vector(length,height,width)
        else:
            length = size[0]
            height = size[1]
            width = size[2]
        # Axis direction is kept; its magnitude carries the length.
        axis = axis.norm() * length
        super(ellipsoid, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, size=size, up=up, color=color, red=red, green=green, blue=blue,
                                        material=material, opacity=opacity, frame=frame, display=display, visible=visible,
                                        make_trail=make_trail, trail_type=trail_type, interval=interval, retain=retain, **kwargs)
        # Bypass __setattr__ during construction (no update commands yet).
        object.__setattr__(self, 'length', length)
        object.__setattr__(self, 'width', width)
        object.__setattr__(self, 'height', height)
        # Initial state pushed to the glowscript front end.
        cmd = {"cmd": "ellipsoid", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "axis", "value": self.axis.values()},
                         {"attr": "size", "value": self.size.values()},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "opacity", "value": self.opacity},
                         {"attr": "shininess", "value": self.shininess},
                         {"attr": "emissive", "value": self.emissive},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1},
                         {"attr": "make_trail", "value": self.make_trail},
                         {"attr": "type", "value": 'curve' if self.trail_type == 'curve' else 'spheres'},
                         {"attr": "interval", "value": self.interval},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "retain", "value": self.retain}]}
        self.appendcmd(cmd)
        if (frame != None):
            frame.objects.append(self)
            frame.update_obj_list()
    def __setattr__(self, name, value):
        # Same derived-geometry handling as pyramid: 'length' lives in axis,
        # 'size' fans out to axis/height/width.
        if name in ['length','width','height','size']:
            val = value if type(value) is vector else vector(value) if type(value) in [tuple, list, np.ndarray] else value
            self.__dict__[name] = val
            if name == 'length':
                self.__dict__['axis'] = self.axis.norm() * value
                self.addattr('axis')
            elif name == 'height':
                self.addattr('size')
            elif name == 'width':
                self.addattr('size')
            elif name == 'size':
                self.__dict__['axis'] = self.axis.norm() * value[0]
                self.__dict__['height'] = value[1]
                self.__dict__['width'] = value[2]
                self.addattr(name)
        else:
            super(ellipsoid, self).__setattr__(name, value)
    def __getattribute__(self, name):
        # 'length' and 'size' are computed from axis/height/width on read.
        if name in ['size','length']:
            if name == 'length':
                return object.__getattribute__(self, 'axis').mag()
            elif name == 'size':
                return vector(object.__getattribute__(self, 'axis').mag(), object.__getattribute__(self, 'height'), object.__getattribute__(self, 'width'))
        else:
            return super(ellipsoid, self).__getattribute__(name)
    def __del__(self):
        # Ask the front end to remove this object before base cleanup.
        cmd = {"cmd": "delete", "idx": self.idx}
        self.appendcmd(cmd)
        super(ellipsoid, self).__del__()
class ring(baseAttrs):
    """see ring documentation at http://vpython.org/contents/docs/ring.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.),
                 length=1., radius=1., thickness=0.0, frame=None, display=None, visible=True,
                 up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1.,
                 make_trail=False, trail_type="curve", interval=10, retain=50, **kwargs):
        # Default tube thickness is a tenth of the ring radius.
        if (thickness == 0.0):
            thickness = radius/10.0
        # Bounding size: 2*thickness along the axis, outer diameter across.
        size = vector(thickness,radius+thickness,radius+thickness)*2.0
        super(ring, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, size=size, up=up, color=color, red=red, green=green,
                                   blue=blue, frame=frame, display=display, visible=visible, **kwargs)
        # Bypass __setattr__ during construction (no update commands yet).
        object.__setattr__(self, 'length', length)
        object.__setattr__(self, 'radius', radius)
        object.__setattr__(self, 'thickness', thickness)
        object.__setattr__(self, 'make_trail', make_trail )
        object.__setattr__(self, 'trail_type', trail_type )
        object.__setattr__(self, 'interval', interval)
        object.__setattr__(self, 'retain', retain)
        #object.__setattr__(self, 'trail_object', curve() if self.trail_type == "curve" else pnts())
        # Initial state pushed to the glowscript front end.
        cmd = {"cmd": "ring", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "axis", "value": self.axis.values()},
                         {"attr": "size", "value": self.size.values()},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1},
                         {"attr": "make_trail", "value": self.make_trail},
                         {"attr": "type", "value": 'curve' if self.trail_type == 'curve' else 'spheres'},
                         {"attr": "interval", "value": self.interval},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "retain", "value": self.retain}]}
        self.appendcmd(cmd)
    def __setattr__(self, name, value):
        # Trail settings are forwarded verbatim; radius/thickness also
        # refresh the cached size vector.
        # NOTE(review): 'length' is stored but no update is sent for it,
        # unlike the other attributes -- confirm that is intended.
        if name in ['make_trail','trail_type','interval','retain','length','radius','thickness']:
            self.__dict__[name] = value
            if name == 'make_trail':
                self.addattr(name)
            elif name == 'trail_type':
                self.addattr(name)
            elif name == 'interval':
                self.addattr(name)
            elif name == 'retain':
                self.addattr(name)
            elif name == 'radius':
                # Outer diameter components depend on radius + thickness.
                self.__dict__['size'][1] = (value + self.thickness)*2.0
                self.__dict__['size'][2] = (value + self.thickness)*2.0
                self.addattr('size')
            elif name == 'thickness':
                self.__dict__['size'][0] = value*2.0
                self.addattr('size')
        else:
            super(ring, self).__setattr__(name, value)
    def __del__(self):
        # Ask the front end to remove this object before base cleanup.
        cmd = {"cmd": "delete", "idx": self.idx}
        self.appendcmd(cmd)
        super(ring, self).__del__()
class label(baseAttrs2):
    """see label documentation at http://vpython.org/contents/docs/label.html"""
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., color=(1.,1.,1.), red=1., green=1., blue=1., opacity=0.66,
                 xoffset=20., yoffset=12., text="", font="sans", height=13, background=(0.,0.,0.),
                 border=5, box=True, line=True, linecolor=(0.,0.,0.), space=0., display=None, frame=None, visible=True, **kwargs):
        # background = scene.background # default background color
        # color = scene.foreground # default color
        super(label, self).__init__(pos=pos, x=x, y=y, z=z, color=color, red=red, green=green, blue=blue, opacity=opacity,
                                    frame=frame, display=display, visible=visible, **kwargs)
        # Bypass __setattr__ during construction (no update commands yet).
        object.__setattr__(self, 'xoffset', xoffset)
        object.__setattr__(self, 'yoffset', yoffset)
        object.__setattr__(self, 'text', text)
        object.__setattr__(self, 'font', font)
        object.__setattr__(self, 'height', height)
        object.__setattr__(self, 'background', background)
        object.__setattr__(self, 'border', border)
        object.__setattr__(self, 'box', box)
        object.__setattr__(self, 'line', line)
        object.__setattr__(self, 'linecolor', linecolor)
        object.__setattr__(self, 'space', space)
        # Initial state pushed to the glowscript front end.
        cmd = {"cmd": "label", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "text", "value": self.text},
                         {"attr": "xoffset", "value": self.xoffset},
                         {"attr": "yoffset", "value": self.yoffset},
                         {"attr": "font", "value": self.font},
                         {"attr": "height", "value": self.height},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "background", "value": list(self.background)},
                         {"attr": "opacity", "value": self.opacity},
                         {"attr": "border", "value": self.border},
                         {"attr": "box", "value": self.box},
                         {"attr": "line", "value": self.line},
                         {"attr": "linecolor", "value": list(self.linecolor)},
                         {"attr": "space", "value": self.space},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}
                         ]}
        self.appendcmd(cmd)
        if (frame != None):
            frame.objects.append(self)
            frame.update_obj_list()
    def __setattr__(self, name, value):
        # Label-specific attributes are stored and re-sent to the front end;
        # anything else is handled by the base class.
        if name in ['xoffset','yoffset','text','font','height','background',
                    'border','box','line','linecolor','space']:
            self.__dict__[name] = value
            self.addattr(name)
        else:
            super(label, self).__setattr__(name, value)
class frame(baseAttrs):
    """see frame documentation at http://vpython.org/contents/docs/frame.html"""
    # Class-level default; each instance gets its own list in __init__.
    objects = []
    def __init__(self, pos=(0.,0.,0.), x=0., y=0., z=0., axis=(1.,0.,0.), display=None, visible=True,
                 up=(0.,1.,0.), color=(1.,1.,1.), red=1., green=1., blue=1., **kwargs):
        super(frame, self).__init__(pos=pos, x=x, y=y, z=z, axis=axis, up=up, color=color, red=red, green=green, blue=blue,
                                    display=display,visible=visible,**kwargs)
        # Per-instance member list (shadows the class attribute above).
        object.__setattr__(self, 'objects', [])
        # Frames are represented as glowscript compounds.
        cmd = {"cmd": "compound", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "axis", "value": self.axis.values()},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "visible", "value": self.visible},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}]}
        self.appendcmd(cmd)
    def __setattr__(self, name, value):
        super(frame, self).__setattr__(name, value)
    def frame_to_world(self, pos):
        # need to implement this: should transform frame-local pos to world
        # coordinates; currently returns the input unchanged.
        return pos
    def world_to_frame(self, pos):
        # need to implement this: should transform world pos to frame-local
        # coordinates; currently returns the input unchanged.
        return pos
    def update_obj_list(self):
        # Rebuild the glowscript compound from the current member objects.
        # self.visible = False # we are going to create a new compound in glowscript so remove current one
        obj_idxs = []
        for obj in self.objects:
            obj_idxs.append(obj.idx)
        cmd = {"cmd": "compound", "idx": self.idx,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "axis", "value": self.axis.values()},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "obj_idxs", "value": obj_idxs},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}]}
        self.appendcmd(cmd)
class Mouse(object):
    """Snapshot of mouse state: position, pick info, camera ray and modifier keys."""
    def __init__(self, pos=(0.,0.,0.), pick=None, pickpos=(0.,0.,0.), camera=None, ray=(0.,0.,1.), alt=False, ctrl=False, shift = False):
        # Plain attribute bag; every constructor argument is stored as-is.
        for attr, val in (('pos', pos), ('pick', pick), ('pickpos', pickpos),
                          ('camera', camera), ('ray', ray), ('alt', alt),
                          ('ctrl', ctrl), ('shift', shift)):
            setattr(self, attr, val)
    def getclick(self):
        """Placeholder: click polling is not implemented for this backend."""
        pass
    def project(self, normal=(0,1,0), point=(0,0,0), d=0):
        """
        Project the mouse position onto the plane with the given normal,
        through `point` (or at distance `d` along the normal when d != 0).
        Returns None for a zero-length normal.
        """
        plane_normal = vector(normal) if type(normal) in (tuple, list) else normal
        if plane_normal.mag() == 0.:
            return None
        unit = plane_normal.norm()
        if (d != 0):
            plane_point = d*unit
        else:
            plane_point = vector(point) if type(point) in (tuple, list) else point
        mouse_pos = vector(self.pos) if type(self.pos) in (tuple, list) else self.pos
        # Subtract the component of (mouse - plane point) along the normal.
        offset = mouse_pos - plane_point
        return mouse_pos - offset.dot(unit)*unit
class sceneObj(baseObj):
    """
    Shared state and behaviour for display surfaces: camera settings,
    lighting, sizing, user-interaction flags, plus event (un)binding to the
    glowscript front end.
    """
    # Class-level defaults; per-instance values are assigned in __init__.
    visible = True
    foreground = (1,1,1)
    background = (0,0,0)
    ambient = color.gray(0.2)
    stereo = 'redcyan'
    stereodepth = 1.
    x = 0.
    y = 0.
    height = 500
    width = 800
    title = ""
    fullscreen = False
    exit = True
    center = vector(0,0,0)
    autocenter = True
    forward = vector(0,0,-1)
    fov = math.pi/3.
    range = (1.,1.,1.)
    scale = (1.,1.,1.)
    up = vector(0.,1.,0.)
    autoscale = True
    userzoom = True
    userspin = True
    lights = []
    objects = []
    def __init__(self, visible=True, foreground=(1,1,1), background=(0,0,0), ambient=color.gray(0.2), stereo='redcyan',
                 stereodepth=1., x=0., y=0., height=480, width=640, title="", fullscreen=False,
                 exit=True, center=(0.,0.,0.), autocenter=True, forward=(0.,0.,-1.), fov=math.pi/3.,
                 range=(1.,1.,1.), scale=(1.,1.,1.), up=(0.,1.,0.), autoscale=True, userzoom=True, userspin=True, **kwargs):
        super(sceneObj, self).__init__(**kwargs)
        rate.active = False
        # Scalar range/scale means "uniform in all three dimensions".
        if isinstance(range, (int, long, float)):
            range = (range,range,range)
        if isinstance(scale, (int, long, float)):
            scale = (scale,scale,scale)
        # range and scale are reciprocals of each other on x.
        # BUG FIX: the original assigned scale[0]/range[0] in place, which
        # raises TypeError when the triples arrive as (immutable) tuples.
        # Rebuild the triple instead of mutating it.
        if (range[0] != 1.) and (range[0] != 0.):
            scale = (1./range[0], scale[1], scale[2])
        if (scale[0] != 1.) and (scale[0] != 0.):
            range = (1./scale[0], range[1], range[2])
        # object.__setattr__ bypasses this class's __setattr__ so that no
        # update commands are emitted during construction.
        object.__setattr__(self, 'objects', [])
        object.__setattr__(self, 'visible', visible)
        object.__setattr__(self, 'foreground', foreground)
        object.__setattr__(self, 'background', background)
        object.__setattr__(self, 'ambient', ambient)
        object.__setattr__(self, 'stereo', stereo)
        object.__setattr__(self, 'stereodepth', stereodepth)
        object.__setattr__(self, 'x', x)
        object.__setattr__(self, 'y', y)
        object.__setattr__(self, 'height', height)
        object.__setattr__(self, 'width', width)
        object.__setattr__(self, 'title', title)
        object.__setattr__(self, 'fullscreen', fullscreen)
        object.__setattr__(self, 'exit', exit)
        object.__setattr__(self, 'autocenter', autocenter)
        object.__setattr__(self, 'forward', vector(forward) if type(forward) in [tuple, list, np.ndarray] else forward)
        object.__setattr__(self, 'fov', fov)
        object.__setattr__(self, 'range', vector(range) if type(range) is tuple else range)
        object.__setattr__(self, 'scale', vector(scale) if type(scale) is tuple else scale)
        object.__setattr__(self, 'up', vector(up) if type(up) in [tuple, list, np.ndarray] else up)
        object.__setattr__(self, 'center', vector(center) if type(center) in [tuple, list, np.ndarray] else center)
        object.__setattr__(self, 'autoscale', autoscale)
        object.__setattr__(self, 'userzoom', userzoom)
        object.__setattr__(self, 'userspin', userspin)
        object.__setattr__(self, 'mouse', Mouse())
        # Vector-valued attributes notify us on in-place mutation so the
        # front end can be kept in sync.
        object.__getattribute__(self, 'forward').add_notification((self.addattr,'forward'))
        object.__getattribute__(self, 'range').add_notification((self.addattr,'range'))
        object.__getattribute__(self, 'scale').add_notification((self.addattr,'scale'))
        object.__getattribute__(self, 'up').add_notification((self.addattr,'up'))
        object.__getattribute__(self, 'center').add_notification((self.addattr,'center'))
    def __setattr__(self, name, value):
        # Store scene attributes, re-wiring mutation notifications for
        # vector-valued ones, then push the change to the front end.
        if name == 'mouse':
            self.__dict__[name] = value
        elif name in ['visible','foreground','background','ambient','stereo','stereodepth','x','y',
                      'height','width','title','fullscreen','exit','center','autocenter',
                      'forward','fov','range','scale','up','autoscale','userzoom','userspin']:
            if name in ['forward','range','scale','up','center']:
                # Detach the old value's notification before replacing it.
                self.__dict__[name].remove_notification((self.addattr,name))
            if name in ['foreground','background','ambient']:
                self.__dict__[name] = value
            elif name in ['scale','range']:
                if isinstance(value, (int, long, float)):
                    value = (value,value,value)
                self.__dict__[name] = vector(value) if type(value) is tuple else value
                self.__dict__[name].add_notification((self.addattr,name))
            elif name in ['up','center','forward']:
                self.__dict__[name] = vector(value) if type(value) in [tuple, list, np.ndarray] else value
                self.__dict__[name].add_notification((self.addattr,name))
            else:
                self.__dict__[name] = vector(value) if type(value) is tuple else value
            if name in ['background','ambient','height','width','center',
                        'forward','fov','range','scale','up','autoscale','userzoom','userspin']:
                if name == 'background':
                    self.addattr(name)
                elif name == 'ambient':
                    self.addattr(name)
                elif name == 'center':
                    self.addattr(name)
                elif name == 'forward':
                    self.addattr(name)
                elif name == 'range':
                    self.addattr(name)
                    # Keep scale consistent as the reciprocal of range.
                    if (self.range[0] != 0.):
                        self.__dict__['scale'][0] = 1./self.range[0]
                elif name == 'scale':
                    self.addattr(name)
                    # Keep range consistent as the reciprocal of scale.
                    if (self.scale[0] != 0.):
                        self.__dict__['range'][0] = 1./self.scale[0]
                else:
                    self.addattr(name)
        else:
            super(sceneObj, self).__setattr__(name, value)
    """
    def __getattribute__(self, name):
        if name in ['range','scale']:
            if name == 'range':
                return object.__getattribute__(self, 'range')[0]
            elif name == 'scale':
                if object.__getattribute__(self, 'range')[0] != 0.0:
                    return 1.0/object.__getattribute__(self, 'range')[0]
                else:
                    return 1.0
        else:
            return super(sceneObj, self).__getattribute__(name)
    """
    def bind(self, *args):
        """
        Bind front-end events to a Python callback.
        args[0]: space-separated event names; args[1]: the callback;
        args[2] (optional): an arbitrary extra argument passed back.
        """
        cmd = {"cmd": "bind", "idx": self.idx, "selector": '#' + self.sceneId + ' canvas', "sceneguid": self.guid}
        if callable(args[1]):
            cmd['events'] = args[0]
            # Register the callback under a fresh guid so the front end can
            # refer back to it.
            guid = str(uuid.uuid4())
            callback_registry[guid] = args[1]
            cmd['callback'] = guid
            if inspect.isfunction(args[1]):
                cmd['events'] = self.evtns(args[0],args[1].__name__) # add func name namespace to events
        if len(args) > 2:
            # Serialize the arbitrary extra argument for the front end.
            obj = args[2]
            if type(obj) in [str, int, long, bool, float, tuple, complex]:
                cmd['arbArg'] = obj
            elif isinstance(obj, collections.Sequence):
                cmd['arbArg'] = self.encode_seq(obj)
            elif isinstance(obj, baseObj):
                cmd['arbArg'] = {'guido': obj.guid}
            else:
                cmd['arbArg'] = args[2]
        self.appendcmd(cmd)
    def unbind(self, *args):
        """Remove a binding previously created with bind()."""
        cmd = {"cmd": "unbind", "idx": self.idx, "selector": '#' + self.sceneId + ' canvas'}
        if callable(args[1]):
            cmd['events'] = args[0]
            if inspect.isfunction(args[1]):
                cmd['events'] = self.evtns(args[0],args[1].__name__) # add func name namespace to events
        self.appendcmd(cmd)
    def evtns(self,strs, ns):
        """Append '.<ns>' to each whitespace-separated event name in strs."""
        evts = strs.split()
        for i, evt in enumerate(evts):
            evts[i] = evt + "." + ns
        ns_evts = " ".join(evts)
        return ns_evts
    def encode_seq(self,seq):
        """Replace baseObj items in a list/tuple with {'guido': guid} markers."""
        if type(seq) is list:
            for i, item in enumerate(seq):
                if isinstance(item, baseObj):
                    seq[i] = {'guido': item.guid}
            return seq
        if type(seq) is tuple:
            seq2 = list(seq)
            for i, item in enumerate(seq2):
                if isinstance(item, baseObj):
                    seq2[i] = {'guido': item.guid}
            return seq2
        return []
    def __del__(self):
        # Detach mutation notifications before base cleanup.
        for attr in ['forward','range','scale','up','center']:
            object.__getattribute__(self, attr).remove_notification((self.addattr,attr))
        super(sceneObj, self).__del__()
class canvas(sceneObj):
    """
    A glowscript canvas embedded in the notebook. Each instance registers
    itself in canvas.displays and becomes the currently selected display.
    """
    sceneCnt = 0            # total canvases ever created (used for sceneId)
    selected_display = -1   # index of the currently selected display
    displays = []           # all canvas instances, by display_index
    display_idx = 0         # next display_index to hand out
    def __init__(self, visible=True, foreground=(1,1,1), background=(0,0,0), ambient=color.gray(0.2), stereo='redcyan',
                 stereodepth=1., x=0., y=0., height=480, width=640, title="", fullscreen=False,
                 exit=True, center=(0.,0.,0.), autocenter=True, forward=(0.,0.,-1.), fov=math.pi/3.,
                 range=1., scale=1., up=(0.,1.,0.), autoscale=True, userzoom=True, userspin=True, **kwargs):
        # BUG FIX: forward userspin to the base class; it was previously
        # accepted here but silently dropped, so it always defaulted to True.
        super(canvas, self).__init__(visible=visible, foreground=foreground, background=background, ambient=ambient, stereo=stereo,
                                     stereodepth=stereodepth, x=x, y=y, height=height, width=width, title=title, fullscreen=fullscreen,
                                     exit=exit, center=center, autocenter=autocenter, forward=forward, fov=fov,
                                     range=range, scale=scale, up=up, autoscale=autoscale, userzoom=userzoom, userspin=userspin, **kwargs)
        object.__setattr__(self, 'display_index', canvas.display_idx)
        object.__setattr__(self, 'sceneId', "scene%d" % (canvas.sceneCnt))
        # Register and select this display.
        canvas.displays.append(self)
        canvas.selected_display = canvas.display_idx
        canvas.display_idx += 1
        canvas.sceneCnt += 1
        #object.__setattr__(self, 'sceneId', "scene%d" % (canvas.sceneCnt))
        try:
            scene
        except NameError:
            display(HTML("""<div id="%s"><div id="glowscript" class="glowscript"></div></div>""" % (self.sceneId)))
            display(Javascript("""window.__context = { glowscript_container: $("#glowscript").removeAttr("id")}"""))
        else:
            pass
        # NOTE(review): the container below is emitted unconditionally, so
        # when `scene` is not yet defined the same HTML is emitted twice by
        # the except branch above -- confirm whether try/except is still needed.
        display(HTML("""<div id="%s"><div id="glowscript" class="glowscript"></div></div>""" % (self.sceneId)))
        display(Javascript("""window.__context = { glowscript_container: $("#glowscript").removeAttr("id")}"""))
        # Initial canvas state pushed to the glowscript front end.
        cmd = {"cmd": "canvas", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "visible", "value": self.visible},
                         {"attr": "title", "value": self.title},
                         {"attr": "background", "value": list(self.background)},
                         {"attr": "ambient", "value": list(self.ambient)},
                         {"attr": "height", "value": self.height},
                         {"attr": "width", "value": self.width},
                         {"attr": "forward", "value": self.forward.values()},
                         {"attr": "fov", "value": self.fov},
                         {"attr": "range", "value": self.range[0]},
                         {"attr": "up", "value": self.up.values()},
                         {"attr": "center", "value": self.center.values()},
                         {"attr": "autoscale", "value": self.autoscale},
                         {"attr": "userzoom", "value": self.userzoom},
                         {"attr": "userspin", "value": self.userspin}
                         ]}
        self.appendcmd(cmd)
    def __setattr__(self, name, value):
        super(canvas, self).__setattr__(name, value)
    def __getattribute__(self, name):
        return super(canvas, self).__getattribute__(name)
    def select(self):
        """Make this canvas the target for subsequently created objects."""
        canvas.selected_display = self.display_index
    @classmethod
    def get_selected(cls):
        """Return the currently selected canvas, or None if there is none."""
        return cls.displays[cls.selected_display] if cls.selected_display >= 0 else None
    def _ipython_display_(self):
        # Re-render the glowscript container and reattach this scene to it.
        display_html('<div id="glowscript2" ><div id="glowscript" class="glowscript"></div></div>', raw=True)
        cmd = {"cmd": "redisplay", "idx": self.idx, "sceneId": self.sceneId}
        self.appendcmd(cmd)
class idisplay(canvas):
    """Backwards-compatible alias of canvas (classic VPython's 'display')."""
    def __init__(self, visible=True, foreground=(1,1,1), background=(0,0,0), ambient=color.gray(0.2), stereo='redcyan',
                 stereodepth=1., x=0., y=0., height=480, width=640, title="", fullscreen=False,
                 exit=True, center=(0.,0.,0.), autocenter=True, forward=(0.,0.,-1.), fov=math.pi/3.,
                 range=1., scale=1., up=(0.,1.,0.), autoscale=True, userzoom=True, userspin=True, **kwargs):
        # BUG FIX: forward userspin to the base class; it was previously
        # accepted here but silently dropped, so it always defaulted to True.
        super(idisplay, self).__init__(visible=visible, foreground=foreground, background=background, ambient=ambient, stereo=stereo,
                                       stereodepth=stereodepth, x=x, y=y, height=height, width=width, title=title, fullscreen=fullscreen,
                                       exit=exit, center=center, autocenter=autocenter, forward=forward, fov=fov,
                                       range=range, scale=scale, up=up, autoscale=autoscale, userzoom=userzoom, userspin=userspin, **kwargs)
    def __setattr__(self, name, value):
        super(idisplay, self).__setattr__(name, value)
class defaultscene(sceneObj):
    """The implicit scene created at import time (sceneId 'scene0')."""
    def __init__(self):
        super(defaultscene, self).__init__()
        object.__setattr__(self, 'sceneId', "scene0")
        self.appendcmd({"cmd": "scene", "idx": self.idx})
    def __setattr__(self, name, value):
        # No scene-specific attribute handling; defer to sceneObj.
        super(defaultscene, self).__setattr__(name, value)
    def __getattribute__(self, name):
        return super(defaultscene, self).__getattribute__(name)
    def _ipython_display_(self):
        # Re-render the glowscript container and reattach this scene to it.
        display_html('<div id="glowscript2" ><div id="glowscript" class="glowscript"></div></div>', raw=True)
        self.appendcmd({"cmd": "redisplay", "idx": self.idx, "sceneId": self.sceneId})
class local_light(baseObj):
    """see lighting documentation at http://vpython.org/contents/docs/lights.html"""
    def __init__(self, pos=(0.,0.,0.), color=(1.,1.,1.), frame=None, display=None, **kwargs):
        super(local_light, self).__init__(**kwargs)
        # Bypass __setattr__ during construction (no update commands yet).
        object.__setattr__(self, 'pos', vector(pos) if type(pos) is tuple else pos)
        object.__setattr__(self, 'color', color)
        object.__setattr__(self, 'display', display)
        object.__setattr__(self, 'frame', frame)
        cmd = {"cmd": "local_light", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "pos", "value": self.pos.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}
                         ]}
        # Register with the selected canvas's light list, if any.
        if (canvas.get_selected() != None):
            canvas.get_selected().lights.append(self)
        self.appendcmd(cmd)
    def __setattr__(self, name, value):
        # pos/color updates are queued directly on baseObj.cmds; everything
        # else uses the base-class machinery.
        if name in ['pos']:
            self.__dict__[name] = vector(value) if type(value) is tuple else value
            cmd = {}
            if name == 'pos':
                cmd = {"idx": self.idx, "attr": name, "val": self.pos.values()}
                baseObj.cmds.append(cmd)
        elif name == 'color':
            self.__dict__[name] = value
            cmd = {"idx": self.idx, "attr": name, "val": list(self.color)}
            baseObj.cmds.append(cmd)
        else:
            super(local_light, self).__setattr__(name, value)
class distant_light(baseObj):
    """see lighting documentation at http://vpython.org/contents/docs/lights.html"""
    def __init__(self, direction=(0.,0.,0.), color=(1.,1.,1.), frame=None, display=None, **kwargs):
        super(distant_light, self).__init__(**kwargs)
        # Bypass __setattr__ during construction (no update commands yet).
        object.__setattr__(self, 'direction', vector(direction) if type(direction) is tuple else direction)
        object.__setattr__(self, 'color', color)
        object.__setattr__(self, 'display', display)
        object.__setattr__(self, 'frame', frame)
        cmd = {"cmd": "distant_light", "idx": self.idx, "guid": self.guid,
               "attrs": [{"attr": "direction", "value": self.direction.values()},
                         {"attr": "color", "value": list(self.color)},
                         {"attr": "canvas", "value": self.display.idx if self.display != None else canvas.get_selected().idx if canvas.get_selected() != None else -1}
                         ]}
        # Register with the selected canvas's light list, if any.
        if (canvas.get_selected() != None):
            canvas.get_selected().lights.append(self)
        self.appendcmd(cmd)
    def __setattr__(self, name, value):
        # direction/color updates are queued directly on baseObj.cmds;
        # everything else uses the base-class machinery.
        if name in ['direction']:
            self.__dict__[name] = vector(value) if type(value) is tuple else value
            cmd = {}
            if name == 'direction':
                cmd = {"idx": self.idx, "attr": name, "val": self.direction.values()}
                baseObj.cmds.append(cmd)
        elif name == 'color':
            self.__dict__[name] = value
            cmd = {"idx": self.idx, "attr": name, "val": list(self.color)}
            baseObj.cmds.append(cmd)
        else:
            super(distant_light, self).__setattr__(name, value)
scene = None
# Wait (up to ~10s) for the glowscript/browser side to attach before the
# default scene is created; pump the IPython kernel once per second so
# comm messages can arrive while we block.
for i in range(10):
    if (baseObj.glow != None):
        break
    else:
        time.sleep(1.0)
    #if IPython.__version__ >= '3.0.0' :
    with glowlock:
        get_ipython().kernel.do_one_iteration()
scene = defaultscene() | PypiClean |
/GLAM-0.3.7-py3-none-any.whl/glam/parsing/_dataset.py | import tensorflow as tf
import pandas as pd
import numpy as np
import string, re
import os
import random
from glam.utils import lookups, typo
from glam.utils.utils import choose
# Token classes used to label each character of a generated address.
# Order matters: a label is encoded as its index into this list, and
# 'blank' (index 0) marks separators / non-address characters.
labels_list = [
    'blank',
    'building_name',
    'level',
    'unit',
    'first_number',
    'first_number_suffix',
    'second_number',
    'street_name',
    'suburb_town_city',
    'postcode',
]
# Number of labels in total (+1 for the blank category)
n_labels = len(labels_list)
# Allowable characters for the encoded representation
vocab = list(string.digits + string.ascii_lowercase + string.punctuation + string.whitespace)
def vocab_lookup(characters):
    """
    Takes in a string and returns a numpy array of encoded vocab indices.

    Input is lower-cased first; characters not present in `vocab` are
    silently dropped. Indices are 1-based (0 is left free for padding).

    Inputs:
        characters: the character string to be encoded
    Outputs:
        return: np.ndarray of integer vocab indices
    """
    # Build the char -> 1-based-index map once instead of performing an
    # O(len(vocab)) list.index() scan for every character.
    index = {c: i + 1 for i, c in enumerate(vocab)}
    return np.array([index[c] for c in characters.lower() if c in index])
def labels(text, field_name, mutate = True):
    """
    Takes a string and label class and creates label list. Typos are applied if mutate == True

    Inputs:
        text: the text to label (None is treated as the empty string)
        field_name: name of the class to use as label
        mutate: whether to apply typos or not. Default is to apply typos
    Outputs:
        text and list of labels (one label index per character)
    """
    if text is None:
        cleaned = ''
    elif mutate:
        # Introduce artificial typos for data augmentation.
        cleaned = typo.generate_typo(str(text))
    else:
        cleaned = str(text)
    class_index = labels_list.index(field_name)
    return cleaned, [class_index] * len(cleaned)
def join_labels(lbls, sep= " "):
    """
    Concatenates a series of label matrices with a separator

    :param lbls: a list of labels matrices
    :param sep: the separator string or function that returns the sep string
    :return: the concatenated labels
    """
    # NOTE(review): with fewer than two matrices the *list* is returned
    # unchanged (not a single matrix) -- callers must handle this shape.
    if len(lbls) < 2:
        return lbls

    joined_labels = None
    sep_str = None

    # if `sep` is not a function, set the separator (`sep_str`) to `sep`, otherwise leave as None
    if not callable(sep):
        sep_str = sep

    for l in lbls:
        if joined_labels is None:
            # First matrix starts the accumulator.
            joined_labels = l
        else:
            # If `sep` is a function, call it on each iteration
            if callable(sep):
                sep_str = sep()

            # Skip zero-length labels
            if len(l) == 0:
                continue
            elif sep_str is not None and len(sep_str) > 0 and len(joined_labels) > 0:
                # Join using sep_str if it's present and non-zero in length;
                # the separator's characters are labelled 'blank' (no typos).
                joined_labels = np.concatenate([joined_labels, labels(sep_str, 'blank', mutate=False)[1], l], axis=0)
            else:
                # Otherwise, directly concatenate the labels
                joined_labels = np.concatenate([joined_labels, l], axis=0)

    return joined_labels
def join_str_and_labels(parts, sep = " "):
    """
    Joins the strings and labels using the given separator

    :param parts: a list of string/label tuples
    :param sep: a string or function that returns the string to be used as a separator
    :return: the joined string and labels
    """
    # Keep only the parts with strings of length > 0
    parts = [p for p in parts if len(p[0]) > 0]

    # If there are no parts at all, return an empty string an array of shape (0, n_labels)
    if len(parts) == 0:
        return '', np.zeros((0, n_labels))

    # If there's only one part, just give it back as-is
    elif len(parts) == 1:
        return parts[0]

    # Pre-generate the separators - this is important if `sep` is a function returning non-deterministic results
    n_sep = len(parts) - 1
    if callable(sep):
        seps = [sep() for _ in range(n_sep)]
    else:
        seps = [sep] * n_sep
    # Trailing empty separator so zip() pairs the last part with "".
    seps += ['']

    # Join the strings using the list of separators
    # (each zip item is ((string, labels), sep); summing the 2-tuples
    # flattens them into one tuple of alternating strings).
    strings = ''.join(sum([(s[0][0], s[1]) for s in zip(parts, seps)], ()))

    # Join the labels using an iterator function
    sep_iter = iter(seps)
    lbls = join_labels([s[1] for s in parts], sep=lambda: next(sep_iter))

    # Sanity check: every character must have exactly one label.
    assert len(strings) == lbls.shape[0], "string length %i (%s), label length %i using sep %s" % (
        len(strings), strings, lbls.shape[0], seps)

    return strings, lbls
def generate_building_name(s):
    """Label `s` as a building name, randomly appending a dwelling type
    (with the space either before or after the appended word)."""
    # do some random stuff and return as label
    if len(s) == 0:
        return labels(s,'building_name')

    def add_building(s):
        if random.getrandbits(1):
            s += ' ' + random.choice(lookups.dwelling_types)
        else:
            s += random.choice(lookups.dwelling_types) + ' '
        return s

    # 50/50: augment with a dwelling type, or leave unchanged.
    s = choose(lambda: add_building(s), lambda : s)

    return labels(s,'building_name')
def generate_level(s):
    """Render level number `s` as a randomly-formatted floor/level phrase
    (e.g. 'level 3', '3rd floor', 'ground floor' style) and label it."""
    if len(s) == 0:
        return labels(s,'level')

    def number_first(s):
        # e.g. 'third floor'; a level of '0' falls back to a bare level word.
        if s =='0':
            return random.choice(lookups.level_types)
        else:
            r = random.choice(['ordinal','ordinal_words'])
            return lookups.num2word(s,r) + ' ' + random.choice(['level','lvl','floor','flr'])

    def number_last(s):
        # e.g. 'level three' / 'floor 3'.
        if s =='0':
            return random.choice(['floor','level','lv','flr','floor']) + ' ' + random.choice(['0','zero'])
        else:
            return random.choice(['level','lvl','lv','floor','flr']) + ' ' + random.choice([lookups.num2word(s,'cardinal'),s])

    s = choose(lambda : number_first(s), lambda : number_last(s))

    return labels(s,'level')
def generate_unit(s):
    """Label `s` as a unit, prefixing a random dwelling type half of the time."""
    if len(s) == 0:
        return labels(s, 'unit')
    def prefixed(unit):
        return random.choice(lookups.dwelling_types) + ' ' + unit
    s = choose(lambda: prefixed(s), lambda: s, chance1 = 0.5)
    return labels(s, 'unit')
def generate_first_number(s):
    """Label `s` as the first street number; no textual mutation is applied."""
    return labels(s, 'first_number')
def generate_first_number_suffix(s):
    """Label `s` as the first street number's suffix; no textual mutation is applied."""
    return labels(s, 'first_number_suffix')
def generate_second_number(s):
    """Label `s` as the second street number; no textual mutation is applied."""
    return labels(s, 'second_number')
def generate_street_name(s):
    """Label `s` as a street name, randomly abbreviating its street-type suffix."""
    if len(s) == 0:
        return labels(s, 'street_name')
    def abbreviate_street(s):
        # Replace a recognised street type at the END of the string with its
        # abbreviation (e.g. "... Street" -> "... St").
        for k in lookups.street_abbreviations_reversed.keys():
            # NOTE(review): the membership test matches anywhere in the string,
            # but the substitution is anchored to the end, so a mid-string match
            # is a no-op — confirm this asymmetry is intended.
            if k.lower() in s.lower():
                s = re.sub(k + '$', lookups.street_abbreviations_reversed[k], s, flags = re.IGNORECASE)
                # A trailing `continue` here was a no-op (last statement of the
                # loop body) and has been removed; every remaining key is still
                # tried, exactly as before.
        return s
    s = choose(lambda: abbreviate_street(s), lambda: s)
    return labels(s, 'street_name')
def generate_suburb_town_city(s):
    """
    Label `s` as a suburb/town/city, occasionally disemvowelling individual
    words and usually stripping commas.
    """
    def remove_vowels(word):
        return re.sub(r'[aeiou]', '', word, flags=re.IGNORECASE)
    words = s.split(' ')
    new_word = []
    for word in words:
        # 5% chance per word of dropping its vowels.
        new_word.append(choose(lambda: remove_vowels(word), lambda: word, chance1 = 0.05))
    words = ' '.join(new_word)
    if np.random.uniform() < 0.6:
        # BUG FIX: str.replace returns a new string; the original discarded the
        # result, so commas were never actually removed.
        words = words.replace(',', '')
    return labels(words, 'suburb_town_city')
def generate_postcode(s):
    """Label `s` as a postcode; no textual mutation is applied."""
    return labels(s, 'postcode')
def random_separator(min_length = 1, max_length = 3, possible_sep_chars = r",.-/\ "):
    """
    Build a separator of random length: spaces, with at most one slot replaced
    by a random character drawn from `possible_sep_chars`.

    :param min_length: minimum separator length
    :param max_length: maximum separator length
    :param possible_sep_chars: candidate characters for the replaced slot; a
        falsy value (None/'') yields an all-space separator
    :return: the separator string
    """
    length = random.randint(min_length, max_length)
    pieces = [" "] * length
    if pieces and possible_sep_chars:
        # Overwrite one random position with a random candidate character
        # (which may itself be a space, leaving the separator all spaces).
        pieces[random.randrange(length)] = random.choice(possible_sep_chars)
    return ''.join(pieces)
def sythnesise_address(record):
    '''
    Builds a randomised, labelled address string from a single linz record,
    encoded via the vocab and labels list.
    Inputs:
        record: a row from the linz dataset
    Outputs:
        encoded address and labels
    '''
    # Generate every labelled component from the record's fields.
    bldg = generate_building_name(record['building_name'])
    lvl = generate_level(record['level_number'])
    unit_part = generate_unit(record['unit'])
    first_num = generate_first_number(record['address_number'])
    first_suffix = generate_first_number_suffix(record['first_number_suffix'])
    second_num = generate_second_number(record['second_number'])
    street_part = generate_street_name(record['full_road_name_ascii'])
    suburb = generate_suburb_town_city(record['suburb_town_city'])
    post = generate_postcode(record['postcode'])
    # A short unit (bare number) is glued onto the first street number with a
    # separator; a longer unit travels with the building name and level instead.
    if len(unit_part[0]) < 3:
        first_num = join_str_and_labels([unit_part, first_num], sep=lambda: random_separator(1, 2, r",._/\ "))
        head = [bldg, lvl]
    else:
        head = [bldg, lvl, unit_part]
    # Emit the head components in random order.
    random.shuffle(head)
    parts = list(head)
    # Choose the separator set for the house numbers: avoid '-' between the
    # number and its suffix when a second number (joined with '-') is present.
    if len(second_num[0]) > 0:
        allowed_seps = r"./\ "
    else:
        allowed_seps = r".-/\ "
    numbered = join_str_and_labels([first_num, first_suffix],
                                   sep=lambda: random_separator(0, 1, allowed_seps)
                                   )
    street_numbers = join_str_and_labels([numbered, second_num],
                                         sep="-"
                                         )
    # Street numbers appear in roughly 70% of addresses.
    if np.random.uniform() <= 0.70:
        parts.append(street_numbers)
    # The street itself always appears.
    parts.append(street_part)
    # Postcode always appears; the suburb/town/city joins it half the time.
    tail = [post]
    choose(lambda: tail.append(suburb), chance1 = 0.5)
    random.shuffle(tail)
    parts.extend(tail)
    address, labels_encoded = join_str_and_labels(parts, sep=lambda: random_separator(1, 2, r', '))
    # Encode the characters through the vocab (tf2 output format).
    return vocab_lookup(address), labels_encoded
def build_dataset(balanced_linz, outdir, n_records, val_split):
    """
    Creates training and validation tfrecord files from the balanced linz csv,
    which must have been produced beforehand.
    Inputs:
        balanced_linz: path to the balanced linz csv
        outdir: directory in which the tfrecord files are written
        n_records: total (training + validation) rows to synthesise; 0 means
            use the whole dataset
        val_split: fraction of the sample withheld for validation
    Outputs:
        return: None. Writes train.tfrecord and val.tfrecord under outdir.
    """
    # Load the balanced linz dataset; blank cells become empty strings.
    linz = pd.read_csv(balanced_linz, dtype=str).fillna('')
    # Sample the requested number of rows (0 selects everything).
    if n_records == 0:
        samp = linz
    else:
        samp = linz.sample(n_records)
    # Synthesise and encode an address + labels for every sampled row.
    print(' sythnesising addresses')
    samp['train'] = samp.progress_apply(sythnesise_address, axis = 1)
    # Split off the validation rows; the rest become the training set.
    val = samp.sample(int(val_split*len(samp)))
    train = samp.drop(val.index)
    def get_padded_sequences(df):
        # Post-pad the encoded addresses and labels to uniform length and
        # pair them up.
        features = tf.keras.preprocessing.sequence.pad_sequences(df['train'].apply(lambda x: x[0]), padding ='post')
        labels = tf.keras.preprocessing.sequence.pad_sequences(df['train'].apply(lambda x: x[1]), padding ='post')
        return zip(features, labels)
    def write_ds(ds, path):
        # Serialise each (x, y) pair as a tf.train.Example record.
        with tf.io.TFRecordWriter(path) as file_writer:
            for x, y in ds:
                example = tf.train.Example(features=tf.train.Features(feature={
                    "x": tf.train.Feature(int64_list=tf.train.Int64List(value=x)),
                    "y": tf.train.Feature(int64_list=tf.train.Int64List(value=y)),
                }))
                file_writer.write(example.SerializeToString())
    # Pad the encoded addresses and labels.
    val = get_padded_sequences(val)
    train = get_padded_sequences(train)
    # Ensure the output directory exists, then write both datasets.
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    train_path = os.path.join(outdir, 'train.tfrecord')
    val_path = os.path.join(outdir, 'val.tfrecord')
    print(' writing train set to tfrecord file as ' + train_path)
    write_ds(train, path = train_path)
    print(' writing validation set to tfrecord file at ' + val_path)
    write_ds(val, path = val_path)
    print(' complete')
/Extremes-1.1.1.zip/Extremes-1.1.1/README.txt | ===============================
"Minimum" and "Maximum" Objects
===============================
The ``peak.util.extremes`` module provides a production-quality implementation
of the ``Min`` and ``Max`` objects from PEP 326. While PEP 326 was rejected
for inclusion in the language or standard library, the objects described in it
are useful in a variety of applications. In PEAK, they have been used to
implement generic functions (in RuleDispatch and PEAK-Rules), as well as to
handle scheduling and time operations in the Trellis. Because this has led to
each project copying the same code, we've now split the module out so it can
be used independently.
Some simple usage examples::
>>> from peak.util.extremes import Min, Max
>>> import sys
>>> Min < -sys.maxint
True
>>> Min < None
True
>>> Min < ''
True
>>> Max > sys.maxint
True
>>> Max > 99999999999999999
True
>>> type(Min)
<class 'peak.util.extremes.Extreme'>
The ``Min`` object compares less than any other object but itself, while the
``Max`` object compares greater than any other object but itself. Both are
instances of the ``Extreme`` type.
While the original PEP 326 implementation of these extreme values is shorter
than the version used here, it contains a flaw: it does not correctly handle
comparisons with classic class instances. Therefore, this version defines
methods for all six rich comparison operators, to ensure correct support for
classic as well as new-style classes::
>>> Max >= Min and Max > Min and Max==Max and Max!=Min
True
>>> Max < Min or Max <= Min or Max==Min or Max!=Max
False
>>> Min <= Max and Min < Max and Min==Min and Min!=Max
True
>>> Min > Max or Min >= Max or Min==Max or Min!=Min
False
>>> class X:
... """Ensure rich comparisons work correctly with classic classes"""
>>> x = X()
>>> Min<x<Max and Min<=x<=Max and Min!=x!=Max and Max!=x!=Min
True
>>> Min>x or x>Max or x<=Min or x>=Max or x==Min or Min==x
False
| PypiClean |
/Abies-0.0.5.tar.gz/Abies-0.0.5/extern/pybind11/docs/advanced/cast/stl.rst | STL containers
##############
Automatic conversion
====================
When including the additional header file :file:`pybind11/stl.h`, conversions
between ``std::vector<>``/``std::deque<>``/``std::list<>``/``std::array<>``/``std::valarray<>``,
``std::set<>``/``std::unordered_set<>``, and
``std::map<>``/``std::unordered_map<>`` and the Python ``list``, ``set`` and
``dict`` data structures are automatically enabled. The types ``std::pair<>``
and ``std::tuple<>`` are already supported out of the box with just the core
:file:`pybind11/pybind11.h` header.
The major downside of these implicit conversions is that containers must be
converted (i.e. copied) on every Python->C++ and C++->Python transition, which
can have implications on the program semantics and performance. Please read the
next sections for more details and alternative approaches that avoid this.
.. note::
Arbitrary nesting of any of these types is possible.
.. seealso::
The file :file:`tests/test_stl.cpp` contains a complete
example that demonstrates how to pass STL data types in more detail.
.. _cpp17_container_casters:
C++17 library containers
========================
The :file:`pybind11/stl.h` header also includes support for ``std::optional<>``
and ``std::variant<>``. These require a C++17 compiler and standard library.
In C++14 mode, ``std::experimental::optional<>`` is supported if available.
Various versions of these containers also exist for C++11 (e.g. in Boost).
pybind11 provides an easy way to specialize the ``type_caster`` for such
types:
.. code-block:: cpp
// `boost::optional` as an example -- can be any `std::optional`-like container
namespace pybind11 { namespace detail {
template <typename T>
struct type_caster<boost::optional<T>> : optional_caster<boost::optional<T>> {};
}}
The above should be placed in a header file and included in all translation units
where automatic conversion is needed. Similarly, a specialization can be provided
for custom variant types:
.. code-block:: cpp
// `boost::variant` as an example -- can be any `std::variant`-like container
namespace pybind11 { namespace detail {
template <typename... Ts>
struct type_caster<boost::variant<Ts...>> : variant_caster<boost::variant<Ts...>> {};
// Specifies the function used to visit the variant -- `apply_visitor` instead of `visit`
template <>
struct visit_helper<boost::variant> {
template <typename... Args>
static auto call(Args &&...args) -> decltype(boost::apply_visitor(args...)) {
return boost::apply_visitor(args...);
}
};
}} // namespace pybind11::detail
The ``visit_helper`` specialization is not required if your ``name::variant`` provides
a ``name::visit()`` function. For any other function name, the specialization must be
included to tell pybind11 how to visit the variant.
.. warning::
When converting a ``variant`` type, pybind11 follows the same rules as when
determining which function overload to call (:ref:`overload_resolution`), and
so the same caveats hold. In particular, the order in which the ``variant``'s
alternatives are listed is important, since pybind11 will try conversions in
this order. This means that, for example, when converting ``variant<int, bool>``,
the ``bool`` variant will never be selected, as any Python ``bool`` is already
an ``int`` and is convertible to a C++ ``int``. Changing the order of alternatives
(and using ``variant<bool, int>``, in this example) provides a solution.
.. note::
pybind11 only supports the modern implementation of ``boost::variant``
which makes use of variadic templates. This requires Boost 1.56 or newer.
Additionally, on Windows, MSVC 2017 is required because ``boost::variant``
falls back to the old non-variadic implementation on MSVC 2015.
.. _opaque:
Making opaque types
===================
pybind11 heavily relies on a template matching mechanism to convert parameters
and return values that are constructed from STL data types such as vectors,
linked lists, hash tables, etc. This even works in a recursive manner, for
instance to deal with lists of hash maps of pairs of elementary and custom
types, etc.
However, a fundamental limitation of this approach is that internal conversions
between Python and C++ types involve a copy operation that prevents
pass-by-reference semantics. What does this mean?
Suppose we bind the following function
.. code-block:: cpp
void append_1(std::vector<int> &v) {
v.push_back(1);
}
and call it from Python, the following happens:
.. code-block:: pycon
>>> v = [5, 6]
>>> append_1(v)
>>> print(v)
[5, 6]
As you can see, when passing STL data structures by reference, modifications
are not propagated back to the Python side. A similar situation arises when
exposing STL data structures using the ``def_readwrite`` or ``def_readonly``
functions:
.. code-block:: cpp
/* ... definition ... */
class MyClass {
std::vector<int> contents;
};
/* ... binding code ... */
py::class_<MyClass>(m, "MyClass")
.def(py::init<>())
.def_readwrite("contents", &MyClass::contents);
In this case, properties can be read and written in their entirety. However, an
``append`` operation involving such a list type has no effect:
.. code-block:: pycon
>>> m = MyClass()
>>> m.contents = [5, 6]
>>> print(m.contents)
[5, 6]
>>> m.contents.append(7)
>>> print(m.contents)
[5, 6]
Finally, the involved copy operations can be costly when dealing with very
large lists. To deal with all of the above situations, pybind11 provides a
macro named ``PYBIND11_MAKE_OPAQUE(T)`` that disables the template-based
conversion machinery of types, thus rendering them *opaque*. The contents of
opaque objects are never inspected or extracted, hence they *can* be passed by
reference. For instance, to turn ``std::vector<int>`` into an opaque type, add
the declaration
.. code-block:: cpp
PYBIND11_MAKE_OPAQUE(std::vector<int>);
before any binding code (e.g. invocations to ``class_::def()``, etc.). This
macro must be specified at the top level (and outside of any namespaces), since
it adds a template instantiation of ``type_caster``. If your binding code consists of
multiple compilation units, it must be present in every file (typically via a
common header) preceding any usage of ``std::vector<int>``. Opaque types must
also have a corresponding ``class_`` declaration to associate them with a name
in Python, and to define a set of available operations, e.g.:
.. code-block:: cpp
py::class_<std::vector<int>>(m, "IntVector")
.def(py::init<>())
.def("clear", &std::vector<int>::clear)
.def("pop_back", &std::vector<int>::pop_back)
.def("__len__", [](const std::vector<int> &v) { return v.size(); })
.def("__iter__", [](std::vector<int> &v) {
return py::make_iterator(v.begin(), v.end());
}, py::keep_alive<0, 1>()) /* Keep vector alive while iterator is used */
// ....
.. seealso::
The file :file:`tests/test_opaque_types.cpp` contains a complete
example that demonstrates how to create and expose opaque types using
pybind11 in more detail.
.. _stl_bind:
Binding STL containers
======================
The ability to expose STL containers as native Python objects is a fairly
common request, hence pybind11 also provides an optional header file named
:file:`pybind11/stl_bind.h` that does exactly this. The mapped containers try
to match the behavior of their native Python counterparts as much as possible.
The following example showcases usage of :file:`pybind11/stl_bind.h`:
.. code-block:: cpp
// Don't forget this
#include <pybind11/stl_bind.h>
PYBIND11_MAKE_OPAQUE(std::vector<int>);
PYBIND11_MAKE_OPAQUE(std::map<std::string, double>);
// ...
// later in binding code:
py::bind_vector<std::vector<int>>(m, "VectorInt");
py::bind_map<std::map<std::string, double>>(m, "MapStringDouble");
When binding STL containers pybind11 considers the types of the container's
elements to decide whether the container should be confined to the local module
(via the :ref:`module_local` feature). If the container element types are
anything other than already-bound custom types bound without
``py::module_local()`` the container binding will have ``py::module_local()``
applied. This includes converting types such as numeric types, strings, Eigen
types; and types that have not yet been bound at the time of the stl container
binding. This module-local binding is designed to avoid potential conflicts
between module bindings (for example, from two separate modules each attempting
to bind ``std::vector<int>`` as a python type).
It is possible to override this behavior to force a definition to be either
module-local or global. To do so, you can pass the attributes
``py::module_local()`` (to make the binding module-local) or
``py::module_local(false)`` (to make the binding global) into the
``py::bind_vector`` or ``py::bind_map`` arguments:
.. code-block:: cpp
py::bind_vector<std::vector<int>>(m, "VectorInt", py::module_local(false));
Note, however, that such a global binding would make it impossible to load this
module at the same time as any other pybind module that also attempts to bind
the same container type (``std::vector<int>`` in the above example).
See :ref:`module_local` for more details on module-local bindings.
.. seealso::
The file :file:`tests/test_stl_binders.cpp` shows how to use the
convenience STL container wrappers.
| PypiClean |
/Kloudio-1.0.0.tar.gz/Kloudio-1.0.0/kloudio/models/new_connection.py | import pprint
import re # noqa: F401
import six
from kloudio.configuration import Configuration
class NewConnection(object):
"""
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'name': 'str',
'host': 'str',
'port': 'str',
'database': 'str',
'username': 'str',
'password': 'str',
'connection_type': 'str',
'db_type': 'str',
'production': 'bool',
'ssl': 'bool',
'share_with': 'object',
'enable_tunnel': 'bool',
'tunnel_info': 'object',
'other_options': 'object',
'fed': 'bool',
'dw': 'bool',
'metadata': 'str',
'integration_user_id': 'float',
'web_type': 'str',
'ssl_info': 'object'
}
attribute_map = {
'name': 'name',
'host': 'host',
'port': 'port',
'database': 'database',
'username': 'username',
'password': 'password',
'connection_type': 'connectionType',
'db_type': 'dbType',
'production': 'production',
'ssl': 'ssl',
'share_with': 'shareWith',
'enable_tunnel': 'enableTunnel',
'tunnel_info': 'tunnelInfo',
'other_options': 'otherOptions',
'fed': 'fed',
'dw': 'dw',
'metadata': 'metadata',
'integration_user_id': 'integrationUserId',
'web_type': 'webType',
'ssl_info': 'sslInfo'
}
def __init__(self, name=None, host=None, port=None, database=None, username=None, password=None, connection_type=None, db_type=None, production=None, ssl=None, share_with=None, enable_tunnel=None, tunnel_info=None, other_options=None, fed=None, dw=None, metadata=None, integration_user_id=None, web_type=None, ssl_info=None, local_vars_configuration=None): # noqa: E501
"""NewConnection - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._name = None
self._host = None
self._port = None
self._database = None
self._username = None
self._password = None
self._connection_type = None
self._db_type = None
self._production = None
self._ssl = None
self._share_with = None
self._enable_tunnel = None
self._tunnel_info = None
self._other_options = None
self._fed = None
self._dw = None
self._metadata = None
self._integration_user_id = None
self._web_type = None
self._ssl_info = None
self.discriminator = None
self.name = name
self.host = host
self.port = port
self.database = database
self.username = username
self.password = password
self.connection_type = connection_type
self.db_type = db_type
if production is not None:
self.production = production
if ssl is not None:
self.ssl = ssl
if share_with is not None:
self.share_with = share_with
if enable_tunnel is not None:
self.enable_tunnel = enable_tunnel
if tunnel_info is not None:
self.tunnel_info = tunnel_info
if other_options is not None:
self.other_options = other_options
if fed is not None:
self.fed = fed
if dw is not None:
self.dw = dw
if metadata is not None:
self.metadata = metadata
if integration_user_id is not None:
self.integration_user_id = integration_user_id
if web_type is not None:
self.web_type = web_type
if ssl_info is not None:
self.ssl_info = ssl_info
@property
def name(self):
"""Gets the name of this NewConnection. # noqa: E501
The name of the connection, has to be unique for each user, cannot start with numeric characters. # noqa: E501
:return: The name of this NewConnection. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this NewConnection.
The name of the connection, has to be unique for each user, cannot start with numeric characters. # noqa: E501
:param name: The name of this NewConnection. # noqa: E501
:type name: str
"""
if self.local_vars_configuration.client_side_validation and name is None: # noqa: E501
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def host(self):
"""Gets the host of this NewConnection. # noqa: E501
Host of the connection. To use localhost, please install Kloudio Gateway. # noqa: E501
:return: The host of this NewConnection. # noqa: E501
:rtype: str
"""
return self._host
@host.setter
def host(self, host):
"""Sets the host of this NewConnection.
Host of the connection. To use localhost, please install Kloudio Gateway. # noqa: E501
:param host: The host of this NewConnection. # noqa: E501
:type host: str
"""
if self.local_vars_configuration.client_side_validation and host is None: # noqa: E501
raise ValueError("Invalid value for `host`, must not be `None`") # noqa: E501
self._host = host
@property
def port(self):
"""Gets the port of this NewConnection. # noqa: E501
Port for the connection # noqa: E501
:return: The port of this NewConnection. # noqa: E501
:rtype: str
"""
return self._port
@port.setter
def port(self, port):
"""Sets the port of this NewConnection.
Port for the connection # noqa: E501
:param port: The port of this NewConnection. # noqa: E501
:type port: str
"""
if self.local_vars_configuration.client_side_validation and port is None: # noqa: E501
raise ValueError("Invalid value for `port`, must not be `None`") # noqa: E501
self._port = port
@property
def database(self):
"""Gets the database of this NewConnection. # noqa: E501
Database name # noqa: E501
:return: The database of this NewConnection. # noqa: E501
:rtype: str
"""
return self._database
@database.setter
def database(self, database):
"""Sets the database of this NewConnection.
Database name # noqa: E501
:param database: The database of this NewConnection. # noqa: E501
:type database: str
"""
if self.local_vars_configuration.client_side_validation and database is None: # noqa: E501
raise ValueError("Invalid value for `database`, must not be `None`") # noqa: E501
self._database = database
@property
def username(self):
"""Gets the username of this NewConnection. # noqa: E501
Username used for authentication # noqa: E501
:return: The username of this NewConnection. # noqa: E501
:rtype: str
"""
return self._username
@username.setter
def username(self, username):
"""Sets the username of this NewConnection.
Username used for authentication # noqa: E501
:param username: The username of this NewConnection. # noqa: E501
:type username: str
"""
if self.local_vars_configuration.client_side_validation and username is None: # noqa: E501
raise ValueError("Invalid value for `username`, must not be `None`") # noqa: E501
self._username = username
@property
def password(self):
"""Gets the password of this NewConnection. # noqa: E501
Password used for authentication # noqa: E501
:return: The password of this NewConnection. # noqa: E501
:rtype: str
"""
return self._password
@password.setter
def password(self, password):
"""Sets the password of this NewConnection.
Password used for authentication # noqa: E501
:param password: The password of this NewConnection. # noqa: E501
:type password: str
"""
if self.local_vars_configuration.client_side_validation and password is None: # noqa: E501
raise ValueError("Invalid value for `password`, must not be `None`") # noqa: E501
self._password = password
@property
def connection_type(self):
"""Gets the connection_type of this NewConnection. # noqa: E501
Accepted values: CUSTOMERSUCCESS,PAYMENT,CRM,SOCIAL,NOSQL,CLOUDSTORAGE,WEB,FINANCE,DATABASE,ECOMMERCE,AUTOMATION,ERP # noqa: E501
:return: The connection_type of this NewConnection. # noqa: E501
:rtype: str
"""
return self._connection_type
@connection_type.setter
def connection_type(self, connection_type):
"""Sets the connection_type of this NewConnection.
Accepted values: CUSTOMERSUCCESS,PAYMENT,CRM,SOCIAL,NOSQL,CLOUDSTORAGE,WEB,FINANCE,DATABASE,ECOMMERCE,AUTOMATION,ERP # noqa: E501
:param connection_type: The connection_type of this NewConnection. # noqa: E501
:type connection_type: str
"""
if self.local_vars_configuration.client_side_validation and connection_type is None: # noqa: E501
raise ValueError("Invalid value for `connection_type`, must not be `None`") # noqa: E501
self._connection_type = connection_type
@property
def db_type(self):
"""Gets the db_type of this NewConnection. # noqa: E501
Accepted values: MSSQL,INTERCOM,STRIPE,FACEBOOKPAGES,S4HANA,AIRTABLE,SHOPIFY,FRESHSUCCESS,HUBSPOT,ORACLEFINANCIALSCLOUD,FACEBOOKADS,MYSQL,ORACLE,PGSQL,SALESFORCE,SNOWFLAKE,QUICKBOOKSDESKTOP,GITHUB,SQUARE,QUICKBOOKS,SMARTSHEET,MONGODB,NETSUITE,AZURESQL,XERO,ZAPIER,ZENDESKCHAT,GOOGLEADWORDS,GOOGLEADMANAGER,BIGQUERY,AWSATHENA,DYNAMODB,MARIADB,CUSTOMAPI,JIRA,REDSHIFT,ZENDESKSUPPORT,OUTREACH,ANAPLAN,GOOGLEDRIVE,DROPBOX,AMAZONS3,DATABRICKS # noqa: E501
:return: The db_type of this NewConnection. # noqa: E501
:rtype: str
"""
return self._db_type
@db_type.setter
def db_type(self, db_type):
"""Sets the db_type of this NewConnection.
Accepted values: MSSQL,INTERCOM,STRIPE,FACEBOOKPAGES,S4HANA,AIRTABLE,SHOPIFY,FRESHSUCCESS,HUBSPOT,ORACLEFINANCIALSCLOUD,FACEBOOKADS,MYSQL,ORACLE,PGSQL,SALESFORCE,SNOWFLAKE,QUICKBOOKSDESKTOP,GITHUB,SQUARE,QUICKBOOKS,SMARTSHEET,MONGODB,NETSUITE,AZURESQL,XERO,ZAPIER,ZENDESKCHAT,GOOGLEADWORDS,GOOGLEADMANAGER,BIGQUERY,AWSATHENA,DYNAMODB,MARIADB,CUSTOMAPI,JIRA,REDSHIFT,ZENDESKSUPPORT,OUTREACH,ANAPLAN,GOOGLEDRIVE,DROPBOX,AMAZONS3,DATABRICKS # noqa: E501
:param db_type: The db_type of this NewConnection. # noqa: E501
:type db_type: str
"""
if self.local_vars_configuration.client_side_validation and db_type is None: # noqa: E501
raise ValueError("Invalid value for `db_type`, must not be `None`") # noqa: E501
self._db_type = db_type
@property
def production(self):
"""Gets the production of this NewConnection. # noqa: E501
:return: The production of this NewConnection. # noqa: E501
:rtype: bool
"""
return self._production
@production.setter
def production(self, production):
"""Sets the production of this NewConnection.
:param production: The production of this NewConnection. # noqa: E501
:type production: bool
"""
self._production = production
@property
def ssl(self):
"""Gets the ssl of this NewConnection. # noqa: E501
:return: The ssl of this NewConnection. # noqa: E501
:rtype: bool
"""
return self._ssl
@ssl.setter
def ssl(self, ssl):
"""Sets the ssl of this NewConnection.
:param ssl: The ssl of this NewConnection. # noqa: E501
:type ssl: bool
"""
self._ssl = ssl
@property
def share_with(self):
"""Gets the share_with of this NewConnection. # noqa: E501
:return: The share_with of this NewConnection. # noqa: E501
:rtype: object
"""
return self._share_with
@share_with.setter
def share_with(self, share_with):
"""Sets the share_with of this NewConnection.
:param share_with: The share_with of this NewConnection. # noqa: E501
:type share_with: object
"""
self._share_with = share_with
@property
def enable_tunnel(self):
"""Gets the enable_tunnel of this NewConnection. # noqa: E501
:return: The enable_tunnel of this NewConnection. # noqa: E501
:rtype: bool
"""
return self._enable_tunnel
@enable_tunnel.setter
def enable_tunnel(self, enable_tunnel):
"""Sets the enable_tunnel of this NewConnection.
:param enable_tunnel: The enable_tunnel of this NewConnection. # noqa: E501
:type enable_tunnel: bool
"""
self._enable_tunnel = enable_tunnel
@property
def tunnel_info(self):
"""Gets the tunnel_info of this NewConnection. # noqa: E501
:return: The tunnel_info of this NewConnection. # noqa: E501
:rtype: object
"""
return self._tunnel_info
@tunnel_info.setter
def tunnel_info(self, tunnel_info):
"""Sets the tunnel_info of this NewConnection.
:param tunnel_info: The tunnel_info of this NewConnection. # noqa: E501
:type tunnel_info: object
"""
self._tunnel_info = tunnel_info
@property
def other_options(self):
"""Gets the other_options of this NewConnection. # noqa: E501
:return: The other_options of this NewConnection. # noqa: E501
:rtype: object
"""
return self._other_options
@other_options.setter
def other_options(self, other_options):
"""Sets the other_options of this NewConnection.
:param other_options: The other_options of this NewConnection. # noqa: E501
:type other_options: object
"""
self._other_options = other_options
    # --- Generated accessor pairs (swagger-codegen style) --------------------
    # Each @property returns the private backing attribute set in __init__;
    # each setter stores the new value without any validation.
    @property
    def fed(self):
        """Gets the fed of this NewConnection. # noqa: E501
        :return: The fed of this NewConnection. # noqa: E501
        :rtype: bool
        """
        return self._fed
    @fed.setter
    def fed(self, fed):
        """Sets the fed of this NewConnection.
        :param fed: The fed of this NewConnection. # noqa: E501
        :type fed: bool
        """
        self._fed = fed
    @property
    def dw(self):
        """Gets the dw of this NewConnection. # noqa: E501
        :return: The dw of this NewConnection. # noqa: E501
        :rtype: bool
        """
        return self._dw
    @dw.setter
    def dw(self, dw):
        """Sets the dw of this NewConnection.
        :param dw: The dw of this NewConnection. # noqa: E501
        :type dw: bool
        """
        self._dw = dw
    @property
    def metadata(self):
        """Gets the metadata of this NewConnection. # noqa: E501
        :return: The metadata of this NewConnection. # noqa: E501
        :rtype: str
        """
        return self._metadata
    @metadata.setter
    def metadata(self, metadata):
        """Sets the metadata of this NewConnection.
        :param metadata: The metadata of this NewConnection. # noqa: E501
        :type metadata: str
        """
        self._metadata = metadata
    @property
    def integration_user_id(self):
        """Gets the integration_user_id of this NewConnection. # noqa: E501
        :return: The integration_user_id of this NewConnection. # noqa: E501
        :rtype: float
        """
        return self._integration_user_id
    @integration_user_id.setter
    def integration_user_id(self, integration_user_id):
        """Sets the integration_user_id of this NewConnection.
        :param integration_user_id: The integration_user_id of this NewConnection. # noqa: E501
        :type integration_user_id: float
        """
        self._integration_user_id = integration_user_id
    @property
    def web_type(self):
        """Gets the web_type of this NewConnection. # noqa: E501
        :return: The web_type of this NewConnection. # noqa: E501
        :rtype: str
        """
        return self._web_type
    @web_type.setter
    def web_type(self, web_type):
        """Sets the web_type of this NewConnection.
        :param web_type: The web_type of this NewConnection. # noqa: E501
        :type web_type: str
        """
        self._web_type = web_type
    @property
    def ssl_info(self):
        """Gets the ssl_info of this NewConnection. # noqa: E501
        :return: The ssl_info of this NewConnection. # noqa: E501
        :rtype: object
        """
        return self._ssl_info
    @ssl_info.setter
    def ssl_info(self, ssl_info):
        """Sets the ssl_info of this NewConnection.
        :param ssl_info: The ssl_info of this NewConnection. # noqa: E501
        :type ssl_info: object
        """
        self._ssl_info = ssl_info
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
    def to_str(self):
        """Returns the string representation of the model"""
        # pprint gives a stable, readable multi-line rendering of the dict form.
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NewConnection):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, NewConnection):
return True
return self.to_dict() != other.to_dict() | PypiClean |
/HydroSensorReader-1.7.6.tar.gz/HydroSensorReader-1.7.6/hydsensread/file_reader/compagny_file_reader/campbell_cr_file_reader.py | from typing import List, Tuple
# Module metadata (IDE-template values left by the original author).
__author__ = 'Laptop$'
__date__ = '2018-04-09'
__description__ = " "
__version__ = '1.0'
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from hydsensread.file_reader.abstract_file_reader import TimeSeriesFileReader, date_list, LineDefinition
# Index of the first data row in the raw .dat file; rows 0-3 form the header
# (matches the default header_length of 4 used by the reader below).
VALUES_START = 4
# Key under which the combined 'Name (unit)' column labels are stored in
# header_content.
COL_HEADER = 'col_header'
class DATCampbellCRFileReader(TimeSeriesFileReader):
    """Time-series reader for Campbell Scientific CR-logger ``.dat`` files.

    Layout assumed by this code: row 0 is station metadata, row 1 column
    names, row 2 units, and data rows start at index VALUES_START (4).
    """
    def __init__(self, file_path: str = None, header_length: int = 4):
        super().__init__(file_path, header_length)
        # Raw data rows (everything after the header block), split on commas.
        self.datas = [i.split(',') for i in self.file_content[VALUES_START:]]
    @property
    def data_header(self):
        # 'Name (unit)' labels assembled by _read_file_data_header().
        return self.header_content[COL_HEADER]
    def read_file(self):
        # Timestamps must be extracted before the base class builds the records.
        self._date_list = self._get_date_list()
        super().read_file()
    def _read_file_header(self):
        """
        implementation of the base class abstract method:
        parse site name and instrument serial number from the first file row.
        """
        header_content = [i.replace('"', '') for i in self.file_content[0].split(',')]
        print(header_content)
        self.sites.site_name = header_content[-1]
        self.sites.instrument_serial_number = header_content[3]
    def _read_file_data(self):
        """
        implementation of the base class abstract method:
        build the records DataFrame, mapping '"NAN"' cells to numpy NaN.
        """
        datas = []
        # Columns 0-1 (timestamp and record number) are skipped here and in
        # the column labels below.
        for row in self.datas:
            row_content = []
            for val in row[2:]:
                if val == '"NAN"':
                    row_content.append(np.nan)
                else:
                    row_content.append(float(val))
            datas.append(row_content)
        self.records = pd.DataFrame(data=datas,
                                    index=self._date_list,
                                    columns=self.data_header[2:])
        self.remove_duplicates()
    def _read_file_data_header(self):
        """
        implementation of the base class abstract method:
        combine the name row and the unit row into 'Name (unit)' labels.
        """
        start_row = 1
        column_name = [i.replace('"', '') for i in self.file_content[start_row].split(',')]
        column_unit = [i.replace('"', '') for i in self.file_content[start_row + 1].split(',')]
        column_agg = zip(column_name, column_unit)
        header_col_def = ['{} ({})'.format(i, j) for i, j in column_agg]
        self.header_content[COL_HEADER] = header_col_def
    def _get_date_list(self) -> date_list:
        # First CSV field of each data row is a quoted timestamp; the last one
        # is also recorded as the site visit date.
        dates = []
        for i in self.datas:
            dates.append(pd.Timestamp(i[0].replace('"', '')))
        self.sites.visit_date = dates[-1]
        return dates
    def _add_common_subplots(self) -> List[LineDefinition]:
        # Keep only the predefined channels that actually exist in the records;
        # each kept line gets an increasing spine offset ('outward').
        outward = 0
        out_linedef = []
        params_list = [LineDefinition('TDGP1_Avg (mmHg)', 'darkorange', '-'),
                       LineDefinition('Pression_bridge (psi)', 'red', make_grid=True),
                       LineDefinition('Pression_bridge_Avg (psi)', 'black', '--', linewidth=0.7),
                       LineDefinition('CH4 (%)', 'orange', '-', ),
                       LineDefinition('CH4_Avg (%)', 'brown', '--'),
                       LineDefinition('Ptot (mbar)', 'blue', '-.', linewidth=0.7)]
        for i in params_list:
            if i.param in self.records.dtypes.index:
                i.outward = outward
                out_linedef.append(i)
                outward += 50
        return out_linedef
    def _add_mean_batt_voltage(self, all_axis: List[plt.Axes]) -> List[plt.Axes]:
        # Overlay the interpolated daily-mean battery voltage on the first axis.
        bat_mean_line_def = LineDefinition('Bat_Volt_mean (volt)')
        bat_mean_axe = self._add_first_axis(all_axis[0], bat_mean_line_def)
        all_axis.append(bat_mean_axe)
        all_axis[0].set_ylim(0, 20)
        all_axis[0].set_ylabel('Bat_Volt (volt)', color='black')
        return all_axis
    def _define_axis_limite_for_pressure_and_ch4(self, all_axis: List[plt.Axes]) -> List[plt.Axes]:
        # Clamp pressure axes around the observed data range and CH4 axes to
        # 0-105 (percent scale with headroom).
        for ax in all_axis:
            for lines in ax.lines:
                if lines._label in ['Pression_bridge (psi)', 'Pression_bridge_Avg (psi)']:
                    ax.set_ylim(self.records['Pression_bridge (psi)'].min() - 15,
                                self.records['Pression_bridge (psi)'].max() + 10)
                if lines._label in ['CH4 (%)', 'CH4_Avg (%)']:
                    ax.set_ylim(0, 105)
        return all_axis
    def plot(self, main_axis_def: LineDefinition = None, other_axis: List[LineDefinition] = None,
             legend_loc='upper left', *args, **kwargs) -> \
        Tuple[
            plt.Figure, List[plt.Axes]]:
        """Plot battery voltage (raw + interpolated daily mean) plus the common
        channels; the actual drawing is delegated to the base class."""
        self.records['Bat_Volt_mean (volt)'] = self.records['Bat_Volt (volt)'].resample('D').mean()
        self.records['Bat_Volt_mean (volt)'] = self.records['Bat_Volt_mean (volt)'].interpolate()
        if main_axis_def is None:
            main_axis_def = LineDefinition('Bat_Volt (volt)', 'green', linewidth=0.5)
        if other_axis is None:
            other_axis = self._add_common_subplots()
        fig, all_axis = super().plot(main_axis_def, other_axis, legend_loc, *args, **kwargs)
        all_axis = self._add_mean_batt_voltage(all_axis)
        all_axis = self._define_axis_limite_for_pressure_and_ch4(all_axis)
        return fig, all_axis
if __name__ == '__main__':
    # Manual smoke test: read a sample file shipped with the package and plot it.
    import os
    # Walk up from the CWD until the 'hydsensread' package root is found.
    path = os.getcwd()
    while os.path.split(path)[1] != "hydsensread":
        path = os.path.split(path)[0]
    file_loc = os.path.join(path, 'file_example')
    file_name = "PO-03_F2_XM20170222.dat"
    file = os.path.join(file_loc, file_name)
    print(file)
    campbell_file = DATCampbellCRFileReader(file)
    campbell_file.read_file()
    print(campbell_file.sites)
    # print(campbell_file.records.head())
    print(campbell_file.records.describe())
    fig, ax = campbell_file.plot(legend_loc='lower right')
    plt.show(block=True)
/Montreal-Forced-Aligner-3.0.0a3.tar.gz/Montreal-Forced-Aligner-3.0.0a3/docs/source/user_guide/workflows/index.rst |
.. _workflows_index:
Workflows available
===================
The primary workflow in MFA is forced alignment, where text is aligned to speech along with phones derived from a pronunciation dictionary and an acoustic model. There are, however, other workflows for transcribing speech using speech-to-text functionality in Kaldi, pronunciation dictionary creation using Pynini, and some basic corpus creation utilities like VAD-based segmentation. Additionally, acoustic models, G2P models, and language models can be trained from your own data (and then used in alignment and other workflows).
.. warning::
    Speech-to-text functionality is fairly basic, and the model architectures used in MFA are older GMM-HMM acoustic models and n-gram language models, so using something like :xref:`coqui` or Kaldi's ``nnet`` functionality will likely yield better-quality transcriptions.
.. hint::
See :ref:`pretrained_models` for details about commands to inspect, download, and save various pretrained MFA models.
.. toctree::
:hidden:
alignment
adapt_acoustic_model
train_acoustic_model
dictionary_generating
g2p_train
| PypiClean |
/ApiLogicServer-9.2.18-py3-none-any.whl/api_logic_server_cli/cli_args_project.py | import api_logic_server_cli.create_from_model.uri_info as uri_info
from api_logic_server_cli.cli_args_base import CliArgsBase
from os.path import abspath
from pathlib import Path
import os
class Project(CliArgsBase): # extend user-visible args with internal values
def __init__(self):
super(Project, self).__init__()
self.os_cwd = os.getcwd()
self.abs_db_url = None
self.nw_db_status = None
""" '', nw, nw+, nw- """
self.project_directory = None
""" string - may have relative /../ """
self.project_directory_actual = None
""" string - no relative /../ """
self.project_directory_path = None
""" Path (project_directory_actual) """
self.merge_into_prototype = None
""" used by codespaces (create project over current) - project_name = ./ """
self.model_gen_bind_msg = False
""" sqlacodegen/codegen msg printed """
self.model_file_name = "models.py"
""" name of models file being processed """
self. default_db = "default = nw.sqlite, ? for help"
self.default_project_name = "ApiLogicProject"
self.default_fab_host = "localhost"
self.default_bind_key_url_separator = "-" # admin
self.is_tutorial = False
self.project_name_last_node = "TBD"
running_at = Path(__file__)
self.api_logic_server_dir_path = running_at.parent.absolute() # no abspath(f'{abspath(get_api_logic_server_dir())}'))
self.is_codespaces = os.getenv('CODESPACES')
def print_options(self):
""" Creating ApiLogicServer with options: (or uri helo) """
if self.db_url == "?": # can only test interactively, not from launch
uri_info.print_uri_info()
exit(0)
print_options = True
if print_options:
print(f'\n\nCreating ApiLogicServer with options:')
print(f' --db_url={self.db_url}')
print(f' --bind_key={self.bind_key}')
print(f' --bind_url_separator={self.bind_key_url_separator}')
print(f' --project_name={self.project_name} (pwd: {self.os_cwd})')
print(f' --api_name={self.api_name}')
print(f' --admin_app={self.admin_app}')
print(f' --react_admin={self.react_admin}')
print(f' --flask_appbuilder={self.flask_appbuilder}')
print(f' --from_git={self.from_git}')
# print(f' --db_types={self.db_types}')
print(f' --run={self.run}')
print(f' --host={self.host}')
print(f' --port={self.port}')
print(f' --swagger_host={self.swagger_host}')
print(f' --not_exposed={self.not_exposed}')
print(f' --open_with={self.open_with}')
print(f' --use_model={self.use_model}')
print(f' --favorites={self.favorites}')
print(f' --non_favorites={self.non_favorites}')
print(f' --extended_builder={self.extended_builder}')
print(f' --multi_api={self.multi_api}')
print(f' --infer_primary_key={self.infer_primary_key}')
print(f' --opt_locking={self.opt_locking}')
print(f' --opt_locking={self.opt_locking_attr}') | PypiClean |
/KratosOptimizationApplication-9.4-cp310-cp310-win_amd64.whl/KratosMultiphysics/OptimizationApplication/responses/base_response.py | from numpy import gradient
import KratosMultiphysics as KM
from KratosMultiphysics import Parameters, Logger
import KratosMultiphysics.OptimizationApplication as KOA
import time as timer
import numpy as np
# ==============================================================================
class BaseResponseFunction:
def __init__(self,response_name, response_settings, model, response_analysis=None):
self.name = response_name
self.response_settings = response_settings
self.model = model
self.analysis = response_analysis
self.analysis_model_part = None
if not response_analysis == None:
self.analysis_model_part = self.analysis._GetSolver().GetComputingModelPart()
self.evaluated_model_parts = response_settings["evaluated_objects"].GetStringArray()
self.controlled_model_parts = response_settings["controlled_objects"].GetStringArray()
self.control_types = response_settings["control_types"].GetStringArray()
def Initialize(self):
for evaluated_model_part in self.evaluated_model_parts:
if not self.analysis == None:
evaluated_model_part_splitted = evaluated_model_part.split(".")
if not evaluated_model_part_splitted[0] == self.analysis_model_part.Name:
raise RuntimeError("BaseResponseFunction:Initialize: root evaluated_model_part {} of response '{}' is not the analysis model!".format(evaluated_model_part_splitted[0],self.name))
if not self.model.HasModelPart(evaluated_model_part):
raise RuntimeError("BaseResponseFunction:Initialize: evaluated_model_part {} of response '{}' does not exist!".format(evaluated_model_part,self.name))
for controlled_model_part in self.controlled_model_parts:
if not self.model.HasModelPart(controlled_model_part):
raise RuntimeError("BaseResponseFunction:Initialize: controlled_model_part {} of response '{}' does not exist!".format(controlled_model_part,self.name))
def CalculateValue(self):
raise RuntimeError("BaseResponseFunction:CalculateValue: Not implemeted ! ")
def GetValue(self):
raise RuntimeError("BaseResponseFunction:GetValue: Not implemeted ! ")
def CalculateGradients(self):
raise RuntimeError("BaseResponseFunction:CalculateGradients: Not implemeted ! ")
def CalculateGradientsForTypeAndObjects(self,control_type,controlled_objects,raise_error=True):
raise RuntimeError("BaseResponseFunction:CalculateGradientsForTypeAndObjects: Not implemeted ! ")
def GetGradients(self):
raise RuntimeError("BaseResponseFunction:GetGradients: Not implemeted ! ")
def GetType(self):
raise RuntimeError("BaseResponseFunction:GetValue: Not implemeted ! ")
def GetVariableName(self):
raise RuntimeError("BaseResponseFunction:GetVariableName: Not implemeted ! ")
def GetGradientsVariablesName(self):
raise RuntimeError("BaseResponseFunction:GetGradientsVariablesName: Not implemeted ! ")
def GetGradientVariableNameForType(self,control_type):
raise RuntimeError("BaseResponseFunction:GetGradientVariableNameForType: Not implemeted ! ") | PypiClean |
/inverterdb-0.1.0-py3-none-any.whl/SSOPInvertorDataBase/gCentralComponentDB.py |
#ERRORS
# -1 -> Could not connect to the central database
# -2 -> Type of argument not correct
# -3 -> Entry/Table does not exist (or the object that is being search for doesn't exist)
# -4 -> Being implemented ( it is in datatypes available but it is yet to be implemented)
# -5 -> Something unexpected
# -6 -> Could not create entry on the database
# -7 ->
# -8 ->
# gBaseDB imports
from .gBaseDB import Base, session
from .gBaseDB import Column,String,Integer, createTable
# Other tables imports
# There are three functions for every table:
# Create new entry on the table, list things on the table and look for a specific ID on the table
from .sInvertorData import listInvertorData, listInvertorDataByID, newInvertorData
#For the user, we need someone to fix bugs so it prints on the terminal a issue solver
# Shown to the user when an internal error needs a human to follow up.
EMERGENCY_CONTACT = "Instituto Superior Técnico"
# dataType strings accepted by this module (implemented or planned tables).
dataTypesAvailable = ["invertorData"]
# Declaration of the central table: it records every payload that flows
# in and out of the system.
# This table has all the flow of information that comes in and out
#%%
class allPayLoads(Base):
    """ORM model for the central 'All Payloads' table: one row per payload,
    tagged with its topic, originating IoT device and data type."""
    __tablename__ = 'All Payloads'
    # 'extend_existing' allows re-declaring the table if it is already
    # registered with the metadata (e.g. on module reload).
    __table_args__ = {'extend_existing': True}
    id = Column(Integer, primary_key=True)
    topic = Column(String)
    iotDeviceID = Column(String)
    dataType = Column(String)
    def __repr__(self):
        return "All Payloads : {{ sid : %d, topic : %s, IoT Device ID : %s, dataType : %s)}}" % (
            self.id, self.topic,
            self.iotDeviceID, self.dataType)
# Ensure the declaratively-defined tables (including allPayLoads) exist in the
# database; createTable() is imported from gBaseDB.
createTable()
#
#List Functions: All the funtions that retrieve information from databases
#
#%%
# Retrieve all the data from the central table
def listData():
    """Return every row of the central 'All Payloads' table.

    :return: list of allPayLoads rows, or -1 on database error.
    """
    try:
        return session.query(allPayLoads).all()
    # except Exception (not bare except) so KeyboardInterrupt/SystemExit pass.
    except Exception:
        print("Couldn't return info from database! Contact library owner!")
        return -1
# Retrieve all the data from the dataType table passed as argument
def listDataByDataType(dataType):
    """Return all rows of the table backing *dataType*.

    Error codes: -2 bad argument type, -3 unknown dataType,
    -4 dataType listed but not implemented, -1 database failure.
    """
    if not isinstance(dataType, str):
        return -2
    # Reject dataTypes not declared in dataTypesAvailable.
    if not checkDataType(dataType):
        return -3
    try:
        if dataType == "invertorData":
            return listInvertorData()
        print("dataType not valid because function doesn't exist!!! However it is in the dataTypesAvailable")
        return -4
    except Exception:
        print("Couldn't access data on the database")
        return -1
# First looks for the entry in the central table
# If success, looks for the entry in the right table (the dataType found).
# If success, retrieves information
def listDataByID(ID):
    """Look up entry *ID* in the central table, then fetch the matching row
    from its dataType-specific table.

    Error codes: -2 bad argument type, -3 no such entry,
    -5 entry has an unexpected dataType, -1 database failure.
    """
    if not isinstance(ID, int):
        print("Id is not in the correct format!")
        return -2
    try:
        entry = session.query(allPayLoads).get(ID)
    except Exception:
        print("Couldn't access that id! Error in database")
        return -1
    if entry is None:
        print("There is no entry with this ID!")
        return -3
    try:
        if entry.dataType == "invertorData":
            return listInvertorDataByID(ID)
        print("Something happened when trying to read that ID from the correct table! Not valid!")
        return -5
    except Exception:
        print("Couldn't access data on the database of specific table!")
        return -1
# %%
# #
#
# Simple Functions:
# -> Delete one row
# -> Check all the rows in one dataType
# -> Tranform into dict form
#
# #
def deleteEntryByID(id):
    """Delete the central-table row with primary key *id* and commit."""
    session.query(allPayLoads).filter_by(id=id).delete()
    session.commit()
def checkDataType(dataType):
    """Return 1 when *dataType* is a supported data type, else 0.

    Kept as int (not bool) for backward compatibility with callers that
    compare against 0/1.
    """
    return 1 if dataType in dataTypesAvailable else 0
def row2dict(row):
    """Convert one SQLAlchemy row object into {column_name: str(value)}."""
    return {
        column.name: str(getattr(row, column.name))
        for column in row.__table__.columns
    }
def table2dict(table):
    """Consume *table* (a list of rows) into {'data': [row2dict(r), ...]}.

    NOTE: destructively pops rows from *table*, matching the original
    behavior. Returns -1 on unexpected errors. The original iterated
    ``range(100000000)`` and relied on IndexError to stop; a ``while``
    loop removes both the arbitrary cap and the exception-driven exit.
    """
    d = {'data': []}
    try:
        while table:
            d['data'].append(row2dict(table.pop(0)))
    except Exception as e:
        print(e)
        return -1
    return d
#%% New data
# #
#
# Add Rows o the right places in the database
#
#
#To add a new payload, first creates an attemp of access data in the main table,
# Then, if the informatio is correct, it creates the entry in the right table with the dataType given
def newPayload(topic, iotDeviceID, dataType, data):
    """Insert a payload: first a row in the central table, then a row in the
    dataType-specific table (rolling back the central row on failure).

    :return: result of the specific-table insert, or a negative error code
        (-2 bad arguments, -6 central insert failed, -4 dataType not
        implemented, -1 specific-table failure).
    """
    if not (isinstance(topic, str) and isinstance(iotDeviceID, str)
            and isinstance(dataType, str) and isinstance(data, dict)):
        print("Type of argument/arguments is not correct!")
        return -2
    if not checkDataType(dataType):
        print("dataType argument does not exist! Try an existing dataType!")
        return -2
    # Renamed local (was 'newPayload', shadowing this function).
    entry = allPayLoads(topic=topic, iotDeviceID=iotDeviceID, dataType=dataType)
    session.add(entry)
    try:
        session.commit()
    except Exception:
        session.rollback()
        print("Could not create that entry on main database due to database error! Contact {}.".format(EMERGENCY_CONTACT))
        return -6
    data['id'] = entry.id
    try:
        if dataType == "invertorData":
            result = newInvertorData(data)
            if 0 > result:
                deleteEntryByID(data['id'])
        else:
            # Original used '"{%s}".format(...)', which raises at runtime
            # because '%s' is not a valid replacement-field name.
            print("Data type not valid because functions not implemented!!! However it is in the available dataTypes. Contact {}.".format(EMERGENCY_CONTACT))
            return -4
    except Exception:
        deleteEntryByID(data['id'])
        print("Couldn't write data on the database. Database error!")
        return -1
    return result
return result | PypiClean |
/KratosSwimmingDEMApplication-9.2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/KratosMultiphysics/SwimmingDEMApplication/daitche_quadrature/radii_error_plotter.py | import os
import matplotlib.pyplot as plt
import itertools
import math
def flip(items, ncol):
    """Interleave *items* across *ncol* columns (used below so a multi-column
    legend reads in row order instead of column order)."""
    return itertools.chain.from_iterable(items[i::ncol] for i in range(ncol))
dir = os.path.dirname(__file__)  # NOTE: shadows the built-in dir()
my_paths = []
my_labels = []
my_Nqs = []
# Daitche-quadrature results for several quadrature-point counts N_q;
# commented-out entries are kept for easy re-enabling.
my_file_path = os.path.join(dir, 'Daitche_1_order_2Post_Files/radii1.txt')
my_paths += [my_file_path]
my_Nqs += [1]
my_labels += [r'$N_q = 1$']
my_file_path = os.path.join(dir, 'Daitche_2_order_2Post_Files/radii2.txt')
#my_paths += [my_file_path]
#my_labels += [r'$N_q = 2$']
#my_Nqs += [2]
my_file_path = os.path.join(dir, 'Daitche_4_order_2Post_Files/radii4.txt')
my_paths += [my_file_path]
my_labels += [r'$N_q = 4$']
my_Nqs += [4]
my_file_path = os.path.join(dir, 'Daitche_8_order_2Post_Files/radii8.txt')
#my_paths += [my_file_path]
#my_labels += [r'$N_q = 8$']
#my_Nqs += [8]
my_file_path = os.path.join(dir, 'Daitche_16_order_2Post_Files/radii16.txt')
my_paths += [my_file_path]
my_labels += [r'$N_q = 16$']
my_Nqs += [16]
my_file_path = os.path.join(dir, 'Daitche_32_order_2Post_Files/radii32.txt')
#my_paths += [my_file_path]
#my_labels += [r'$N_q = 32$']
#my_Nqs += [32]
my_file_path = os.path.join(dir, 'Daitche_64_order_2Post_Files/radii64.txt')
my_paths += [my_file_path]
my_labels += [r'$N_q = 64$']
my_Nqs += [64]
exact_file_path = os.path.join(dir, 'Reference_Post_Files/exact_radii.txt')
my_radii_arrays = []
my_labels  # no-op expression statement (leftover from debugging)
# Read each result file: column 0 is time, column 1 is the radius.
# NOTE: 'ts' keeps the time list of the LAST file read; the relative-error
# loop below relies on that.
for my_file_path in my_paths:
    with open(my_file_path, mode = 'r') as f:
        ts = []
        my_radii = []
        for line in f:
            data = str.split(line)
            ts.append(float(data[0]))
            my_radii.append(float(data[1]))
        my_radii_arrays += [my_radii]
# Reference solutions: one at the base time step, plus coarsened references
# for the dt x 2/4/8/16 runs handled further below.
with open(exact_file_path, mode = 'r') as f:
    exact_radii = []
    for line in f:
        data = str.split(line)
        exact_radii.append(float(data[1]))
exact_file_path_2 = os.path.join(dir, 'Reference_Post_Files_2/exact_radii.txt')
exact_file_path_4 = os.path.join(dir, 'Reference_Post_Files_4/exact_radii.txt')
exact_file_path_8 = os.path.join(dir, 'Reference_Post_Files_8/exact_radii.txt')
exact_file_path_16 = os.path.join(dir, 'Reference_Post_Files_16/exact_radii.txt')
with open(exact_file_path_2, mode = 'r') as f:
    exact_radii_2 = []
    for line in f:
        data = str.split(line)
        exact_radii_2.append(float(data[1]))
with open(exact_file_path_4, mode = 'r') as f:
    exact_radii_4 = []
    for line in f:
        data = str.split(line)
        exact_radii_4.append(float(data[1]))
with open(exact_file_path_8, mode = 'r') as f:
    exact_radii_8 = []
    for line in f:
        data = str.split(line)
        exact_radii_8.append(float(data[1]))
with open(exact_file_path_16, mode = 'r') as f:
    exact_radii_16 = []
    for line in f:
        data = str.split(line)
        exact_radii_16.append(float(data[1]))
# Relative error of each N_q run against the base reference solution.
j = 0
for my_radii in my_radii_arrays:
    rel_error = [abs(exact_radii[i] - my_radii[i]) / exact_radii[i] for i in range(len(ts))]
    print(len(rel_error))
    plt.plot(ts, rel_error, label=my_labels[j], marker = 4 + j, markevery = int(1000), color='k')
    j += 1
# Runs with coarsened time steps (dt x 2, 4, 8, 16), each compared against the
# reference computed at the same step. The four hand-copied blocks of the
# original are folded into one loop; behavior (plots, markers, labels,
# my_radii_arrays growth) is preserved.
for step, exact, mark in [(2, exact_radii_2, '*'),
                          (4, exact_radii_4, 5),
                          (8, exact_radii_8, 3),
                          (16, exact_radii_16, 6)]:
    my_file_path = os.path.join(dir, 'Daitche_t_step_{}Post_Files/radii1.txt'.format(step))
    with open(my_file_path, mode='r') as f:
        ts = []
        my_radii = []
        for line in f:
            data = str.split(line)
            ts.append(float(data[0]))
            my_radii.append(float(data[1]))
    my_radii_arrays += [my_radii]
    rel_error = [abs(exact[i] - my_radii[i]) / exact[i] for i in range(len(ts))]
    plt.plot(ts, rel_error, label=r'$\Delta t \times {}$'.format(step),
             linestyle='--', marker=mark, markevery=int(1000 / step), color='k')
plt.semilogy()
# Fixed: the original passed loc='bottom right', which is not a valid
# matplotlib location string ('lower right' is). This legend is replaced by
# the combined two-column legend below anyway.
plt.legend(loc='lower right')
plt.ylabel(r'$E_{\mathrm{rel}}$')
plt.xlabel(r'$t$')
ax = plt.subplot(111)
handles, labels = ax.get_legend_handles_labels()
# flip() reorders entries so the two-column legend reads row-major.
plt.legend(flip(handles, 2), flip(labels, 2), loc=4, ncol=2)
plt.savefig('ErrorInTime.eps', format='eps', dpi=1200)
plt.savefig('ErrorInTime.pdf', format='pdf', dpi=1200)
plt.show()
/CheckEmails-0.1.tar.gz/CheckEmails-0.1/code/__init__.py | import json , requests
def A_Gmail(email):
    """Check whether *email* is available as a Gmail address via Google's
    account-setup availability endpoint; returns a status dict.

    NOTE(review): relies on an undocumented endpoint and a hard-coded
    Content-Length header — may break if Google changes the API.
    """
    url = 'https://android.clients.google.com/setup/checkavail'
    headers = {
        'Content-Length':'98',
        'Content-Type':'text/plain; charset=UTF-8',
        'Host':'android.clients.google.com',
        'Connection':'Keep-Alive',
        'user-agent':'GoogleLoginService/1.3(m0 JSS15J)',
    }
    data = json.dumps({
        'username':f'{email}',
        'version':'3',
        'firstName':'Aegos',
        'lastName':'Codeing'
    })
    response = requests.post(url,headers=headers,data=data)
    # 'SUCCESS' means the username is free to register.
    if response.json()['status'] == 'SUCCESS':
        return {'Status':'Available','AEGOS':'@G_4_2'}
    else:
        return {'Status':'UnAvailable','AEGOS':'@G_4_2'}
def A_Yahoo(email):
    """Check whether *email*'s local part is available as a Yahoo user id by
    posting to the signup field-validation endpoint; returns a status dict
    (or None when the response matches neither known pattern).

    NOTE(review): the crumb/acrumb and cookie values below are hard-coded
    session tokens — they expire, so this check may stop working.
    """
    email2 = email.split('@')[0]
    url2 = "https://login.yahoo.com/account/module/create?validateField=userId"
    headers2 = {
        'accept': '*/*',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'en-US,en;q=0.9',
        'content-length': '7979',
        'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'cookie': 'A1=d=AQABBKsqRWQCEP0UsV5c9lOx8e5im2YNQ50FEgEBAQF8RmRPZAAAAAAA_eMAAA&S=AQAAApW5iPsgjBo-EVpzITncq1w; A3=d=AQABBKsqRWQCEP0UsV5c9lOx8e5im2YNQ50FEgEBAQF8RmRPZAAAAAAA_eMAAA&S=AQAAApW5iPsgjBo-EVpzITncq1w; A1S=d=AQABBKsqRWQCEP0UsV5c9lOx8e5im2YNQ50FEgEBAQF8RmRPZAAAAAAA_eMAAA&S=AQAAApW5iPsgjBo-EVpzITncq1w&j=WORLD; cmp=t=1682254514&j=0&u=1---; B=9qgodcpi4aalb&b=3&s=7t; GUC=AQEBAQFkRnxkT0IiCATl; AS=v=1&s=yWa5asCx&d=A64467c3b|dcjw_0n.2SoXBbaywfJ6pOKLuxGKrtyyLsUqPKnDloZ4PzLBcZineGWbyj4SSiaHVn.6gkyCaIlqSJGryRwnshefN43hbdPocziZnuN6cUMiC9Ls7jght5ak90PZbx8rt9nghZTUPpDYSsMNpii5aA9xWBEhMq__TTmv.rfLHzlCE8rgi5dk5PJouLBujcieRBtI7i.7PwU1jFkaeDhxE4dRMjpAQrjJKc6XqfbTBc5K9QaF6r1YVIVWHEpNrUzbZ_7sSzQ5QFoQNwVBgRzaFtm48hiQlg6S.xsMMdDWkw5xtlG7GZUC.V2jgWNgLScSwqCU_3ntveI_BrcuBy_XAXWQsUzNv3grKBv3qzhOMH3pl8DgTDV3wOo.GqdTtcsaaUn7O0i1hSoA0_EqNIXvRBBdePtBAjPWFZt6sK1Dy8S.kVvW9rIWxonS8GYw6jAw3FrkvM_xk8gxU4oKX1pk3h4m0iJVDQhlr0OOLGW7vBxnzYqidDFi01xQe608kLkJO9qx2X1Xv6XORvYJTNAOVfOMWV83D75M_7L4FOjog8f8F5EkOTU7LymG8GTXY2g4K1xBfGHyzAOPDv9NMjc0I_7wLdATcbn2axvwj5I2xiSqrxK8DYnqTVGqEt.tusj07ij4sobwY0FePNGjLOHICdau9tCajCSqBxtly23flz3iYPQ22Va6uuSaQ.c9mtXsBd0NTlWvlOc6zRdQK.uYkiCYg719UyeIFzDDWeFvQCbuBrstwX.zAkYz2YPaTs8ZGpogdgQ5OhaduuhR5jzvz2mmHXGh5fJ1kxfeClXFWbvCdu3T77mmXHxLGQpr3UZKnmiPO7VjxJoEd9SjYA_NFz9HPbvimmWgmv0DIXvdNvHKCQMYEUROQlk5XIH7oiQ1BtywZNvoWv1D7Q--~A',
        'origin': 'https://login.yahoo.com',
        'referer': 'https://login.yahoo.com/account/create?.lang=en-US&src=homepage&activity=ybar-signin&pspid=2023538075&.done=https%3A%2F%2Fwww.yahoo.com%2F&specId=yidregsimplified&done=https%3A%2F%2Fwww.yahoo.com%2F',
        'sec-ch-ua': '"Chromium";v="112", "Microsoft Edge";v="112", "Not:A-Brand";v="99"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36 Edg/112.0.1722.48',
        'x-requested-with': 'XMLHttpRequest',
    }
    data2 = {
        'specId': 'yidregsimplified',
        'cacheStored': '',
        'crumb': 'hrxAgkAZ5jX',
        'acrumb': 'yWa5asCx',
        'sessionIndex': '',
        'done': 'https://www.yahoo.com/',
        'googleIdToken': '',
        'authCode': '',
        'attrSetIndex': '0',
        'multiDomain': '',
        'tos0': 'oath_freereg|xa|en-JO',
        'firstName': 'Aegos',
        'lastName': 'coding',
        'userid-domain': 'yahoo',
        'userId': f'{email2}',
        'password': 'szdxfefdgfh',
        'birthYear': '1998',
        'signup': '',
    }
    response2 = requests.post(url2,headers=headers2,data=data2).text
    if '{"errors":[{"name":"userId","error":"IDENTIFIER_EXISTS"}]}' in response2:
        return {'Status':'UnAvailable','AEGOS':'@G_4_2'}
    elif '{"errors":[]}' in response2:
        # Fixed: the original was missing 'return' here, so the 'Available'
        # case fell through and the function returned None.
        return {'Status':'Available','AEGOS':'@G_4_2'}
def A_Hotmail(email):
    """Check whether *email* is registered with a Microsoft identity provider
    via the Office getidp endpoint; returns a status dict.

    NOTE(review): 'Neither' in the response appears to mean no provider is
    associated with the address — confirm against the endpoint's behavior.
    """
    url3 = f'https://odc.officeapps.live.com/odc/emailhrd/getidp?hm=0&emailAddress={email}&_=1604288577990'
    headers3 = {
        'content-type': 'application/x-www-form-urlencoded',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36',
    }
    response3 = requests.post(url3, headers=headers3).text
    if 'Neither' in response3:
        return {'Status':'Available','AEGOS':'@G_4_2'}
    else:
        return {'Status':'UnAvailable','AEGOS':'@G_4_2'}
def A_Aol(email):
    """Check whether *email*'s local part is available as an AOL user id by
    posting to the account-creation validation endpoint; returns a status dict
    (or None when the response matches neither known pattern).

    NOTE(review): the crumb/acrumb and cookie values below are hard-coded
    session tokens — they expire, so this check may stop working.
    """
    email3 = email.split('@')[0]
    url4 = "https://login.aol.com/account/module/create?validateField=yid"
    headers4 = {
        'accept': '*/*',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'en-US,en;q=0.9',
        'content-length': '18430',
        'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'cookie': 'A1=d=AQABBAcaP2MCEDS0lcVAC7jDxca1x2QPSMAFEgEBAQFrQGNIYwAAAAAA_eMAAA&S=AQAAAk66bvBpHLzQZ0n3bQV7x6U; A3=d=AQABBAcaP2MCEDS0lcVAC7jDxca1x2QPSMAFEgEBAQFrQGNIYwAAAAAA_eMAAA&S=AQAAAk66bvBpHLzQZ0n3bQV7x6U; cmp=t=1665079824&j=0&u=1---; rxx=5dmbu5em0gs.2w52y1t9&v=1; AS=v=1&s=mE9oz2RU&d=A6340990f|BfPo7D7.2Soeua6Q5.JcZFuTeKDZd.VEwARWGa18pr8Nw39Pbg3lrVe2yFRyh3RRePi__A4A5bs6jgblICTjtwR23Xn2FaKNd3g4n2Nyoe0HUPOPhxc2_MkgSPb3Uv64NNH6b4oIbh0d6GPjVX.u1iE75NeNGVgDykpoV.GJb.ZOyA1hi3D079flz5FnGN3UPl4Jos.LGJjKE5jeRFZVRbTJyV_q0zmHwp0WmwaGpmtr2bKK2pVY_9dMpw5J1u9Wx0e_QeNBnAgpvDP_E02PBbuxEQQXAX0GF8IM_gu2g5D1CEPA15ailOgAaPTMDY7plQgXdP3cYarpT20WB0vRVdZXqvfsh7E.m8mX5QyFisDObrlDfLbh6nPbmjU_8BIyAHLvCBoCmF0u4BhXftXCqUgW5SadK6EzXKbn394dWjCdO0YJRStGJo_POkob5FNOWud6u3MY1IZS2ov3OD9LIoJy7w.mSCLZ.M84QgA0UgsGTrDOgTQJWeetwKIYy1RbR8lxFZr0IDwTLBAGflJkaNvnQqWxWbEjftCTvXH2CPXFaCRUnSObHQ2cP1Mb8kro2zkXtaUGmW_cD9oHxidsx6vaOfx4f_fSysGP5Aaa2z6NndXHWh_ium8B45ejj4MFh3F7my8_04UX4WjjiZIqGG0fXcLQxFrB1GY6Vnqo47oSmh4yBcZPV7eQ0CKATeJLshzj2SovAZcIdV1ptsKk9P.LVCZl6MeDskIxd5L6iixeCU6PMq84tz7Gmg6S~A; A1S=d=AQABBAcaP2MCEDS0lcVAC7jDxca1x2QPSMAFEgEBAQFrQGNIYwAAAAAA_eMAAA&S=AQAAAk66bvBpHLzQZ0n3bQV7x6U&j=WORLD',
        'origin': 'https://login.aol.com',
        'referer': 'https://login.aol.com/account/create?intl=uk&lang=en-gb&specId=yidReg&done=https%3A%2F%2Fwww.aol.com',
        'sec-ch-ua': '"Chromium";v="106", "Google Chrome";v="106", "Not;A=Brand";v="99"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36',
        'x-requested-with': 'XMLHttpRequest',
    }
    data4 = {
        'specId': 'yidreg',
        'cacheStored': '',
        'crumb': 'ks78hCqM4K.',
        'acrumb': 'mE9oz2RU',
        'done': 'https://www.aol.com',
        'googleIdToken': '',
        'authCode': '',
        'attrSetIndex': '0',
        'tos0': 'oath_freereg|uk|en-GB',
        'firstName': 'Aegos',
        'lastName': 'Coodeing',
        'yid': email3,
        'password': '1#$aegos$#1wjdytesre',
        'shortCountryCode': 'IQ',
        'phone': '7716555876',
        'mm': '11',
        'dd': '1',
        'yyyy': '1998',
        'freeformGender': '',
        'signup': '',
    }
    response4 = requests.post(url4,headers=headers4,data=data4).text
    if ('{"errors":[{"name":"yid","error":"IDENTIFIER_EXISTS"}]}') in response4:
        return {'Status':'UnAvailable','AEGOS':'@G_4_2'}
    elif ('{"errors":[]}') in response4:
        return {'Status':'Available','AEGOS':'@G_4_2'}
def A_MailRu(email):
    """Check whether *email* is registered on Mail.ru via the public
    user/exists API; returns a status dict."""
    url5 = 'https://account.mail.ru/api/v1/user/exists'
    headers5 = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.51 Safari/537.36'
    }
    data5 = {
        'email': str(email)
    }
    response5 = requests.post(url5,headers=headers5,data=data5).text
    # '"exists":false' in the JSON response means no account uses the address.
    if 'exists":false' in response5:
        return {'Status':'Available','AEGOS':'@G_4_2'}
    else:
        return {'Status':'UnAvailable','AEGOS':'@G_4_2'}
/Diofant-0.14.0a2.tar.gz/Diofant-0.14.0a2/diofant/polys/partfrac.py |
import itertools
from ..core import Add, Dummy, Function, Integer, Lambda, preorder_traversal
from ..core.sympify import sympify
from ..utilities import numbered_symbols
from . import Poly, RootSum, cancel, factor
from .polyerrors import PolynomialError
from .polyoptions import allowed_flags, set_defaults
from .polytools import parallel_poly_from_expr
__all__ = 'apart', 'apart_list', 'assemble_partfrac_list'  # public API of this module
def apart(f, x=None, full=False, **options):
    """
    Compute partial fraction decomposition of a rational function.

    Given a rational function ``f``, computes the partial fraction
    decomposition of ``f``. Two algorithms are available: One is based on the
    undetermined coefficients method, the other is Bronstein's full partial
    fraction decomposition algorithm.

    The undetermined coefficients method (selected by ``full=False``) uses
    polynomial factorization (and therefore accepts the same options as
    factor) for the denominator. Per default it works over the rational
    numbers, therefore decomposition of denominators with non-rational roots
    (e.g. irrational, complex roots) is not supported by default (see options
    of factor).

    Bronstein's algorithm can be selected by using ``full=True`` and allows a
    decomposition of denominators with non-rational roots. A human-readable
    result can be obtained via ``doit()`` (see examples below).

    Examples
    ========

    By default, using the undetermined coefficients method:

    >>> apart(y/(x + 2)/(x + 1), x)
    -y/(x + 2) + y/(x + 1)

    The undetermined coefficients method does not provide a result when the
    denominator's roots are not rational:

    >>> apart(y/(x**2 + x + 1), x)
    y/(x**2 + x + 1)

    You can choose Bronstein's algorithm by setting ``full=True``:

    >>> apart(y/(x**2 + x + 1), x, full=True)
    RootSum(_w**2 + _w + 1, Lambda(_a, (-2*y*_a/3 - y/3)/(x - _a)))

    Calling ``doit()`` yields a human-readable result:

    >>> apart(y/(x**2 + x + 1), x, full=True).doit()
    (-y/3 - 2*y*(-1/2 - sqrt(3)*I/2)/3)/(x + 1/2 + sqrt(3)*I/2) + (-y/3 -
    2*y*(-1/2 + sqrt(3)*I/2)/3)/(x + 1/2 - sqrt(3)*I/2)

    See Also
    ========

    apart_list, assemble_partfrac_list

    """
    allowed_flags(options, [])

    f = sympify(f)

    if f.is_Atom:
        # Atoms (symbols, numbers) have nothing to decompose.
        return f
    else:
        P, Q = f.as_numer_denom()

    # Keep a copy of the caller's options for the recursive calls below;
    # the polynomial conversion itself needs extension=True.
    _options = options.copy()
    options = set_defaults(options, extension=True)
    try:
        (P, Q), _ = parallel_poly_from_expr((P, Q), x, **options)
    except PolynomialError as exc:
        if f.is_commutative:
            raise PolynomialError(exc) from exc
        # non-commutative: decompose the commutative pieces only and
        # rebuild the expression around the non-commutative structure.
        if f.is_Mul:
            c, nc = f.args_cnc(split_1=False)
            nc = f.func(*[apart(i, x=x, full=full, **_options) for i in nc])
            if c:
                c = apart(f.func._from_args(c), x=x, full=full, **_options)
                return c*nc
            else:
                return nc
        elif f.is_Add:
            c = []
            nc = []
            for i in f.args:
                if i.is_commutative:
                    c.append(i)
                else:
                    nc.append(apart(i, x=x, full=full, **_options))
            return apart(f.func(*c), x=x, full=full, **_options) + f.func(*nc)
        else:
            # Fall back to decomposing every immediate subexpression that
            # can be handled and substituting the results back in.
            reps = []
            pot = preorder_traversal(f)
            next(pot)
            for e in pot:
                reps.append((e, apart(e, x=x, full=full, **_options)))
                pot.skip()  # this was handled successfully
            return f.xreplace(dict(reps))

    if P.is_multivariate:
        # Cancellation may eliminate the extra variables; retry on the result.
        fc = f.cancel()
        if fc != f:
            return apart(fc, x=x, full=full, **_options)

        raise NotImplementedError(
            'multivariate partial fraction decomposition')

    common, P, Q = P.cancel(Q)

    # Split off the polynomial part and clear rational denominators.
    poly, P = P.div(Q, auto=True)
    P, Q = P.rat_clear_denoms(Q)

    if Q.degree() <= 1:
        # An (at most) linear denominator is already fully decomposed.
        partial = P/Q
    else:
        if not full:
            partial = apart_undetermined_coeffs(P, Q)
        else:
            partial = apart_full_decomposition(P, Q)

    terms = Integer(0)

    # Factor each plain term for readability; RootSum terms are kept as-is.
    for term in Add.make_args(partial):
        if term.has(RootSum):
            terms += term
        else:
            terms += factor(term)

    return common*(poly.as_expr() + terms)
def apart_undetermined_coeffs(P, Q):
    """Partial fractions via method of undetermined coefficients."""
    # Fresh dummy symbols serve as the unknown numerator coefficients.
    X = numbered_symbols(cls=Dummy)
    partial, symbols = [], []

    _, factors = Q.factor_list()

    # For each irreducible factor f appearing with multiplicity k in Q, set
    # up one unknown numerator (degree < deg f) per power 1..k; q tracks Q
    # with the used powers of f divided out.
    for f, k in factors:
        n, q = f.degree(), Q

        for i in range(1, k + 1):
            coeffs, q = list(itertools.islice(X, n)), q.quo(f)
            partial.append((coeffs, q, f, i))
            symbols.extend(coeffs)

    # Work over Q's coefficient domain extended by the unknown coefficients.
    dom = Q.domain.inject(*symbols)
    F = Integer(0).as_poly(Q.gen, domain=dom)

    # F accumulates the numerator obtained by recombining the candidate
    # decomposition over the common denominator Q.
    for i, (coeffs, q, f, k) in enumerate(partial):
        h = Poly(coeffs, Q.gen, domain=dom)
        partial[i] = (h, f, k)
        q = q.set_domain(dom)
        F += h*q

    system, result = [], Integer(0)

    # Matching coefficients of F against P yields a linear system in the
    # unknown coefficients.
    for (k,), coeff in F.terms():
        system.append(coeff - P.coeff_monomial((k,)))

    # Imported here to avoid a circular import at module load time.
    from ..solvers import solve
    solution = solve(system, symbols)[0]

    # Substitute the solved coefficients back into each fraction.
    for h, f, k in partial:
        h = h.as_expr().subs(solution)
        result += h/f.as_expr()**k

    return result
def apart_full_decomposition(P, Q):
    """
    Bronstein's full partial fraction decomposition algorithm.

    Given a univariate rational function ``f``, performing only GCD
    operations over the algebraic closure of the initial ground domain
    of definition, compute full partial fraction decomposition with
    fractions having linear denominators.

    Note that no factorization of the initial denominator of ``f`` is
    performed. The final decomposition is formed in terms of a sum of
    :class:`RootSum` instances.

    References
    ==========

    * :cite:`Bronstein1993partial`

    """
    # Delegate to the structured decomposition and reassemble a plain
    # expression; P.gens[0] is the generator of the univariate numerator.
    return assemble_partfrac_list(apart_list(P/Q, P.gens[0]))
def apart_list(f, x=None, dummies=None, **options):
    """
    Compute partial fraction decomposition of a rational function
    and return the result in structured form.

    Given a rational function ``f`` compute the partial fraction decomposition
    of ``f``. Only Bronstein's full partial fraction decomposition algorithm
    is supported by this method. The return value is highly structured and
    perfectly suited for further algorithmic treatment rather than being
    human-readable. The function returns a tuple holding three elements:

    * The first item is the common coefficient, free of the variable `x` used
      for decomposition. (It is an element of the base field `K`.)

    * The second item is the polynomial part of the decomposition. This can be
      the zero polynomial. (It is an element of `K[x]`.)

    * The third part itself is a list of quadruples. Each quadruple
      has the following elements in this order:

      - The (not necessarily irreducible) polynomial `D` whose roots `w_i`
        appear in the linear denominator of a bunch of related fraction terms.
        (This item can also be a list of explicit roots. However, at the moment
        ``apart_list`` never returns a result this way, but the related
        ``assemble_partfrac_list`` function accepts this format as input.)

      - The numerator of the fraction, written as a function of the root `w`

      - The linear denominator of the fraction *excluding its power exponent*,
        written as a function of the root `w`.

      - The power to which the denominator has to be raised.

    One can always rebuild a plain expression by using the function
    ``assemble_partfrac_list``.

    Examples
    ========

    A first example:

    >>> f = (2*x**3 - 2*x) / (x**2 - 2*x + 1)
    >>> pfd = apart_list(f)
    >>> pfd
    (1,
    Poly(2*x + 4, x, domain='ZZ'),
    [(Poly(_w - 1, _w, domain='ZZ'), Lambda(_a, 4), Lambda(_a, x - _a), 1)])

    >>> assemble_partfrac_list(pfd)
    2*x + 4 + 4/(x - 1)

    Second example:

    >>> f = (-2*x - 2*x**2) / (3*x**2 - 6*x)
    >>> pfd = apart_list(f)
    >>> pfd
    (-1,
    Poly(2/3, x, domain='QQ'),
    [(Poly(_w - 2, _w, domain='ZZ'), Lambda(_a, 2), Lambda(_a, x - _a), 1)])

    >>> assemble_partfrac_list(pfd)
    -2/3 - 2/(x - 2)

    Another example, showing symbolic parameters:

    >>> pfd = apart_list(t/(x**2 + x + t), x)
    >>> pfd
    (1,
    Poly(0, x, domain='ZZ[t]'),
    [(Poly(_w**2 + _w + t, _w, domain='ZZ[t]'),
    Lambda(_a, -2*t*_a/(4*t - 1) - t/(4*t - 1)),
    Lambda(_a, x - _a), 1)])

    >>> assemble_partfrac_list(pfd)
    RootSum(t + _w**2 + _w, Lambda(_a, (-2*t*_a/(4*t - 1) - t/(4*t - 1))/(x - _a)))

    This example is taken from Bronstein's original paper:

    >>> f = 36 / (x**5 - 2*x**4 - 2*x**3 + 4*x**2 + x - 2)
    >>> pfd = apart_list(f)
    >>> pfd
    (1,
    Poly(0, x, domain='ZZ'),
    [(Poly(_w - 2, _w, domain='ZZ'), Lambda(_a, 4), Lambda(_a, x - _a), 1),
    (Poly(_w**2 - 1, _w, domain='ZZ'), Lambda(_a, -3*_a - 6), Lambda(_a, x - _a), 2),
    (Poly(_w + 1, _w, domain='ZZ'), Lambda(_a, -4), Lambda(_a, x - _a), 1)])

    >>> assemble_partfrac_list(pfd)
    -4/(x + 1) - 3/(x + 1)**2 - 9/(x - 1)**2 + 4/(x - 2)

    See also
    ========

    apart, assemble_partfrac_list

    References
    ==========

    * :cite:`Bronstein1993partial`

    """
    allowed_flags(options, [])

    f = sympify(f)

    if f.is_Atom:
        # Atoms (symbols, numbers) have nothing to decompose.
        return f
    else:
        P, Q = f.as_numer_denom()

    options = set_defaults(options, extension=True)
    (P, Q), _ = parallel_poly_from_expr((P, Q), x, **options)

    if P.is_multivariate:
        raise NotImplementedError('multivariate partial fraction decomposition')

    common, P, Q = P.cancel(Q)

    # Split off the polynomial part and clear rational denominators.
    poly, P = P.div(Q, auto=True)
    P, Q = P.rat_clear_denoms(Q)

    polypart = poly

    if dummies is None:
        # The default generator yields the *same* dummy over and over, so
        # all returned root polynomials share one symbol (conventionally _w).
        def dummies(name):
            d = Dummy(name)
            while True:
                yield d

        dummies = dummies('w')

    rationalpart = apart_list_full_decomposition(P, Q, dummies)

    return common, polypart, rationalpart
def apart_list_full_decomposition(P, Q, dummygen):
    """
    Bronstein's full partial fraction decomposition algorithm.

    Given a univariate rational function ``f``, performing only GCD
    operations over the algebraic closure of the initial ground domain
    of definition, compute full partial fraction decomposition with
    fractions having linear denominators.

    Note that no factorization of the initial denominator of ``f`` is
    performed. The final decomposition is formed in terms of a sum of
    :class:`RootSum` instances.

    References
    ==========

    * :cite:`Bronstein1993partial`

    """
    f, x, U = P/Q, P.gen, []

    # u is a placeholder function of x standing for the square-free factor;
    # a is the bound variable of the returned Lambda expressions.
    u = Function('u')(x)
    a = Dummy('a')

    # Square-free decomposition of Q; fold a non-trivial content into the
    # factor list so the list fully represents Q.
    Q_c, Q_sqf = Q.sqf_list()
    if Q_c != 1 and Q_sqf:
        if Q_sqf[0][1] == 1:
            Q_sqf[0] = Q_c*Q_sqf[0][0], 1
        else:
            Q_sqf.insert(0, (Q_c.as_poly(x), 1))

    partial = []

    for d, n in Q_sqf:
        b = d.as_expr()
        U += [u.diff((x, n - 1))]

        h = cancel(f*b**n) / u**n

        # H[j] holds the j-th derivative of h divided by j! (built
        # incrementally); subs maps the placeholder derivatives of u back
        # to derivatives of the actual factor b.
        H, subs = [h], []

        for j in range(1, n):
            H += [H[-1].diff(x) / j]

        for j in range(1, n + 1):
            subs += [(U[j - 1], b.diff((x, j)) / j)]

        for j in range(n):
            P, Q = cancel(H[j]).as_numer_denom()

            for i in range(j + 1):
                P = P.subs([subs[j - i]])

            Q = Q.subs([subs[0]])

            P = P.as_poly(x)
            Q = Q.as_poly(x)

            G = P.gcd(d)
            D = d.quo(G)

            # Invert Q modulo D and reduce the numerator accordingly.
            B, g = Q.half_gcdex(D)
            b = (P * B.quo(g)).rem(D)

            # Express the term as functions of a fresh root symbol.
            Dw = D.subs({x: next(dummygen)})
            numer = Lambda(a, b.as_expr().subs({x: a}))
            denom = Lambda(a, (x - a))
            exponent = n-j

            partial.append((Dw, numer, denom, exponent))

    return partial
def assemble_partfrac_list(partial_list):
    r"""Reassemble a full partial fraction decomposition
    from a structured result obtained by the function ``apart_list``.

    Examples
    ========

    This example is taken from Bronstein's original paper:

    >>> f = 36 / (x**5 - 2*x**4 - 2*x**3 + 4*x**2 + x - 2)
    >>> pfd = apart_list(f)
    >>> pfd
    (1,
    Poly(0, x, domain='ZZ'),
    [(Poly(_w - 2, _w, domain='ZZ'), Lambda(_a, 4), Lambda(_a, x - _a), 1),
    (Poly(_w**2 - 1, _w, domain='ZZ'), Lambda(_a, -3*_a - 6), Lambda(_a, x - _a), 2),
    (Poly(_w + 1, _w, domain='ZZ'), Lambda(_a, -4), Lambda(_a, x - _a), 1)])

    >>> assemble_partfrac_list(pfd)
    -4/(x + 1) - 3/(x + 1)**2 - 9/(x - 1)**2 + 4/(x - 2)

    If we happen to know some roots we can provide them easily inside the structure:

    >>> pfd = apart_list(2/(x**2-2))
    >>> pfd
    (1,
    Poly(0, x, domain='ZZ'),
    [(Poly(_w**2 - 2, _w, domain='ZZ'),
    Lambda(_a, _a/2), Lambda(_a, x - _a),
    1)])

    >>> pfda = assemble_partfrac_list(pfd)
    >>> pfda
    RootSum(_w**2 - 2, Lambda(_a, _a/(x - _a)))/2

    >>> pfda.doit()
    -sqrt(2)/(2*(x + sqrt(2))) + sqrt(2)/(2*(x - sqrt(2)))

    >>> a = Dummy('a')
    >>> pfd = (1, Integer(0).as_poly(x),
    ...        [([sqrt(2), -sqrt(2)],
    ...          Lambda(a, a/2), Lambda(a, -a + x), 1)])

    >>> assemble_partfrac_list(pfd)
    -sqrt(2)/(2*(x + sqrt(2))) + sqrt(2)/(2*(x - sqrt(2)))

    See also
    ========

    apart, apart_list

    """
    # Common factor
    common = partial_list[0]

    # Polynomial part
    polypart = partial_list[1]
    pfd = polypart.as_expr()

    # Rational parts
    for r, nf, df, ex in partial_list[2]:
        if isinstance(r, Poly):
            # Assemble in case the roots are given implicitly by a polynomial
            an, nu = nf.variables, nf.expr
            ad, de = df.variables, df.expr

            # Hack to make dummies equal because Lambda created new Dummies
            de = de.subs({ad[0]: an[0]})
            func = Lambda(an, nu/de**ex)
            pfd += RootSum(r, func, auto=False, quadratic=False)
        else:
            # Assemble in case the roots are given explicitly by a list of
            # algebraic numbers
            for root in r:
                pfd += nf(root)/df(root)**ex

    return common*pfd
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojo/nls/it/colors.js | ({"lightsteelblue":"blu acciao chiaro","orangered":"vermiglio","midnightblue":"blu melanzana scuro","cadetblue":"verde acqua","seashell":"sabbia rosa","slategrey":"grigio ardesia","coral":"corallo","darkturquoise":"turchese scuro","antiquewhite":"bianco antico","mediumspringgreen":"verde primavera medio","salmon":"salmone","darkgrey":"grigio scuro","ivory":"avorio","greenyellow":"giallo verde","mistyrose":"rosa pallido","lightsalmon":"salmone chiaro","silver":"grigio 25%","dimgrey":"grigio 80%","orange":"arancione","white":"bianco","navajowhite":"pesca chiaro","royalblue":"blu reale","deeppink":"ciclamino","lime":"verde fluorescente","oldlace":"mandorla","chartreuse":"verde brillante","darkcyan":"ciano scuro","yellow":"giallo","linen":"lino","olive":"verde oliva","gold":"oro","lawngreen":"verde prato","lightyellow":"giallo chiaro","tan":"grigio bruno","darkviolet":"viola scuro","lightslategrey":"grigio ardesia chiaro","grey":"grigio","darkkhaki":"kaki scuro","green":"verde","deepskyblue":"azzurro cielo scuro","aqua":"acqua","sienna":"cuoio","mintcream":"bianco nuvola","rosybrown":"marrone rosato","mediumslateblue":"blu ardesia medio","magenta":"magenta","lightseagreen":"verde mare chiaro","cyan":"ciano","olivedrab":"marrone oliva","darkgoldenrod":"ocra scuro","slateblue":"blu ardesia","mediumaquamarine":"acquamarina medio","lavender":"lavanda","mediumseagreen":"verde mare medio","maroon":"scarlatto","darkslategray":"grigio ardesia scuro","mediumturquoise":"turchese medio","ghostwhite":"bianco gesso","darkblue":"blu scuro","mediumvioletred":"vinaccia","brown":"marrone","lightgray":"grigio chiaro","sandybrown":"marrone sabbia","pink":"rosa","firebrick":"rosso mattone","indigo":"indaco","snow":"neve","darkorchid":"orchidea scuro","turquoise":"turchese","chocolate":"cioccolato","springgreen":"verde primavera","moccasin":"mocassino","navy":"blu notte","lemonchiffon":"caffelatte 
chiaro","teal":"verde turchese","floralwhite":"bianco giglio","cornflowerblue":"blu fiordaliso","paleturquoise":"turchese pallido","purple":"porpora","gainsboro":"grigio 10%","plum":"prugna","red":"rosso","blue":"blu","forestgreen":"verde foresta","darkgreen":"verde scuro","honeydew":"bianco germoglio","darkseagreen":"verde mare scuro","lightcoral":"rosa corallo","palevioletred":"vinaccia chiaro","mediumpurple":"porpora medio","saddlebrown":"cacao","darkmagenta":"magenta scuro","thistle":"rosa cenere","whitesmoke":"bianco fumo","wheat":"sabbia","violet":"viola","lightskyblue":"azzurro cielo chiaro","goldenrod":"ocra gialla","mediumblue":"blu medio","skyblue":"azzurro cielo","crimson":"cremisi","darksalmon":"salmone scuro","darkred":"rosso scuro","darkslategrey":"grigio ardesia scuro","peru":"marrone terra bruciata","lightgrey":"grigio chiaro","lightgoldenrodyellow":"giallo tenue","blanchedalmond":"mandorla chiaro","aliceblue":"blu alice","bisque":"incarnato","slategray":"grigio ardesia","palegoldenrod":"giallo zolfo chiaro","darkorange":"arancione scuro","aquamarine":"acquamarina","lightgreen":"verde chiaro","burlywood":"tabacco","dodgerblue":"blu d'oriente","darkgray":"grigio scuro","lightcyan":"ciano chiaro","powderblue":"azzurro polvere","blueviolet":"blu violetto","orchid":"orchidea","dimgray":"grigio 80%","beige":"beige","fuchsia":"fucsia","lavenderblush":"bianco rosato","hotpink":"rosa acceso","steelblue":"blu acciao","tomato":"pomodoro","lightpink":"rosa chiaro","limegreen":"verde lime","indianred":"terra indiana","papayawhip":"cipria","lightslategray":"grigio ardesia chiaro","gray":"grigio","mediumorchid":"orchidea medio","cornsilk":"crema","black":"nero","seagreen":"verde mare","darkslateblue":"blu ardesia scuro","khaki":"kaki","lightblue":"azzurro","palegreen":"verde pallido","azure":"azzurro ghiaccio","peachpuff":"pesca","darkolivegreen":"verde oliva scuro","yellowgreen":"giallo verde"}) | PypiClean |
/Engezny-1.3.tar.gz/Engezny-1.3/README.md | # Engezny
***Engezny*** is a Python package that quickly generates all possible charts from your dataframe and saves them for you. Currently, Engezny supports only single-parameter visualization using the pie, bar and barh chart types.
## New release features:
1. Full control with colors used in charts.
2. Arabic issues solved.
4. Base of data control.
5. Issues solved with saving charts.
6. New list of options which make your customization easier (Check them in parameters).
## Advantages
1. Totally supports Arabic Language.
2. Handles multi-parameters columns and separates them by a comma.
3. The output charts are fully descriptive.
## Installation Guide
To install ***Engezny*** Package:
- Make sure python3 and pip is installed in your machine.
- Use the command line and type: `pip install Engezny` or `pip3 install Engezny`
- You can check if the package is installed by typing: `pip freeze | grep Engezny` or `pip3 freeze | grep Engezny`
## How to Use?
The most simple format of using ***Engezny*** is as follow:
```python
from Engezny import Engezny
import pandas as pd
Data = pd.read_csv("FileName.csv")
Egz = Engezny(Data)
Egz.visualize()
```
## Visualization Parameters
- `start (int) default = 0`: The first column to start with index
- `end (int) default = None`: The final column to end with index
- `location (str) default ='Charts/'`: The folder location to save charts in. The package creates the folder if it does not exist
- `extention (str) default = 'jpg'`: The file extension of the created charts
- `colors (list) default = None`: A list of colors HEX codes to use in charts
- `save (bool) default = True`: The save option to save generated charts to the local machine
- `multi_sep (str) default = None`: the separator delimiter for multiselection columns
- `single_sep (str) default = None`: A delimiter to split single values with
- `figsize (tuple) default = (15, 15)`: The figure size of charts
- `base (str) default = 'total_values'`: The base to use in charts. Makes a difference with multiselection columns and has three available options:
- `'total_values'`: Uses the sum of total values as a base
- `'data_base'`: Uses the total number of rows in data as a base
- `'column_base'`: Uses the count of non-null cells in the column as a base
- `other (bool) default = False`: The other option takes only the top 4 values and combines the rest into an "other" value
| PypiClean |
/Keg-0.11.0.tar.gz/Keg-0.11.0/keg/db/dialect_ops.py | import sqlalchemy as sa
from sqlalchemy import MetaData
from ..db import db
class DialectOperations(object):
    """Base class bundling per-dialect database setup/teardown helpers.

    Instances are bound to a single SQLAlchemy engine (one Keg "bind") and
    expose create/drop helpers plus dialect-scoped option resolution.
    """

    # Registry mapping a SQLAlchemy dialect name to its operations subclass.
    dialect_map = {}
    # Subclasses set this to a dict of option name -> default value.
    option_defaults = None

    def __init__(self, engine, bind_name, options=None):
        # this engine is tied to a particular "bind" use it instead of db.engine
        self.engine = engine
        self.bind_name = bind_name
        self.assign_options(options or {})

    def assign_options(self, option_pairs):
        """Resolve each declared option and store it as ``self.opt_<name>``."""
        if not self.option_defaults:
            return
        for option_key, fallback in self.option_defaults.items():
            # Each option is resolved from the most specific config key set
            # for the dialect and bind:
            #   bind-level dialect option > generic dialect option > default
            bind_scoped = f'bind.{self.bind_name}.{option_key}'
            dialect_scoped = f'{self.dialect_name}.{option_key}'
            resolved = option_pairs.get(
                bind_scoped, option_pairs.get(dialect_scoped, fallback))
            setattr(self, f'opt_{option_key}', resolved)

    def execute_sql(self, statements):
        """Run the given SQL strings inside a single transaction."""
        with self.engine.begin() as conn:
            for statement in statements:
                conn.execute(sa.text(statement))

    def create_all(self):
        """Create schemas (dialect-specific), then all tables for this bind."""
        self.create_schemas()
        db.create_all(self.bind_name)

    def create_schemas(self):
        # Default: nothing to do; dialects with schema support override this.
        pass

    @classmethod
    def create_for(cls, engine, bind_name, options):
        """Instantiate the registered subclass matching the engine's dialect."""
        dialect_name = engine.dialect.name
        ops_cls = cls.dialect_map.get(dialect_name)
        if ops_cls is None:
            raise Exception('DialectOperations does not yet support the "{}" database.'
                            .format(dialect_name))
        return ops_cls(engine, bind_name, options)

    def on_connect(self, dbapi_connection, connection_record):
        # Hook for per-connection DBAPI setup; default is a no-op.
        pass
class PostgreSQLOps(DialectOperations):
    """Dialect operations for PostgreSQL binds."""

    dialect_name = 'postgresql'
    option_defaults = {'schemas': ('public',)}

    def create_schemas(self):
        # Ensure every configured schema exists and is owned/usable by the
        # connection user; CREATE SCHEMA IF NOT EXISTS keeps this idempotent.
        sql = []
        connection_user = self.engine.url.username
        for schema in self.opt_schemas:
            sql.extend([
                f'CREATE SCHEMA IF NOT EXISTS "{schema}" AUTHORIZATION "{connection_user}";',
                f'GRANT ALL ON SCHEMA "{schema}" TO "{connection_user}";',
            ])
        self.execute_sql(sql)

    # NOTE: no create_all() override is needed.  The base implementation
    # already calls create_schemas() before db.create_all(); the previous
    # override called create_schemas() a second time on every create_all(),
    # issuing the schema DDL twice per run.

    def drop_all(self):
        # Dropping each schema with CASCADE removes all contained objects.
        sql = []
        for schema in self.opt_schemas:
            sql.extend([
                'DROP SCHEMA IF EXISTS "{}" CASCADE;'.format(schema),
            ])
        self.execute_sql(sql)


DialectOperations.dialect_map['postgresql'] = PostgreSQLOps
class SQLiteOps(DialectOperations):
    """Dialect operations for SQLite binds."""

    dialect_name = 'sqlite'

    def on_connect(self, dbapi_connection, connection_record):
        # Want SQLite to use foreign keys
        # todo: if this becomes undesirable for some reason, we can make it an option.
        cur = dbapi_connection.cursor()
        cur.execute("PRAGMA foreign_keys=ON")
        cur.close()

    def drop_all(self):
        # First remove all views, collected from sqlite_master.
        view_query = "select name from sqlite_master where type='view'"
        with self.engine.begin() as conn:
            result = conn.execute(sa.text(view_query))
            view_drops = [f'drop view {row.name}' for row in result]
        self.execute_sql(view_drops)

        # Reflect the remaining tables into a scratch MetaData instance (so
        # the metadata associated with our entities is untouched) and drop
        # them in reverse dependency order.
        scratch = MetaData()
        scratch.reflect(self.engine)
        for table in reversed(scratch.sorted_tables):
            try:
                with self.engine.begin() as conn:
                    conn.execute(sa.text(f'drop table {table.name}'))
            except Exception as exc:
                # A table may already be gone via cascading drops; anything
                # else is a real error and is re-raised.
                if 'no such table' not in str(exc):
                    raise


DialectOperations.dialect_map['sqlite'] = SQLiteOps
class MicrosoftSQLOps(DialectOperations):
    # Dialect operations for SQL Server ("mssql") binds.
    dialect_name = 'mssql'
    option_defaults = {'schemas': tuple()}

    def drop_all(self):
        # generate drops for all objects, being careful of the schema the object belongs to
        # Keys are sys.objects type codes (a tuple key groups several codes
        # into one query); values are the DROP statement templates.
        mapping = {
            'P': 'drop procedure [{schema_name}].[{name}]',
            'C': 'alter table [{schema_name}].[{parent_name}] drop constraint [{name}]',
            ('FN', 'IF', 'TF'): 'drop function [{schema_name}].[{name}]',
            'V': 'drop view [{schema_name}].[{name}]',
            'F': 'alter table [{schema_name}].[{parent_name}] drop constraint [{name}]',
            'U': 'drop table [{schema_name}].[{name}]',
        }
        delete_sql = []
        for type, drop_sql in mapping.items():
            # Joining a one-character string key yields the key itself;
            # joining a tuple yields the comma-separated IN (...) list.
            sql = (
                "select name, object_name( parent_object_id ) as parent_name "
                ", OBJECT_SCHEMA_NAME(object_id) as schema_name "
                "from sys.objects where type in ('{}')"
                " and name not like '#%'"  # Avoid cached temporary tables
            ).format("', '".join(type))
            with self.engine.begin() as conn:
                rows = conn.execute(sa.text(sql))
                for row in rows:
                    delete_sql.append(
                        drop_sql.format(
                            name=row.name,
                            parent_name=row.parent_name,
                            schema_name=row.schema_name,
                        )
                    )
        # removing schemas can be tricky. SQL Server 2016+ supports DROP SCHEMA IF EXISTS ...
        # syntax, but we need to support earlier versions. Technically, an IF EXISTS(...) DROP
        # SCHEMA should work, but testing shows the drop never happens when executed in this
        # fashion. So, query sys.schemas directly, and drop any schemas that we are interested
        # in (according to the bind opts)
        schema_sql = 'select name from sys.schemas'
        with self.engine.begin() as conn:
            rows = conn.execute(sa.text(schema_sql))
            for row in rows:
                if row.name in self.opt_schemas:
                    delete_sql.append('drop schema {}'.format(row.name))
        # all drops should be in order, execute them all
        self.execute_sql(delete_sql)

    def create_schemas(self):
        sql = []
        for schema in self.opt_schemas:
            # MSSQL has to run CREATE SCHEMA as its own batch
            # So, we can't use an IF NOT EXISTS at the same time. Test first, then create.
            with self.engine.begin() as conn:
                existing = conn.execute(
                    sa.text(
                        "SELECT COUNT(*) FROM sys.schemas WHERE name = N'{}'".format(
                            schema
                        )
                    )
                ).scalar()
            if not existing:
                sql.extend([
                    'CREATE SCHEMA {}'.format(schema),
                ])
        self.execute_sql(sql)


DialectOperations.dialect_map['mssql'] = MicrosoftSQLOps
/AdHoc-0.3.2.tar.gz/AdHoc-0.3.2/README.txt | .. -*- mode: rst; coding: utf-8 -*-
.. role:: mod(strong)
.. role:: func(strong)
.. role:: class(strong)
.. role:: attr(strong)
.. role:: meth(strong)
AdHoc Standalone Python Script Generator
########################################
The *AdHoc* compiler can be used as a program (see `Script Usage`_)
as well as a module (see :class:`adhoc.AdHoc`).
Since the *AdHoc* compiler itself is installed as a compiled *AdHoc*
script, it serves as its own usage example.
After installation of the *adhoc.py* script, the full source can be
obtained in directory ``__adhoc__``, by executing::
adhoc.py --explode
.. contents::
Purpose
=======
*AdHoc* provides python scripts with
- template facilities
- default file generation
- standalone module inclusion
*AdHoc* has been designed to provide an implode/explode cycle:
======== ======= ========= ======= =========
source_0 xsource_0
source_1 implode explode xsource_1
... ------> script.py ------> ...
source_n xsource_n
======== ======= ========= ======= =========
where ``xsource_i === source_i``. I.e., ``diff source_i xsource_i``
does not produce any output.
Quickstart
==========
module.py:
| # -\*- coding: utf-8 -\*-
| mvar = 'value'
script.py:
| # -\*- coding: utf-8 -\*-
| # |adhoc_run_time|
| import module # |adhoc|
| print('mvar: ' + module.mvar)
Compilation::
adhoc.py --compile script.py >/tmp/script-compiled.py
Execution outside source directory::
cd /tmp && python script-compiled.py
shows::
mvar: value
Decompilation::
cd /tmp && \
mkdir -p __adhoc__ && \
adhoc.py --decompile <script-compiled.py >__adhoc__/script.py
.. |@:| replace:: ``@:``
.. |:@| replace:: ``:@``
.. |adhoc_run_time| replace:: |@:|\ ``adhoc_run_time``\ |:@|
.. |adhoc| replace:: |@:|\ ``adhoc``\ |:@|
Description
===========
The *AdHoc* compiler/decompiler parses text for tagged lines and
processes them as instructions.
The minimal parsed entity is a tagged line, which is any line
containing a recognized *AdHoc* tag.
All *AdHoc* tags are enclosed in delimiters (default: |@:| and |:@|). E.g:
|@:|\ adhoc\ |:@|
Delimiters come in several flavors, namely line and section
delimiters and a set of macro delimiters. By default, line and
section delimiters are the same, but they can be defined separately.
`Flags`_ are tagged lines, which denote a single option or
command. E.g.:
| import module # |@:|\ adhoc\ |:@|
| # |@:|\ adhoc_self\ |:@| my_module_name
`Sections`_ are tagged line pairs, which delimit a block of
text. The first tagged line opens the section, the second tagged
line closes the section. E.g.:
| # |@:|\ adhoc_enable\ |:@|
| # disabled_command()
| # |@:|\ adhoc_enable\ |:@|
`Macros`_ have their own delimiters (default: |@m| and |m>|). E.g.:
| # |@m|\ MACRO_NAME\ |m>|
The implementation is realized as class :class:`adhoc.AdHoc` which
is mainly used as a namespace. The run-time part of
:class:`adhoc.AdHoc` -- which handles module import and file export
-- is included verbatim as class :class:`RtAdHoc` in the generated
output.
Flags
-----
:|adhoc_run_time|:
The place where the *AdHoc* run-time code is added. This flag must
be present in files, which use the |adhoc| import feature. It
is not needed for the enable/disable features.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_run_time|
:|adhoc| [force] [flat | full]:
Mark import line for run-time compilation.
If ``force`` is specified, the module is imported, even if it
was imported before.
If ``flat`` is specified, the module is not recursively
exported.
If ``full`` is specified, the module is recursively
exported. (This parameter takes priority over ``flat``).
If neither ``flat`` nor ``full`` are specified,
:attr:`adhoc.AdHoc.flat` determines the export scope.
This flag is ignored, if the line is commented out. E.g.:
| # import module # |adhoc|
.. _adhoc_include:
:|adhoc_include| file_spec, ...:
Include files for unpacking. ``file_spec`` is one of
:file:
``file`` is used for both input and output.
:file ``from`` default-file:
``file`` is used for input and output. if ``file`` does not
exist, ``default-file`` is used for input.
:source-file ``as`` output-file:
``source-file`` is used for input. ``output-file`` is used for
output. If ``source-file`` does not exist, ``output-file`` is
used for input also.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_include| file
:|adhoc_verbatim| [flags] file_spec, ...:
Include files for verbatim extraction. See adhoc_include_ for
``file_spec``.
The files are included as |adhoc_template_v| sections. *file* is used
as *export_file* mark. If *file* is ``--``, the template disposition
becomes standard output.
Optional flags can be any combination of ``[+|-]NUM`` for
indentation and ``#`` for commenting. E.g.:
| # |adhoc_verbatim| +4# my_file from /dev/null
*my_file* (or ``/dev/null``) is read, commented and indented 4
spaces.
If the |adhoc_verbatim| tag is already indented, the specified
indentation is subtracted.
This flag is ignored, if double commented. E.g.:
| # # |adhoc_verbatim| file
:|adhoc_self| name ...:
Mark name(s) as currently compiling. This is useful, if
``__init__.py`` imports other module parts. E.g:
| import pyjsmo # |@:|\ adhoc\ |:@|
where ``pyjsmo/__init__.py`` contains:
| # |@:|\ adhoc_self\ |:@| pyjsmo
| from pyjsmo.base import * # |@:|\ adhoc\ |:@|
:|adhoc_compiled|:
If present, no compilation is done on this file. This flag is
added by the compiler to the run-time version.
Sections
--------
:|adhoc_enable|:
Leading comment char and exactly one space are removed from lines
in these sections.
:|adhoc_disable|:
A comment char and exactly one space are added to non-blank
lines in these sections.
:|adhoc_template| -mark | export_file:
If mark starts with ``-``, the output disposition is standard output
and the template is ignored, when exporting.
Otherwise, the template is written to output_file during export.
All template parts with the same mark/export_file are concatenated
to a single string.
:|adhoc_template_v| export_file:
Variation of |adhoc_template|. Automatically generated by |adhoc_verbatim|.
:|adhoc_uncomment|:
Treated like |adhoc_enable| before template output.
:|adhoc_indent| [+|-]NUM:
Add or remove indentation before template output.
:|adhoc_import|:
Imported files are marked as such by the compiler. There is no
effect during compilation.
:|adhoc_unpack|:
Included files are marked as such by the compiler. There is no
effect during compilation.
:|adhoc_remove|:
Added sections are marked as such by the compiler. Removal is
done when exporting.
Before compilation, existing |adhoc_remove| tags are renamed to
|adhoc_remove_|.
After automatically added |adhoc_remove| sections have been
removed during export, remaining |adhoc_remove_| tags are
renamed to |adhoc_remove| again.
.. note:: Think twice, before removing material from original
sources at compile time. It will violate the condition
``xsource_i === source_i``.
:|adhoc_run_time_engine|:
The run-time class :class:`RtAdHoc` is enclosed in this special
template section.
It is exported as ``rt_adhoc.py`` during export.
Macros
------
Macros are defined programmatically::
AdHoc.macros[MACRO_NAME] = EXPANSION_STRING
A macro is invoked by enclosing a MACRO_NAME in
:attr:`adhoc.AdHoc.macro_call_delimiters`. (Default: |@m|, |m>|).
:|MACRO_NAME|:
Macro call.
Internal
--------
:|adhoc_run_time_class|:
Marks the beginning of the run-time class. This is only
recognized in the *AdHoc* program/module.
:|adhoc_run_time_section|:
All sections are concatenated and used as run-time code. This is
only recognized in the *AdHoc* program/module.
In order to preserve the ``xsource_i === source_i`` bijective
condition, macros are expanded/collapsed with special macro
definition sections. (See :attr:`adhoc.AdHoc.macro_xdef_delimiters`;
Default: |<m|, |m@|).
:|adhoc_macro_call|:
Macro call section.
:|adhoc_macro_expansion|:
Macro expansion section.
AdHoc Script
============
.. |adhoc_self| replace:: |@:|\ ``adhoc_self``\ |:@|
.. |adhoc_include| replace:: |@:|\ ``adhoc_include``\ |:@|
.. |adhoc_verbatim| replace:: |@:|\ ``adhoc_verbatim``\ |:@|
.. |adhoc_compiled| replace:: |@:|\ ``adhoc_compiled``\ |:@|
.. |adhoc_enable| replace:: |@:|\ ``adhoc_enable``\ |:@|
.. |adhoc_disable| replace:: |@:|\ ``adhoc_disable``\ |:@|
.. |adhoc_template| replace:: |@:|\ ``adhoc_template``\ |:@|
.. |adhoc_template_v| replace:: |@:|\ ``adhoc_template_v``\ |:@|
.. |adhoc_uncomment| replace:: |@:|\ ``adhoc_uncomment``\ |:@|
.. |adhoc_indent| replace:: |@:|\ ``adhoc_indent``\ |:@|
.. |adhoc_import| replace:: |@:|\ ``adhoc_import``\ |:@|
.. |adhoc_unpack| replace:: |@:|\ ``adhoc_unpack``\ |:@|
.. |adhoc_remove| replace:: |@:|\ ``adhoc_remove``\ |:@|
.. |adhoc_remove_| replace:: |@:|\ ``adhoc_remove_``\ |:@|
.. |adhoc_run_time_class| replace:: |@:|\ ``adhoc_run_time_class``\ |:@|
.. |adhoc_run_time_section| replace:: |@:|\ ``adhoc_run_time_section``\ |:@|
.. |adhoc_run_time_engine| replace:: |@:|\ ``adhoc_run_time_engine``\ |:@|
.. |@m| replace:: ``@|:``
.. |m>| replace:: ``:|>``
.. |<m| replace:: ``<|:``
.. |m@| replace:: ``:|@``
.. |MACRO_NAME| replace:: |@m|\ ``MACRO_NAME``\ |m>|
.. |adhoc_macro_call| replace:: |<m|\ ``adhoc_macro_call``\ |m@|
.. |adhoc_macro_expansion| replace:: |<m|\ ``adhoc_macro_expansion``\ |m@|
.. _Script Usage:
adhoc.py - Python ad hoc compiler.
====== ====================
usage: adhoc.py [OPTIONS] [file ...]
or import adhoc
====== ====================
Options
=======
===================== ==================================================
-c, --compile compile file(s) or standard input into output file
(default: standard output).
-d, --decompile decompile file(s) or standard input into
output directory (default ``__adhoc__``).
-o, --output OUT output file for --compile/output directory for
--decompile.
-q, --quiet suppress warnings
-v, --verbose verbose test output
--debug[=NUM] show debug information
-h, --help display this help message
--documentation display module documentation.
--template list show available templates.
--eide[=COMM] Emacs IDE template list (implies --template list).
--template[=NAME] extract named template to standard
output. Default NAME is ``-``.
--extract[=DIR] extract adhoc files to directory DIR (default: ``.``)
--explode[=DIR] explode script with adhoc in directory DIR
(default ``__adhoc__``)
--implode implode script with adhoc
--install install adhoc.py script
-t, --test run doc tests
===================== ==================================================
*adhoc.py* is compatible with Python 2.4+ and Python 3. (For Python
<2.6 the packages *stringformat* and *argparse* are needed and
included.)
.. _END_OF_HELP:
.. |=NUM| replace:: ``[=NUM]``
Script Examples
===============
Templates
---------
Sections marked by |adhoc_template| can be retrieved as templates on
standard output.
Additionally, all other files compiled into an adhoc file with one of
================ ======================
|adhoc| ==> |adhoc_import|
|adhoc_verbatim| ==> |adhoc_template_v|
|adhoc_include| ==> |adhoc_unpack|
================ ======================
are accessible as templates.
``python adhoc.py --template list`` provides a list of templates:
>>> ign = main('adhoc.py --template list'.split())
================================================= ================================ ================
Command Template Type
================================================= ================================ ================
adhoc.py --template adhoc_test # !adhoc_test adhoc_import
adhoc.py --template adhoc_test.sub # !adhoc_test.sub adhoc_import
adhoc.py --template argparse_local # !argparse_local adhoc_import
adhoc.py --template namespace_dict # !namespace_dict adhoc_import
adhoc.py --template stringformat_local # !stringformat_local adhoc_import
adhoc.py --template use_case_000_ # !use_case_000_ adhoc_import
adhoc.py --template use_case_001_templates_ # !use_case_001_templates_ adhoc_import
adhoc.py --template use_case_002_include_ # !use_case_002_include_ adhoc_import
adhoc.py --template use_case_003_import_ # !use_case_003_import_ adhoc_import
adhoc.py --template use_case_005_nested_ # !use_case_005_nested_ adhoc_import
adhoc.py --template docutils.conf # docutils.conf adhoc_template_v
adhoc.py --template # - adhoc_template
adhoc.py --template README.txt # README.txt adhoc_template
adhoc.py --template adhoc_init # -adhoc_init adhoc_template
adhoc.py --template catch-stdout # -catch-stdout adhoc_template
adhoc.py --template col-param-closure # -col-param-closure adhoc_template
adhoc.py --template doc/USE_CASES.txt # doc/USE_CASES.txt adhoc_template
adhoc.py --template doc/index.rst # doc/index.rst adhoc_template
adhoc.py --template max-width-class # -max-width-class adhoc_template
adhoc.py --template rst-to-ascii # -rst-to-ascii adhoc_template
adhoc.py --template test # -test adhoc_template
adhoc.py --template MANIFEST.in # !MANIFEST.in adhoc_unpack
adhoc.py --template Makefile # !Makefile adhoc_unpack
adhoc.py --template README.css # !README.css adhoc_unpack
adhoc.py --template doc/Makefile # !doc/Makefile adhoc_unpack
adhoc.py --template doc/_static/adhoc-logo-32.ico # !doc/_static/adhoc-logo-32.ico adhoc_unpack
adhoc.py --template doc/adhoc-logo.svg # !doc/adhoc-logo.svg adhoc_unpack
adhoc.py --template doc/conf.py # !doc/conf.py adhoc_unpack
adhoc.py --template doc/make.bat # !doc/make.bat adhoc_unpack
adhoc.py --template doc/z-massage-index.sh # !doc/z-massage-index.sh adhoc_unpack
adhoc.py --template setup.py # !setup.py adhoc_unpack
================================================= ================================ ================
``python adhoc.py --template`` prints the standard template ``-``
(closing delimiter replaced by ellipsis):
>>> ign = main('./adhoc.py --template'.split()) #doctest: +ELLIPSIS
# @:adhoc_disable... allow modification of exploded sources in original place
sys.path.append('__adhoc__')
# @:adhoc_disable...
<BLANKLINE>
# @:adhoc_run_time... The run-time class goes here
# @:adhoc_run_time_engine... settings enabled at run-time
# @:adhoc_enable...
# RtAdHoc.flat = False
# @:adhoc_enable...
# @:adhoc_run_time_engine...
<BLANKLINE>
#import adhoc # @:adhoc...
``python adhoc.py --template test`` prints the template named ``-test``.
the leading ``-`` signifies disposition to standard output:
>>> ign = main('./adhoc.py --template test'.split())
Test template.
Extract
-------
The default destination for extracting files is the current working
directory.
Files extracted consist of
- packed files generated by |adhoc_include|
- templates generated by |adhoc_verbatim|
- templates with a file destination other than standard output
``python adhoc.py --extract __adhoc_extract__`` unpacks the following files into
directory ``__adhoc_extract__``:
>>> import shutil
>>> ign = main('./adhoc.py --extract __adhoc_extract__'.split())
>>> file_list = []
>>> for dir, subdirs, files in os.walk('__adhoc_extract__'):
... file_list.extend([os.path.join(dir, file_) for file_ in files])
>>> for file_ in sorted(file_list):
... printf(file_)
__adhoc_extract__/MANIFEST.in
__adhoc_extract__/Makefile
__adhoc_extract__/README.css
__adhoc_extract__/README.txt
__adhoc_extract__/doc/Makefile
__adhoc_extract__/doc/USE_CASES.txt
__adhoc_extract__/doc/_static/adhoc-logo-32.ico
__adhoc_extract__/doc/adhoc-logo.svg
__adhoc_extract__/doc/conf.py
__adhoc_extract__/doc/index.rst
__adhoc_extract__/doc/make.bat
__adhoc_extract__/doc/z-massage-index.sh
__adhoc_extract__/docutils.conf
__adhoc_extract__/setup.py
__adhoc_extract__/use_case_000_.py
__adhoc_extract__/use_case_001_templates_.py
__adhoc_extract__/use_case_002_include_.py
__adhoc_extract__/use_case_003_import_.py
__adhoc_extract__/use_case_005_nested_.py
>>> shutil.rmtree('__adhoc_extract__')
Export
------
The default destination for exporting files is the
subdirectory ``__adhoc__``.
Files exported consist of
- imported modules generated by |adhoc|
- all files covered in section `Extract`_
``python adhoc.py --explode __adhoc_explode__`` unpacks the following files into
directory ``__adhoc_explode__``:
>>> import shutil
>>> ign = main('./adhoc.py --explode __adhoc_explode__'.split())
>>> file_list = []
>>> for dir, subdirs, files in os.walk('__adhoc_explode__'):
... file_list.extend([os.path.join(dir, file_) for file_ in files])
>>> for file_ in sorted(file_list):
... printf(file_)
__adhoc_explode__/MANIFEST.in
__adhoc_explode__/Makefile
__adhoc_explode__/README.css
__adhoc_explode__/README.txt
__adhoc_explode__/adhoc.py
__adhoc_explode__/adhoc_test/__init__.py
__adhoc_explode__/adhoc_test/sub/__init__.py
__adhoc_explode__/argparse_local.py
__adhoc_explode__/doc/Makefile
__adhoc_explode__/doc/USE_CASES.txt
__adhoc_explode__/doc/_static/adhoc-logo-32.ico
__adhoc_explode__/doc/adhoc-logo.svg
__adhoc_explode__/doc/conf.py
__adhoc_explode__/doc/index.rst
__adhoc_explode__/doc/make.bat
__adhoc_explode__/doc/z-massage-index.sh
__adhoc_explode__/docutils.conf
__adhoc_explode__/namespace_dict.py
__adhoc_explode__/rt_adhoc.py
__adhoc_explode__/setup.py
__adhoc_explode__/stringformat_local.py
__adhoc_explode__/use_case_000_.py
__adhoc_explode__/use_case_001_templates_.py
__adhoc_explode__/use_case_002_include_.py
__adhoc_explode__/use_case_003_import_.py
__adhoc_explode__/use_case_005_nested_.py
>>> shutil.rmtree('__adhoc_explode__')
File Permissions
================
- File mode is restored.
- File ownership is not restored.
- File modification times are restored.
Since only naive datetimes are recorded, this only works correctly
within the same timezone.
.. :ide: COMPILE: render reST as HTML
.. . (let* ((fp (buffer-file-name)) (fn (file-name-nondirectory fp))) (save-match-data (if (string-match-t "[.][^.]*$" fn) (setq fn (replace-match "" nil t fn)))) (let ((args (concat " " fp " | ws_rst2html.py --traceback --cloak-email-addresses | tee " fn ".html "))) (save-buffer) (compile (concat "PATH=\".:$PATH\"; cat " args))))
..
.. Local Variables:
.. mode: rst
.. snip-mode: rst
.. truncate-lines: t
.. symbol-tag-symbol-regexp: "[-0-9A-Za-z_#]\\([-0-9A-Za-z_. ]*[-0-9A-Za-z_]\\|\\)"
.. symbol-tag-auto-comment-mode: nil
.. symbol-tag-srx-is-safe-with-nil-delimiters: nil
.. End:
| PypiClean |
/DXC_AI_MBN-0.0.35-py3-none-any.whl/dxc/ai/run_model/run_model.py | import arrow #normalizing dates
import numpy as np
from sklearn.base import TransformerMixin #impute missing data
from auto_ml import Predictor #ML models
from sklearn.model_selection import train_test_split
import os
import pickle
from contextlib import redirect_stdout
import warnings
import io
from dxc.ai.global_variables import globals_file
from .TimeSeriesModels import getBestForcastingModel
# define the general class of models
class model:
    """Abstract base class for model wrappers used by run_experiment().

    Subclasses implement build/train_and_score/interpret/python_object;
    the base implementations only raise NotImplementedError.
    """

    # NOTE(review): class-level attribute, shared across instances until a
    # subclass rebinds it on self (as prediction.build does).
    __model = []

    def build(self, meta_data):
        """Construct the underlying estimator from column meta data."""
        raise NotImplementedError()

    def train_and_score(self, data):
        """Train on `data` and report a score."""
        raise NotImplementedError()

    def interpret(self):
        """Produce any model interpretation output."""
        raise NotImplementedError()

    def python_object(self):
        # BUG FIX: the original definition omitted `self`, so calling
        # instance.python_object() raised TypeError ("takes 0 positional
        # arguments") instead of the intended NotImplementedError.
        raise NotImplementedError()

    @staticmethod
    def meta_data_key(meta_data, value):
        """Return the first key in `meta_data` whose value equals `value`.

        Raises ValueError (from list.index) if no such value exists.
        """
        key_list = list(meta_data.keys())
        val_list = list(meta_data.values())
        return key_list[val_list.index(value)]
#define the model lifecycle
# define a prediction model
class prediction(model):
    """Supervised prediction model backed by an auto_ml Predictor.

    Subclasses supply the estimator type ("regressor"/"classifier") via
    the `estimator` property.
    """

    @property
    def estimator(self):
        """auto_ml estimator type string; must be provided by a subclass."""
        raise NotImplementedError()

    def build(self, meta_data):
        """Create the Predictor and remember which column is the output."""
        self.__model = Predictor(type_of_estimator=self.estimator, column_descriptions=meta_data)
        self.__label = self.meta_data_key(meta_data, "output")

    def train_and_score(self, data, labels, verbose):
        """Train on an 80% split of `data`, then score on the remaining 20%."""
        training_data, test_data = train_test_split(data, test_size=0.2)
        quiet = (verbose == False)
        # Warnings are suppressed in both modes, exactly as before.
        warnings.filterwarnings('ignore')
        if quiet:
            # Swallow auto_ml's console chatter.
            text_trap = io.StringIO()
            with redirect_stdout(text_trap):
                self.__model.train(training_data, verbose=False, ml_for_analytics=False)
        else:
            self.__model.train(training_data, verbose=True, ml_for_analytics=False)
        self.__model.score(test_data, test_data[self.__label], verbose=(0 if quiet else 1))

    def interpret(self):
        """No interpretation output for prediction models."""
        pass

    def python_object(self):
        """Return the trained Predictor instance."""
        return self.__model
# define a regressor model
class regression(prediction):
    """Prediction model that wraps an auto_ml regressor."""

    @property
    def estimator(self):
        # auto_ml type_of_estimator identifier
        return "regressor"
# define a classification model
class classification(prediction):
    """Prediction model that wraps an auto_ml classifier."""

    @property
    def estimator(self):
        # auto_ml type_of_estimator identifier
        return "classifier"
def run_experiment(design, verbose=False):
    """Run the model lifecycle described by `design` and return the result.

    For design["model"] == 'timeseries' a forecasting model is selected and
    returned directly; otherwise the model object in the design is built,
    trained/scored, interpreted, and its underlying python object returned.
    """
    if design["model"] == 'timeseries':
        return getBestForcastingModel(design['labels'], no_predictions=7, debug=verbose, visualize=False)
    # Flag (module-global bookkeeping) that an experiment has been run.
    globals_file.run_experiment_used = True
    chosen_model = design["model"]
    chosen_model.build(design["meta_data"])
    chosen_model.train_and_score(design["data"], design["labels"], verbose)
    chosen_model.interpret()
    return chosen_model.python_object()
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/sound.py |
import re, sys, threading, time, subprocess, os, atexit
import random
from anki.hooks import addHook
from anki.utils import tmpdir, isWin, isMac
# Shared utils
##########################################################################

# Matches an Anki sound tag, capturing the referenced filename.
_soundReg = r"\[sound:(.*?)\]"

def playFromText(text):
    """Queue every [sound:...] reference found in `text` for playback."""
    for filename in re.findall(_soundReg, text):
        play(filename)

def stripSounds(text):
    """Return `text` with all [sound:...] tags removed."""
    return re.sub(_soundReg, "", text)

def hasSound(text):
    """True if `text` contains at least one [sound:...] tag."""
    return re.search(_soundReg, text) is not None
##########################################################################
# Recording/encoding module state.
processingSrc = u"rec.wav"   # raw capture destination (WAV)
processingDst = u"rec.mp3"   # encoded result (MP3)
processingChain = []
recFiles = []

# Post-processing: re-encode the raw WAV to MP3 with lame.
processingChain = [
    ["lame", "rec.wav", processingDst, "--noreplaygain", "--quiet"],
]

# don't show box on windows
if isWin:
    si = subprocess.STARTUPINFO()
    try:
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    except:
        # python2.7+
        si.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW
else:
    # passed as startupinfo= to Popen; None is a no-op off Windows
    si = None

if isMac:
    # make sure lame, which is installed in /usr/local/bin, is in the path
    os.environ['PATH'] += ":" + "/usr/local/bin"
    dir = os.path.dirname(os.path.abspath(__file__))
    dir = os.path.abspath(dir + "/../../../..")
    # NOTE(review): presumably the bundled app layout ships binaries in
    # an 'audio' directory four levels up -- confirm against the bundle.
    os.environ['PATH'] += ":" + dir + "/audio"
def retryWait(proc):
    """Wait for `proc` to exit, retrying on EINTR-style OSError.

    osx throws interrupted system call errors frequently; keep calling
    wait() until it succeeds and return its exit status.
    """
    while True:
        try:
            return proc.wait()
        except OSError:
            pass
# Mplayer settings
##########################################################################

# Base mplayer invocation, adjusted per platform.
if isWin:
    mplayerCmd = ["mplayer.exe", "-ao", "win32"]
    # Make the bundled mplayer.exe findable on PATH.
    dir = os.path.dirname(os.path.abspath(sys.argv[0]))
    os.environ['PATH'] += ";" + dir
    os.environ['PATH'] += ";" + dir + "\\..\\win\\top" # for testing
else:
    mplayerCmd = ["mplayer"]
# Common flags: minimal console output, no automatic subtitles.
mplayerCmd += ["-really-quiet", "-noautosub"]
# Mplayer in slave mode
##########################################################################

# Shared state between caller threads and the MplayerMonitor thread.
mplayerQueue = []                # paths waiting to be played
mplayerManager = None            # singleton MplayerMonitor, lazily started
mplayerReader = None             # NOTE(review): assigned but never used in this chunk
mplayerEvt = threading.Event()   # wakes the monitor thread
mplayerClear = False             # request: stop playback and clear queue
class MplayerMonitor(threading.Thread):
    """Background thread driving a single mplayer process in slave mode.

    Communicates with callers via the module globals mplayerQueue,
    mplayerEvt and mplayerClear; sends slave-mode commands over the
    child's stdin pipe.
    """

    def run(self):
        global mplayerClear
        self.mplayer = None
        self.deadPlayers = []   # exited Popen objects awaiting wait()
        while 1:
            # Sleep until a caller queues a file or requests a clear.
            mplayerEvt.wait()
            mplayerEvt.clear()
            # clearing queue?
            if mplayerClear and self.mplayer:
                try:
                    # NOTE(review): writes str to the pipe -- assumes a
                    # Python 2 text-mode pipe.
                    self.mplayer.stdin.write("stop\n")
                except:
                    # mplayer quit by user (likely video)
                    self.deadPlayers.append(self.mplayer)
                    self.mplayer = None
            # loop through files to play
            while mplayerQueue:
                # ensure started
                if not self.mplayer:
                    self.startProcess()
                # pop a file
                try:
                    item = mplayerQueue.pop(0)
                except IndexError:
                    # queue was cleared by main thread
                    continue
                if mplayerClear:
                    # play immediately, replacing current playback
                    mplayerClear = False
                    extra = ""
                else:
                    # " 1" appends to mplayer's internal playlist
                    extra = " 1"
                cmd = 'loadfile "%s"%s\n' % (item, extra)
                try:
                    self.mplayer.stdin.write(cmd)
                except:
                    # mplayer has quit and needs restarting
                    self.deadPlayers.append(self.mplayer)
                    self.mplayer = None
                    self.startProcess()
                    self.mplayer.stdin.write(cmd)
                # if we feed mplayer too fast it loses files
                time.sleep(1)
            # wait() on finished processes. we don't want to block on the
            # wait, so we keep trying each time we're reactivated
            def clean(pl):
                if pl.poll() is not None:
                    pl.wait()
                    return False
                else:
                    return True
            self.deadPlayers = [pl for pl in self.deadPlayers if clean(pl)]

    def kill(self):
        """Ask the slave process to quit; safe to call when none is running."""
        if not self.mplayer:
            return
        try:
            self.mplayer.stdin.write("quit\n")
            self.deadPlayers.append(self.mplayer)
        except:
            # already gone; nothing to do
            pass
        self.mplayer = None

    def startProcess(self):
        """Spawn mplayer in slave mode with output discarded."""
        try:
            cmd = mplayerCmd + ["-slave", "-idle"]
            # NOTE(review): 'file' is the Python 2 builtin; this module
            # predates Python 3.
            devnull = file(os.devnull, "w")
            self.mplayer = subprocess.Popen(
                cmd, startupinfo=si, stdin=subprocess.PIPE,
                stdout=devnull, stderr=devnull)
        except OSError:
            mplayerEvt.clear()
            raise Exception("Did you install mplayer?")
def queueMplayer(path):
    """Append `path` to the playback queue and wake the monitor thread."""
    ensureMplayerThreads()
    if isWin and os.path.exists(path):
        # mplayer on windows doesn't like the encoding, so we create a
        # temporary file instead. oddly, foreign characters in the dirname
        # don't seem to matter.
        dir = tmpdir()
        name = os.path.join(dir, "audio%s%s" % (
            random.randrange(0, 1000000), os.path.splitext(path)[1]))
        f = open(name, "wb")
        f.write(open(path, "rb").read())
        f.close()
        # it wants unix paths, too!
        path = name.replace("\\", "/")
        path = path.encode(sys.getfilesystemencoding())
    else:
        # NOTE(review): Python 2 semantics -- encodes a unicode path to a
        # byte string before queueing.
        path = path.encode("utf-8")
    mplayerQueue.append(path)
    mplayerEvt.set()
def clearMplayerQueue():
    """Drop all queued audio and ask the monitor to stop the current file.

    Sets the clear flag (consumed by MplayerMonitor.run) and wakes the
    monitor thread.  Assignment order matches the original, since the
    monitor thread observes these globals concurrently.
    """
    global mplayerClear, mplayerQueue
    mplayerQueue = []
    mplayerClear = True
    mplayerEvt.set()
def ensureMplayerThreads():
    """Lazily start the singleton MplayerMonitor thread (idempotent)."""
    global mplayerManager
    if not mplayerManager:
        mplayerManager = MplayerMonitor()
        mplayerManager.daemon = True
        mplayerManager.start()
        # ensure the tmpdir() exit handler is registered first so it runs
        # after the mplayer exit
        tmpdir()
        # clean up mplayer on exit
        atexit.register(stopMplayer)
def stopMplayer(*args):
    """Terminate the slave mplayer process, if one was ever started.

    Accepts and ignores arbitrary arguments so it can serve both as an
    atexit handler and as an anki hook callback.
    """
    if mplayerManager:
        mplayerManager.kill()

addHook("unloadProfile", stopMplayer)
# PyAudio recording
##########################################################################

# pyaudio is optional: if it (or wave) is unavailable, these names stay
# undefined and PyAudioThreadedRecorder.run() raises a helpful error
# via its NameError handler.
try:
    import pyaudio
    import wave

    PYAU_FORMAT = pyaudio.paInt16
    PYAU_CHANNELS = 1
    PYAU_INPUT_INDEX = None   # None selects the default input device
except:
    pass
class _Recorder(object):
    """Base recorder: runs the module's post-recording processing chain."""

    def postprocess(self, encode=True):
        """Execute every command in processingChain.

        When `encode` is false the lame step is skipped, leaving the raw
        wav in place.  Raises Exception if any command fails to run or
        exits non-zero.
        """
        self.encode = encode
        for command in processingChain:
            if not self.encode and command[0] == 'lame':
                continue
            try:
                failed = retryWait(subprocess.Popen(command, startupinfo=si))
            except:
                failed = True
            if failed:
                raise Exception(_(
                    "Error running %s") %
                    u" ".join(command))
class PyAudioThreadedRecorder(threading.Thread):
    """Capture audio from the default input device into processingSrc.

    Records in a background thread until .finish is set, then writes the
    accumulated frames as a WAV file.  NOTE(review): uses Python 2
    'except E, e' syntax; this module does not compile on Python 3.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        self.finish = False   # set by the owner to stop recording

    def run(self):
        chunk = 1024
        try:
            p = pyaudio.PyAudio()
        except NameError:
            # the optional pyaudio import at module level failed
            raise Exception(
                "Pyaudio not installed (recording not supported on OSX10.3)")
        rate = int(p.get_default_input_device_info()['defaultSampleRate'])
        stream = p.open(format=PYAU_FORMAT,
                        channels=PYAU_CHANNELS,
                        rate=rate,
                        input=True,
                        input_device_index=PYAU_INPUT_INDEX,
                        frames_per_buffer=chunk)
        all = []
        while not self.finish:
            try:
                data = stream.read(chunk)
            except IOError, e:
                # tolerate input overflow; re-raise anything else
                if e[1] == pyaudio.paInputOverflowed:
                    data = None
                else:
                    raise
            if data:
                all.append(data)
        stream.close()
        p.terminate()
        # flush everything captured so far to the raw wav destination
        data = ''.join(all)
        wf = wave.open(processingSrc, 'wb')
        wf.setnchannels(PYAU_CHANNELS)
        wf.setsampwidth(p.get_sample_size(PYAU_FORMAT))
        wf.setframerate(rate)
        wf.writeframes(data)
        wf.close()
class PyAudioRecorder(_Recorder):
    """Recorder backed by a PyAudioThreadedRecorder worker thread."""

    def __init__(self):
        # Remove leftovers from any previous recording session.
        for leftover in recFiles + [processingSrc, processingDst]:
            try:
                os.unlink(leftover)
            except OSError:
                pass
        self.encode = False

    def start(self):
        """Begin capturing audio on a background thread."""
        self.thread = PyAudioThreadedRecorder()
        self.thread.start()

    def stop(self):
        """Signal the worker to finish and block until it does."""
        self.thread.finish = True
        self.thread.join()

    def file(self):
        """Return the finished recording's path (timestamped mp3 if encoded)."""
        if not self.encode:
            return processingSrc
        target = u"rec%d.mp3" % time.time()
        os.rename(processingDst, target)
        return target
# Audio interface
##########################################################################

# Indirection layer: callers use play()/clearAudioQueue(); these hooks
# select the backend.  Defaults wire everything to the mplayer slave
# backend defined above.
_player = queueMplayer
_queueEraser = clearMplayerQueue

def play(path):
    # Queue one media file for playback on the active backend.
    _player(path)

def clearAudioQueue():
    # Stop current playback and drop any queued files.
    _queueEraser()

# Default recorder implementation exported to callers.
Recorder = PyAudioRecorder
/FamcyDev-0.3.71-py3-none-any.whl/Famcy/bower_components/bootstrap-fileinput/js/locales/sk.js | (function ($) {
"use strict";
// Slovak ('sk') locale strings for the bootstrap-fileinput plugin.
// Registered on the shared locale registry; picked up when the widget
// is configured with language: 'sk'.  Placeholders like {name}, {size},
// {files} are substituted by the plugin at runtime.
$.fn.fileinputLocales['sk'] = {
    fileSingle: 'súbor',
    filePlural: 'súbory',
    browseLabel: 'Vybrať …',
    removeLabel: 'Odstrániť',
    removeTitle: 'Vyčistiť vybraté súbory',
    cancelLabel: 'Storno',
    cancelTitle: 'Prerušiť nahrávanie',
    pauseLabel: 'Pozastaviť',
    pauseTitle: 'Pozastaviť prebiehajúce nahrávanie',
    uploadLabel: 'Nahrať',
    uploadTitle: 'Nahrať vybraté súbory',
    msgNo: 'Nie',
    msgNoFilesSelected: '',
    msgPaused: 'Pozastavené',
    msgCancelled: 'Zrušené',
    msgPlaceholder: 'Vybrať {files} ...',
    msgZoomModalHeading: 'Detailný náhľad',
    msgFileRequired: 'Musíte vybrať súbor, ktorý chcete nahrať.',
    msgSizeTooSmall: 'Súbor "{name}" (<b>{size} KB</b>) je príliš malý, musí mať veľkosť najmenej <b>{minSize} KB</b>.',
    msgSizeTooLarge: 'Súbor "{name}" (<b>{size} KB</b>) je príliš veľký, maximálna povolená veľkosť <b>{maxSize} KB</b>.',
    msgFilesTooLess: 'Musíte vybrať najmenej <b>{n}</b> {files} pre nahranie.',
    msgFilesTooMany: 'Počet vybratých súborov <b>({n})</b> prekročil maximálny povolený limit <b>{m}</b>.',
    msgTotalFilesTooMany: 'Môžete nahrať maximálne <b>{m}</b> súborov (zistených <b>{n}</b> súborov).',
    msgFileNotFound: 'Súbor "{name}" nebol nájdený!',
    msgFileSecured: 'Zabezpečenie súboru znemožnilo čítať súbor "{name}".',
    msgFileNotReadable: 'Súbor "{name}" nie je čitateľný.',
    msgFilePreviewAborted: 'Náhľad súboru bol prerušený pre "{name}".',
    msgFilePreviewError: 'Nastala chyba pri načítaní súboru "{name}".',
    // NOTE(review): still the untranslated English default -- confirm
    // whether a Slovak translation is wanted here.
    msgInvalidFileName: 'Invalid or unsupported characters in file name "{name}".',
    msgInvalidFileType: 'Neplatný typ súboru "{name}". Iba "{types}" súborov sú podporované.',
    msgInvalidFileExtension: 'Neplatná extenzia súboru "{name}". Iba "{extensions}" súborov sú podporované.',
    // Human-readable names for previewed file types.
    msgFileTypes: {
        'image': 'obrázok',
        'html': 'HTML',
        'text': 'text',
        'video': 'video',
        'audio': 'audio',
        'flash': 'flash',
        'pdf': 'PDF',
        'object': 'object'
    },
    msgUploadAborted: 'Nahrávanie súboru bolo prerušené',
    msgUploadThreshold: 'Spracovávam …',
    msgUploadBegin: 'Inicializujem …',
    msgUploadEnd: 'Hotovo',
    msgUploadResume: 'Obnovuje sa nahrávanie …',
    msgUploadEmpty: 'Na nahrávanie nie sú k dispozícii žiadne platné údaje.',
    msgUploadError: 'Chyba pri nahrávaní',
    msgDeleteError: 'Chyba pri odstraňovaní',
    msgProgressError: 'Chyba',
    msgValidationError: 'Chyba overenia',
    msgLoading: 'Nahrávanie súboru {index} z {files} …',
    msgProgress: 'Nahrávanie súboru {index} z {files} - {name} - {percent}% dokončené.',
    msgSelected: '{n} {files} vybraté',
    msgProcessing: 'Spracovávam ...',
    // NOTE(review): 'prečinok(y)' looks like a typo for 'priečinok(y)'
    // (folder) -- confirm with a Slovak speaker before changing.
    msgFoldersNotAllowed: 'Tiahni a pusť iba súbory! Vynechané {n} pustené prečinok(y).',
    msgImageWidthSmall: 'Šírka obrázku "{name}", musí byť minimálne {size} px.',
    msgImageHeightSmall: 'Výška obrázku "{name}", musí byť minimálne {size} px.',
    msgImageWidthLarge: 'Šírka obrázku "{name}" nemôže presiahnuť {size} px.',
    msgImageHeightLarge: 'Výška obrázku "{name}" nesmie presiahnuť {size} px.',
    msgImageResizeError: 'Nepodarilo sa získať veľkosť obrázka pre zmenu veľkosti.',
    msgImageResizeException: 'Chyba pri zmene veľkosti obrázka.<pre>{errors}</pre>',
    msgAjaxError: 'Pri operácii {operation} sa vyskytla chyba. Skúste to prosím neskôr!',
    msgAjaxProgressError: '{operation} - neúspešné',
    msgDuplicateFile: 'Súbor "{name}" rovnakej veľkosti "{size} KB" už bol vybratý skôr. Preskočenie duplicitného výberu.',
    msgResumableUploadRetriesExceeded: 'Nahrávanie bolo prerušené po <b>{max}</b> opakovaniach súboru <b>{file}</b>! Detaily chyby: <pre>{error}</pre>',
    msgPendingTime: '{time} zostáva',
    msgCalculatingTime: 'výpočet zostávajúceho času',
    // Labels describing the ajax operation in msgAjaxError/msgAjaxProgressError.
    ajaxOperations: {
        deleteThumb: 'odstrániť súbor',
        uploadThumb: 'nahrať súbor',
        uploadBatch: 'nahrať várku súborov',
        uploadExtra: 'odosielanie údajov z formulára'
    },
    dropZoneTitle: 'Tiahni a pusť súbory tu …',
    dropZoneClickTitle: '<br>(alebo kliknite sem a vyberte {files})',
    // Tooltips for per-file action buttons and status indicators.
    fileActionSettings: {
        removeTitle: 'Odstrániť súbor',
        uploadTitle: 'Nahrať súbor',
        uploadRetryTitle: 'Znova nahrať',
        downloadTitle: 'Stiahnuť súbor',
        zoomTitle: 'Zobraziť podrobnosti',
        dragTitle: 'Posunúť / Preskládať',
        indicatorNewTitle: 'Ešte nenahral',
        indicatorSuccessTitle: 'Nahraný',
        indicatorErrorTitle: 'Chyba pri nahrávaní',
        indicatorPausedTitle: 'Nahrávanie bolo pozastavené',
        indicatorLoadingTitle: 'Nahrávanie …'
    },
    // Tooltips for the zoomed-preview modal controls.
    previewZoomButtonTitles: {
        prev: 'Zobraziť predchádzajúci súbor',
        next: 'Zobraziť následujúci súbor',
        toggleheader: 'Prepnúť záhlavie',
        fullscreen: 'Prepnúť zobrazenie na celú obrazovku',
        borderless: 'Prepnúť na bezrámikové zobrazenie',
        close: 'Zatvoriť detailný náhľad'
    }
};
})(window.jQuery); | PypiClean |
/BitGlitter-2.0.0.tar.gz/BitGlitter-2.0.0/bitglitter/config/readfunctions.py | import json
import logging
from pathlib import Path
import cv2
from bitglitter.config.config import session
from bitglitter.config.configmodels import Constants
from bitglitter.config.palettemodels import Palette
from bitglitter.config.readmodels.readmodels import StreamFrame, StreamSHA256Blacklist
from bitglitter.config.readmodels.streamread import StreamRead
from bitglitter.read.decode.headerdecode import initializer_header_validate_decode, metadata_header_validate_decode
from bitglitter.read.scan.scanhandler import ScanHandler
from bitglitter.read.scan.scanvalidate import frame_lock_on
from bitglitter.utilities.filemanipulation import refresh_directory, remove_working_folder
from bitglitter.utilities.loggingset import logging_setter
logging_setter(logging_level='info', logging_stdout_output=True, logging_txt_output=False)
def unpackage(stream_sha256):
    """Attempt to unpackage as much of the stream's payload as possible.

    Returns False if no stream with this SHA-256 exists; otherwise returns
    whatever StreamRead.attempt_unpackage() produces for a fresh working
    directory.  NOTE(review): the original docstring claimed None was
    returned for a missing stream and False for a decryption error, but
    the code returns False for a missing stream and passes through the
    unpackage results otherwise.
    """
    stream_read = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if not stream_read:
        return False
    constants = session.query(Constants).first()
    # Recreate a clean working directory for this unpackage attempt.
    working_directory = Path(constants.WORKING_DIR)
    refresh_directory(working_directory)
    results = stream_read.attempt_unpackage(working_directory)
    # May delete the stream if auto-delete conditions are met.
    stream_read.autodelete_attempt()
    remove_working_folder(working_directory)
    return results
def return_all_read_information(advanced=False):
    """Return the state dict of every stream read in the database."""
    return [record.return_state(advanced) for record in StreamRead.query.all()]
def return_single_read_information(stream_sha256, advanced=False):
    """Return one stream read's state, or False if the SHA-256 is unknown."""
    record = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    return record.return_state(advanced) if record else False
def update_decrypt_values(stream_sha256, decryption_key=None, scrypt_n=None, scrypt_r=None, scrypt_p=None):
    """Update decryption parameters for a stream and re-run eligibility checks.

    Returns False if the stream is unknown; True otherwise.  Streams
    without encryption enabled are left untouched.  Only truthy values
    are applied, matching the original behaviour.
    """
    record = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if not record:
        return False
    if not record.encryption_enabled:
        return True
    for attribute, value in (('decryption_key', decryption_key), ('scrypt_n', scrypt_n),
                             ('scrypt_r', scrypt_r), ('scrypt_p', scrypt_p)):
        if value:
            setattr(record, attribute, value)
    record.toggle_eligibility_calculations(True)
    return True
def attempt_metadata_decrypt(stream_sha256):
    """Try to decrypt a masked stream's metadata header with the stored key.

    Returns False if the stream is unknown, an {'error': ...} dict for
    precondition failures or a wrong key, and {'metadata': ...} on success.
    """
    stream_read = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if not stream_read:
        return False
    if not stream_read.file_masking_enabled:
        return {'error': 'File masking not enabled'}
    if not stream_read.decryption_key:
        # NOTE(review): trailing space in this message looks like a typo.
        return {'error': 'No decryption key '}
    if stream_read.manifest_string:
        return {'error': 'Metadata has already been decrypted'}
    results = metadata_header_validate_decode(stream_read.encrypted_metadata_header_bytes, None,
                                              stream_read.decryption_key, True, stream_read.file_masking_enabled,
                                              stream_read.scrypt_n, stream_read.scrypt_r, stream_read.scrypt_p,
                                              frame_processor=False)
    # Presence of 'bg_version' is how the decoder signals a successful decrypt.
    if 'bg_version' in results:
        bg_version = results['bg_version']
        stream_name = results['stream_name']
        stream_description = results['stream_description']
        time_created = results['time_created']
        manifest_string = results['manifest_string']
        stream_read.metadata_header_load(bg_version, stream_name, stream_description, time_created, manifest_string)
        return {'metadata': stream_read.metadata_checkpoint_return()}
    else:
        return {'error': 'Incorrect decryption value(s)'}
def return_stream_manifest(stream_sha256, return_as_json=False):
    """Return the stream's manifest as a JSON string or a parsed dict.

    Returns False for an unknown stream, or an {'error': ...} dict when
    the manifest is not yet available (header undecoded or still
    encrypted).
    """
    record = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if not record:
        return False
    if not record.manifest_string:
        return {'error': 'Metadata header not decoded yet'}
    if record.encrypted_metadata_header_bytes:
        return {'error': 'Metadata not decrypted yet'}
    manifest = record.manifest_string
    return manifest if return_as_json else json.loads(manifest)
def remove_partial_save(stream_sha256):
    """Delete a single partial stream read; False if it doesn't exist."""
    record = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if record:
        record.delete()
        return True
    return False
def remove_all_partial_save_data():
    """Removes all data for partial saves, both files and metadata within some internal classes."""
    # Bulk-delete every StreamRead row, then commit in one transaction.
    session.query(StreamRead).delete()
    session.commit()
    return True
def update_stream_read(stream_sha256, auto_delete_finished_stream=None, auto_unpackage_stream=None):
    """Update general per-stream settings; False if the stream is unknown.

    Will get larger as more config options are added.  Only strict
    booleans are applied; None (or any non-bool) leaves a setting as-is.
    """
    record = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if not record:
        return False
    if isinstance(auto_delete_finished_stream, bool):
        record.auto_delete_finished_stream = auto_delete_finished_stream
    if isinstance(auto_unpackage_stream, bool):
        record.auto_unpackage_stream = auto_unpackage_stream
    record.save()
    return True
def blacklist_stream_sha256(stream_sha256):
    """Validate a stream SHA-256, purge any saved read for it, and blacklist it.

    Raises ValueError for non-string input, wrong length, non-hex
    characters, or an already-blacklisted ID.  Returns True on success.
    """
    if not isinstance(stream_sha256, str):
        raise ValueError('Must be type str')
    if len(stream_sha256) != 64:
        raise ValueError('Stream IDs are 64 characters long')
    sha256_lowercase = stream_sha256.lower()
    if any(character not in '1234567890abcdef' for character in sha256_lowercase):
        raise ValueError('Not a valid stream ID')
    existing_blacklist = StreamSHA256Blacklist.query \
        .filter(StreamSHA256Blacklist.stream_sha256 == sha256_lowercase).first()
    if existing_blacklist:
        raise ValueError('Blacklisted stream is already in database')
    # Drop any partially-read stream with this ID before blacklisting.
    record = StreamRead.query.filter(StreamRead.stream_sha256 == sha256_lowercase).first()
    if record:
        record.delete()
    StreamSHA256Blacklist.create(stream_sha256=sha256_lowercase)
    return True
def return_all_blacklist_sha256():
    """Return every blacklisted stream SHA-256 as a list of strings."""
    return [entry.stream_sha256 for entry in StreamSHA256Blacklist.query.all()]
def remove_blacklist_sha256(stream_sha256):
    """Remove one SHA-256 from the blacklist; False if it wasn't listed."""
    entry = StreamSHA256Blacklist.query.filter(
        StreamSHA256Blacklist.stream_sha256 == stream_sha256).first()
    if entry:
        entry.delete()
        return True
    return False
def remove_all_blacklist_sha256():
    """Delete every blacklist entry in one bulk operation and commit."""
    session.query(StreamSHA256Blacklist).delete()
    session.commit()
    return True
def return_stream_frame_data(stream_sha256):
    """Per-frame progress for a stream, ordered by frame number.

    Returns False for an unknown stream, otherwise a list of dicts with
    completion flags, payload bit counts, and frame numbers.
    """
    record = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if not record:
        return False
    ordered_frames = record.frames.order_by(StreamFrame.frame_number.asc()).all()
    return [{'is_complete': frame.is_complete, 'added_to_progress': frame.added_to_progress,
             'payload_bits': frame.payload_bits, 'frame_number': frame.frame_number}
            for frame in ordered_frames]
def return_stream_file_data(stream_sha256, advanced=False):
    """Per-file state dicts for a read stream, or False if the stream is unknown."""
    stream_read = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if not stream_read:
        return False
    return [stream_file.return_state(advanced) for stream_file in stream_read.files.all()]
def return_stream_progress_data(stream_sha256):
    """Bit-range progress groups for a read stream, or False if unknown."""
    stream_read = StreamRead.query.filter(StreamRead.stream_sha256 == stream_sha256).first()
    if not stream_read:
        return False
    return [{'bit_start_position': group.bit_start_position,
             'bit_end_position': group.bit_end_position}
            for group in stream_read.progress.all()]
def verify_is_bitglitter_file(file_path: str):
    """Returns True or False depending on if the image or first video frame is detected as valid.  It does this
    through testing if it can lock onto the frame, and read the initializer.

    Fixes over the previous version: the root logger's prior state is restored
    on exit (it used to remain disabled for the rest of the process), the cv2
    video capture is always released, and a file cv2 cannot decode (``None``
    frame) returns False instead of crashing.
    """
    # Disabling log messages in the other functions; we just want a simple True or False returned.
    logger = logging.getLogger()
    previously_disabled = logger.disabled
    logger.disabled = True
    active_video = None
    try:
        constants = session.query(Constants).first()
        valid_image_formats = constants.return_valid_image_formats()
        valid_video_formats = constants.return_valid_video_formats()
        path = Path(file_path)
        if path.suffix not in valid_image_formats + valid_video_formats:
            return False
        if path.suffix in valid_image_formats:
            frame = cv2.imread(file_path)
        else:
            active_video = cv2.VideoCapture(file_path)
            frame = active_video.read()[1]
        if frame is None:
            # cv2 could not decode the file at all.
            return False
        frame_pixel_width = frame.shape[1]
        frame_pixel_height = frame.shape[0]
        initializer_palette_a = Palette.query.filter(Palette.palette_id == '1').first()
        initializer_palette_b = Palette.query.filter(Palette.palette_id == '11').first()
        initializer_palette_a_color_set = initializer_palette_a.convert_colors_to_tuple()
        initializer_palette_b_color_set = initializer_palette_b.convert_colors_to_tuple()
        initializer_palette_a_dict = initializer_palette_a.return_decoder()
        initializer_palette_b_dict = initializer_palette_b.return_decoder()
        lock_on_results = frame_lock_on(frame, None, None, frame_pixel_height, frame_pixel_width,
                                        initializer_palette_a_color_set, initializer_palette_b_color_set,
                                        initializer_palette_a_dict, initializer_palette_b_dict)
        if not lock_on_results:
            return False
        block_height = lock_on_results['block_height']
        block_width = lock_on_results['block_width']
        pixel_width = lock_on_results['pixel_width']
        scan_handler = ScanHandler(frame, True, initializer_palette_a, initializer_palette_a_dict,
                                   initializer_palette_a_color_set, block_height, block_width, pixel_width)
        initializer_bits = scan_handler.return_initializer_bits()['bits']
        if not initializer_header_validate_decode(initializer_bits, block_height, block_width):
            return False
        return True
    finally:
        if active_video is not None:
            active_video.release()
        # Restore whatever logging state the process had before this call.
        logger.disabled = previously_disabled
/B9gemyaeix-4.14.1.tar.gz/B9gemyaeix-4.14.1/docs/contributing/modules.rst | Contributing to Weblate modules
===============================
Besides the main repository, Weblate consists of several Python modules. All
of these follow the same structure, and this documentation covers them all.
For example, this covers:
* `wlc <https://github.com/WeblateOrg/wlc/>`_, Python client library, see :ref:`wlc`
* `translation-finder <https://github.com/WeblateOrg/translation-finder/>`_, used to discover translatable files in the repository
* `language-data <https://github.com/WeblateOrg/language-data/>`_, language definitions for Weblate, see :ref:`languages`
.. include:: snippets/code-guide.rst
Running tests
-------------
The tests are executed using :program:`py.test`. First you need to install the test requirements:
.. code-block:: sh
pip install -r requirements-test.txt
You can then execute the testsuite in the repository checkout:
.. code-block:: sh
py.test
.. seealso::
The CI integration is very similar to :doc:`tests`.
.. include:: snippets/pre-commit.rst
.. seealso::
:doc:`code`
| PypiClean |
/NEMO_CE-1.6.12-py3-none-any.whl/NEMO/views/safety.py | from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.db.models import Case, When
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse
from django.views.decorators.http import require_GET, require_http_methods
from NEMO.decorators import staff_member_required
from NEMO.forms import SafetyIssueCreationForm, SafetyIssueUpdateForm
from NEMO.models import Chemical, ChemicalHazard, Notification, SafetyCategory, SafetyIssue, SafetyItem
from NEMO.templatetags.custom_tags_and_filters import navigation_url
from NEMO.utilities import (
BasicDisplayTable,
EmailCategory,
distinct_qs_value_list,
export_format_datetime,
get_full_url,
queryset_search_filter,
render_email_template,
send_mail,
)
from NEMO.views.customization import EmailsCustomization, SafetyCustomization, get_media_file_contents
from NEMO.views.notifications import create_safety_notification, delete_notification, get_notifications
@login_required
@require_GET
def safety(request):
    """Entry point for the safety section: redirect to the first enabled tab."""
    expand_categories = SafetyCustomization.get_bool("safety_items_expand_categories")
    dictionary = safety_dictionary("")
    if dictionary["show_safety"]:
        return redirect("safety_all_in_one") if expand_categories else redirect("safety_categories")
    if dictionary["show_safety_issues"]:
        return redirect("safety_issues")
    if dictionary["show_safety_data_sheets"]:
        return redirect("safety_data_sheets")
    # Nothing is enabled: fall back to the issues page (matches prior behavior).
    return redirect("safety_issues")
@login_required
@require_GET
def safety_all_in_one(request):
    """Render every safety category with its items expanded on one page."""
    dictionary = safety_dictionary("safety")
    dictionary.update(
        {
            # Only categories that actually contain items.
            "safety_categories": SafetyCategory.objects.filter(
                id__in=distinct_qs_value_list(SafetyItem.objects.all(), "category_id")
            ),
            "safety_general": SafetyItem.objects.filter(category__isnull=True),
            "safety_items_expand_categories": True,
        }
    )
    return render(request, "safety/safety.html", dictionary)
@login_required
@require_GET
def safety_categories(request, category_id=None):
    """Render one safety category (optionally resolved from a safety_item_id).

    Fix: the lookup block previously caught only SafetyCategory.DoesNotExist,
    so an invalid ``safety_item_id`` (SafetyItem.DoesNotExist or a non-integer
    ValueError) or an item without a category (AttributeError on
    ``.category.id``) produced a 500 instead of falling back gracefully.
    """
    dictionary = safety_dictionary("safety")
    try:
        safety_item_id = request.GET.get("safety_item_id")
        if safety_item_id:
            category_id = SafetyItem.objects.get(pk=safety_item_id).category.id
        SafetyCategory.objects.get(pk=category_id)
    except (SafetyCategory.DoesNotExist, SafetyItem.DoesNotExist, AttributeError, ValueError):
        # Bad item id, item without a category, or unknown category:
        # keep the fallback logic below instead of erroring out.
        pass
    safety_items_qs = SafetyItem.objects.filter(category_id=category_id)
    if not category_id and not safety_items_qs.exists():
        # No usable category requested: default to the first one, if any.
        first_category = SafetyCategory.objects.first()
        category_id = first_category.id if first_category else None
    dictionary.update(
        {
            "category_id": category_id,
            "safety_items": SafetyItem.objects.filter(category_id=category_id),
            "safety_categories": SafetyCategory.objects.filter(
                id__in=distinct_qs_value_list(SafetyItem.objects.all(), "category_id")
            ),
            "safety_general": SafetyItem.objects.filter(category_id__isnull=True).exists(),
        }
    )
    return render(request, "safety/safety.html", dictionary)
@login_required
@require_GET
def safety_item(request, safety_item_id: int):
    """Redirect to the page containing the item, with an anchor to scroll to it."""
    expand_categories = SafetyCustomization.get_bool("safety_items_expand_categories")
    anchor = f"?safety_item_id={safety_item_id}#safety_item_{safety_item_id}"
    target = reverse("safety_all_in_one") if expand_categories else reverse("safety_categories")
    return redirect(target + anchor)
@login_required
@require_GET
def safety_items_search(request):
    """AJAX endpoint: filter safety items by name or description."""
    searchable = SafetyItem.objects.all()
    return queryset_search_filter(searchable, ["name", "description"], request)
@login_required
@require_http_methods(["GET", "POST"])
def safety_issues(request):
    """List unresolved safety issues, newest first."""
    dictionary = safety_dictionary("safety_issues")
    open_tickets = SafetyIssue.objects.filter(resolved=False).order_by("-creation_time")
    if not request.user.is_staff:
        # Non-staff users only see issues flagged as publicly visible.
        open_tickets = open_tickets.filter(visible=True)
    dictionary.update(
        {
            "tickets": open_tickets,
            "notifications": get_notifications(request.user, Notification.Types.SAFETY),
        }
    )
    return render(request, "safety/safety_issues.html", dictionary)
@login_required
@require_http_methods(["GET", "POST"])
def create_safety_issue(request):
    """Show the safety-concern form; on a valid POST, record, email and notify."""
    dictionary = safety_dictionary("safety_issues")
    if request.method == "POST":
        form = SafetyIssueCreationForm(request.user, data=request.POST)
        if form.is_valid():
            new_issue = form.save()
            send_safety_email_notification(request, new_issue)
            create_safety_notification(new_issue)
            messages.success(request, "Your safety concern was sent to the staff and will be addressed promptly")
            return redirect("safety_issues")
    # GET, or an invalid submission: (re)display the creation page.
    return render(request, "safety/safety_issues_create.html", dictionary)
def send_safety_email_notification(request, issue):
    """Email the configured safety address about a newly created issue.

    Silently does nothing when no recipient address or email template is set.
    """
    recipient = EmailsCustomization.get("safety_email_address")
    template = get_media_file_contents("safety_issue_email.html")
    if not recipient or not template:
        return
    context = {"issue": issue, "issue_absolute_url": get_full_url(issue.get_absolute_url(), request)}
    send_mail(
        subject="Safety issue",
        content=render_email_template(template, context, request),
        # Reply-to the reporter when known; otherwise send from the safety address itself.
        from_email=issue.reporter.email if issue.reporter else recipient,
        to=[recipient],
        email_category=EmailCategory.SAFETY,
    )
@login_required
@require_GET
def resolved_safety_issues(request):
    """List resolved safety issues (non-staff only see the visible ones)."""
    dictionary = safety_dictionary("safety_issues")
    resolved_tickets = SafetyIssue.objects.filter(resolved=True)
    if not request.user.is_staff:
        resolved_tickets = resolved_tickets.filter(visible=True)
    dictionary.update({"tickets": resolved_tickets})
    return render(request, "safety/safety_issues_resolved.html", dictionary)
@staff_member_required
@require_http_methods(["GET", "POST"])
def update_safety_issue(request, ticket_id):
    """Staff form to update (and possibly resolve) a safety issue."""
    dictionary = safety_dictionary("safety_issues")
    if request.method == "POST":
        instance = get_object_or_404(SafetyIssue, id=ticket_id)
        form = SafetyIssueUpdateForm(request.user, data=request.POST, instance=instance)
        if form.is_valid():
            issue = form.save()
            if issue.resolved:
                # A resolved issue no longer needs its dashboard notification.
                delete_notification(Notification.Types.SAFETY, issue.id)
            messages.success(request, "This safety issue was updated successfully")
            return redirect("safety_issues")
    # Re-fetch so an invalid submission renders the stored (unmodified) ticket.
    dictionary["ticket"] = get_object_or_404(SafetyIssue, id=ticket_id)
    return render(request, "safety/safety_issues_update.html", dictionary)
@login_required
@require_GET
def safety_data_sheets(request):
    """Chemical safety-data-sheet table, sortable by name or by hazard column."""
    chemicals = Chemical.objects.all().prefetch_related("hazards").order_by()
    hazards = ChemicalHazard.objects.all()
    # Annotate one boolean pseudo-column per hazard so the template can sort on it.
    for hazard in hazards:
        chemicals = chemicals.annotate(
            **{f"hazard_{hazard.id}": Case(When(hazards__in=[hazard.id], then=True), default=False)}
        )
    order_by = request.GET.get("o", "name")
    reverse_order = order_by.startswith("-")
    order = order_by[1:] if reverse_order else order_by
    # The hazard joins can duplicate rows; dedupe before sorting in Python.
    chemicals = list(set(chemicals))
    if order == "name":
        chemicals.sort(key=lambda chemical: chemical.name.lower(), reverse=reverse_order)
    elif order.startswith("hazard_"):
        hazard_id = int(order[len("hazard_"):])
        # Stable sort: alphabetical first, then hazard presence as the primary key.
        chemicals.sort(key=lambda chemical: chemical.name.lower())
        chemicals.sort(
            key=lambda chemical: hazard_id in [h.id for h in chemical.hazards.all()],
            reverse=not reverse_order,
        )
    dictionary = safety_dictionary("safety_data_sheets")
    dictionary.update(
        {
            "chemicals": chemicals,
            "hazards": hazards,
            "order_by": order_by,
            "search_keywords": SafetyCustomization.get_bool("safety_data_sheets_keywords_default"),
        }
    )
    return render(request, "safety/safety_data_sheets.html", dictionary)
@staff_member_required
@require_GET
def export_safety_data_sheets(request):
    """CSV export of the chemical/hazard matrix.

    Fix: the Content-Disposition header contained a literal placeholder
    instead of interpolating the generated ``filename`` (which was computed
    but never used).
    """
    hazards = ChemicalHazard.objects.all()
    table = BasicDisplayTable()
    table.add_header(("name", "Name"))
    for hazard in hazards:
        table.add_header((f"hazard_{hazard.id}", hazard.name))
    table.add_header(("keywords", "Keywords"))
    for chemical in Chemical.objects.all():
        chemical: Chemical = chemical
        # Mark each hazard the chemical carries with an "X" in that column.
        values = {f"hazard_{hazard.id}": "X" for hazard in hazards if hazard in chemical.hazards.all()}
        values["name"] = chemical.name
        values["keywords"] = chemical.keywords
        table.add_row(values)
    response = table.to_csv()
    filename = f"safety_data_sheets_{export_format_datetime()}.csv"
    response["Content-Disposition"] = f'attachment; filename="{filename}"'
    return response
def safety_dictionary(tab):
    """Base template context: which safety tabs are enabled plus the active tab."""
    sds_url_exists = navigation_url("safety_data_sheets", "")
    dictionary = {
        "show_safety": SafetyCustomization.get_bool("safety_show_safety"),
        "show_safety_issues": SafetyCustomization.get_bool("safety_show_safety_issues"),
        # Data sheets need both the customization flag and a resolvable URL.
        "show_safety_data_sheets": SafetyCustomization.get_bool("safety_show_safety_data_sheets") and sds_url_exists,
    }
    dictionary["show_tabs"] = len([flag for flag, enabled in dictionary.items() if enabled])
    dictionary["tab"] = tab
    if tab == "safety_issues":
        dictionary["safety_introduction"] = get_media_file_contents("safety_introduction.html")
    return dictionary
/Cubane-1.0.11.tar.gz/Cubane-1.0.11/cubane/backend/static/cubane/backend/tinymce/js/tinymce/plugins/visualchars/plugin.min.js | !function(){"use strict";var n,e,t,r,o=function(n){var e=n,t=function(){return e};return{get:t,set:function(n){e=n},clone:function(){return o(t())}}},u=tinymce.util.Tools.resolve("tinymce.PluginManager"),i=function(n){return{isEnabled:function(){return n.get()}}},c=function(n,e){return n.fire("VisualChars",{state:e})},a={"\xa0":"nbsp","\xad":"shy"},f=function(n,e){var t,r="";for(t in n)r+=t;return new RegExp("["+r+"]",e?"g":"")},l=function(n){var e,t="";for(e in n)t&&(t+=","),t+="span.mce-"+n[e];return t},s={charMap:a,regExp:f(a),regExpGlobal:f(a,!0),selector:l(a),charMapToRegExp:f,charMapToSelector:l},d=function(n){return function(){return n}},m={noop:function(){},noarg:function(n){return function(){return n()}},compose:function(n,e){return function(){return n(e.apply(null,arguments))}},constant:d,identity:function(n){return n},tripleEquals:function(n,e){return n===e},curry:function(n){for(var e=new Array(arguments.length-1),t=1;t<arguments.length;t++)e[t-1]=arguments[t];return function(){for(var t=new Array(arguments.length),r=0;r<t.length;r++)t[r]=arguments[r];var o=e.concat(t);return n.apply(null,o)}},not:function(n){return function(){return!n.apply(null,arguments)}},die:function(n){return function(){throw new Error(n)}},apply:function(n){return n()},call:function(n){n()},never:d(!1),always:d(!0)},p=m.never,h=m.always,v=function(){return g},g=(r={fold:function(n,e){return n()},is:p,isSome:p,isNone:h,getOr:t=function(n){return n},getOrThunk:e=function(n){return n()},getOrDie:function(n){throw new Error(n||"error: getOrDie called on none.")},or:t,orThunk:e,map:v,ap:v,each:function(){},bind:v,flatten:v,exists:p,forall:h,filter:v,equals:n=function(n){return n.isNone()},equals_:n,toArray:function(){return[]},toString:m.constant("none()")},Object.freeze&&Object.freeze(r),r),y=function(n){var e=function(){return n},t=function(){return 
o},r=function(e){return e(n)},o={fold:function(e,t){return t(n)},is:function(e){return n===e},isSome:h,isNone:p,getOr:e,getOrThunk:e,getOrDie:e,or:t,orThunk:t,map:function(e){return y(e(n))},ap:function(e){return e.fold(v,function(e){return y(e(n))})},each:function(e){e(n)},bind:r,flatten:e,exists:r,forall:r,filter:function(e){return e(n)?o:g},equals:function(e){return e.is(n)},equals_:function(e,t){return e.fold(p,function(e){return t(n,e)})},toArray:function(){return[n]},toString:function(){return"some("+n+")"}};return o},T={some:y,none:v,from:function(n){return null===n||n===undefined?g:y(n)}},w=(Array.prototype.indexOf,undefined,function(n,e){for(var t=n.length,r=new Array(t),o=0;o<t;o++){var u=n[o];r[o]=e(u,o,n)}return r}),x=function(n,e){for(var t=0,r=n.length;t<r;t++)e(n[t],t,n)},E=(Array.prototype.push,Array.prototype.slice,w),b=x,k=function(n){if(null===n||n===undefined)throw new Error("Node cannot be null or undefined");return{dom:m.constant(n)}},N={fromHtml:function(n,e){var t=(e||document).createElement("div");if(t.innerHTML=n,!t.hasChildNodes()||t.childNodes.length>1)throw console.error("HTML does not have a single root node",n),"HTML must have a single root node";return k(t.childNodes[0])},fromTag:function(n,e){var t=(e||document).createElement(n);return k(t)},fromText:function(n,e){var t=(e||document).createTextNode(n);return k(t)},fromDom:k,fromPoint:function(n,e,t){return T.from(n.dom().elementFromPoint(e,t)).map(k)}},C=8,M=9,D=3,O=function(n){return n.dom().nodeName.toLowerCase()},A=function(n){return n.dom().nodeType},S=function(n){return function(e){return A(e)===n}},B=S(1),P=S(D),V=S(M),q={name:O,type:A,value:function(n){return n.dom().nodeValue},isElement:B,isText:P,isDocument:V,isComment:function(n){return A(n)===C||"#comment"===O(n)}},H=function(n){return'<span data-mce-bogus="1" class="mce-'+s.charMap[n]+'">'+n+"</span>"},L=function(n,e){var t=[],r=n.dom(),o=E(r.childNodes,N.fromDom);return 
b(o,function(n){e(n)&&(t=t.concat([n])),t=t.concat(L(n,e))}),t},R={isMatch:function(n){return q.isText(n)&&q.value(n)!==undefined&&s.regExp.test(q.value(n))},filterDescendants:L,findParentElm:function(n,e){for(;n.parentNode;){if(n.parentNode===e)return n;n=n.parentNode}},replaceWithSpans:function(n){return n.replace(s.regExpGlobal,H)}},_=function(n,e){var t,r,o=R.filterDescendants(N.fromDom(e),R.isMatch);b(o,function(e){var o=R.replaceWithSpans(q.value(e));for(r=n.dom.create("div",null,o);t=r.lastChild;)n.dom.insertAfter(t,e.dom());n.dom.remove(e.dom())})},j=function(n,e){var t=n.dom.select(s.selector,e);b(t,function(e){n.dom.remove(e,1)})},z=_,G=j,W=function(n){var e=n.getBody(),t=n.selection.getBookmark(),r=R.findParentElm(n.selection.getNode(),e);r=r!==undefined?r:e,j(n,r),_(n,r),n.selection.moveToBookmark(t)},F=function(n,e){var t,r=n.getBody(),o=n.selection;e.set(!e.get()),c(n,e.get()),t=o.getBookmark(),!0===e.get()?z(n,r):G(n,r),o.moveToBookmark(t)},I=function(n,e){n.addCommand("mceVisualChars",function(){F(n,e)})},J=tinymce.util.Tools.resolve("tinymce.util.Delay"),K=function(n,e){var t=J.debounce(function(){W(n)},300);!1!==n.settings.forced_root_block&&n.on("keydown",function(r){!0===e.get()&&(13===r.keyCode?W(n):t())})},Q=function(n){return function(e){var t=e.control;n.on("VisualChars",function(n){t.active(n.state)})}};u.add("visualchars",function(n){var e,t=o(!1);return I(n,t),(e=n).addButton("visualchars",{active:!1,title:"Show invisible characters",cmd:"mceVisualChars",onPostRender:Q(e)}),e.addMenuItem("visualchars",{text:"Show invisible characters",cmd:"mceVisualChars",onPostRender:Q(e),selectable:!0,context:"view",prependToContext:!0}),K(n,t),i(t)})}(); | PypiClean |
/CC-CataLog-0.1.330.tar.gz/CC-CataLog-0.1.330/catalog/data/data_solids.py | from ase.atoms import Atoms, string2symbols # type: ignore
from ase.units import kB, kJ, kcal, mol # type: ignore
import numpy as np # type: ignore
# --- Benchmark solid test sets (lattice constants and cohesive energies) ---
# Fix: the final assert line was corrupted by a fused "| PypiClean" dataset
# artifact ("30 | PypiClean"), which made the module fail with a NameError.

# Sol57LC: 57 solids with reference lattice constants.
sol57_lp = ['Li_bcc','Na_bcc','K_bcc','Rb_bcc','Ca_fcc','Sr_fcc','Ba_bcc',
            'V_bcc','Nb_bcc','Ta_bcc','Mo_bcc','W_bcc','Fe_bcc',
            'Rh_fcc','Ir_fcc','Ni_fcc','Pd_fcc','Pt_fcc','Cu_fcc',
            'Ag_fcc','Au_fcc','Al_fcc',
            'C_diamond','Si_diamond','Ge_diamond','Sn_diamond',
            'LiH_b1','LiF_b1','LiCl_b1','NaF_b1','NaCl_b1','MgO_b1','MgS_b1',
            'CaO_b1','TiC_b1','TiN_b1','ZrC_b1','ZrN_b1','VC_b1','VN_b1',
            'NbC_b1','NbN_b1','FeAl_b2','CoAl_b2','NiAl_b2','BN_b3',
            'BP_b3','BAs_b3','AlN_b3','AlP_b3','AlAs_b3','GaN_b3',
            'GaP_b3','GaAs_b3','InP_b3','InAs_b3','SiC_b3']
assert len(sol57_lp) == 57

# SMS 03/2016
# Sol27LC: first 26 entries of Sol57LC plus Pb_fcc.
sol27_lp = sol57_lp[0:26]
sol27_lp.append('Pb_fcc')
assert len(sol27_lp) == 27

data_info = {
    'sol27_lp': sol27_lp,
    'slab': [],
    'molecule': [],
    'ads': [],
    'dbh24': [],
    'bulk': []
}

# SMS 03/2016
# Sol58LC = Sol57LC + Pb_fcc (inserted after Al_fcc).
sol58_lp = ['Li_bcc','Na_bcc','K_bcc','Rb_bcc','Ca_fcc','Sr_fcc','Ba_bcc',
            'V_bcc','Nb_bcc','Ta_bcc','Mo_bcc','W_bcc','Fe_bcc',
            'Rh_fcc','Ir_fcc','Ni_fcc','Pd_fcc','Pt_fcc','Cu_fcc',
            'Ag_fcc','Au_fcc','Al_fcc','Pb_fcc',
            'C_diamond','Si_diamond','Ge_diamond','Sn_diamond',
            'LiH_b1','LiF_b1','LiCl_b1','NaF_b1','NaCl_b1','MgO_b1','MgS_b1',
            'CaO_b1','TiC_b1','TiN_b1','ZrC_b1','ZrN_b1','VC_b1','VN_b1',
            'NbC_b1','NbN_b1','FeAl_b2','CoAl_b2','NiAl_b2','BN_b3',
            'BP_b3','BAs_b3','AlN_b3','AlP_b3','AlAs_b3','GaN_b3',
            'GaP_b3','GaAs_b3','InP_b3','InAs_b3','SiC_b3']
assert len(sol58_lp) == 58

# Sol53Ec: 53 solids with reference cohesive energies.
sol53_coh = ['Li_bcc','Na_bcc','K_bcc','Rb_bcc','Ca_fcc','Sr_fcc','Ba_bcc',
             'V_bcc','Nb_bcc','Ta_bcc','Mo_bcc','W_bcc','Fe_bcc',
             'Rh_fcc','Ir_fcc','Ni_fcc','Pd_fcc','Pt_fcc','Cu_fcc',
             'Ag_fcc','Au_fcc','Al_fcc',
             'C_diamond','Si_diamond','Ge_diamond','Sn_diamond',
             'LiH_b1','LiF_b1','LiCl_b1','NaF_b1','NaCl_b1','MgO_b1','MgS_b1',
             'CaO_b1','TiC_b1','TiN_b1','ZrC_b1','ZrN_b1','VC_b1','VN_b1',
             'NbC_b1','NbN_b1','BN_b3','BP_b3','AlN_b3','AlP_b3','AlAs_b3',
             'GaN_b3','GaP_b3','GaAs_b3','InP_b3','InAs_b3','SiC_b3']
assert len(sol53_coh) == 53

# Sol54Ec = Sol53Ec + Pb_fcc (inserted after Al_fcc).
sol54_coh = ['Li_bcc','Na_bcc','K_bcc','Rb_bcc','Ca_fcc','Sr_fcc','Ba_bcc',
             'V_bcc','Nb_bcc','Ta_bcc','Mo_bcc','W_bcc','Fe_bcc',
             'Rh_fcc','Ir_fcc','Ni_fcc','Pd_fcc','Pt_fcc','Cu_fcc',
             'Ag_fcc','Au_fcc','Al_fcc','Pb_fcc',
             'C_diamond','Si_diamond','Ge_diamond','Sn_diamond',
             'LiH_b1','LiF_b1','LiCl_b1','NaF_b1','NaCl_b1','MgO_b1','MgS_b1',
             'CaO_b1','TiC_b1','TiN_b1','ZrC_b1','ZrN_b1','VC_b1','VN_b1',
             'NbC_b1','NbN_b1','BN_b3','BP_b3','AlN_b3','AlP_b3','AlAs_b3',
             'GaN_b3','GaP_b3','GaAs_b3','InP_b3','InAs_b3','SiC_b3']
assert len(sol54_coh) == 54

# BM32: bulk-modulus test set.
bm32 = ['Li_bcc','Na_bcc','Ca_fcc','Sr_fcc','Ba_bcc',
        'Rh_fcc','Pd_fcc','Cu_fcc','Ag_fcc','Al_fcc',
        'C_diamond','Si_diamond','Ge_diamond','Sn_diamond',
        'LiH_b1','LiF_b1','LiCl_b1','NaF_b1','NaCl_b1','MgO_b1',
        'BN_b3','BP_b3','BAs_b3','AlN_b3','AlP_b3','AlAs_b3',
        'GaN_b3','GaP_b3','GaAs_b3','InP_b3','InAs_b3','SiC_b3']
assert len(bm32) == 32

# hcp metals.
hcp_solids_10 = ['Cd_hcp', 'Co_hcp', 'Os_hcp', 'Ru_hcp', 'Zn_hcp',
                 'Zr_hcp', 'Sc_hcp', 'Be_hcp', 'Mg_hcp', 'Ti_hcp']
assert len(hcp_solids_10) == 10

# SE30: surface-energy set = 21 bcc/fcc facets + 9 hcp (0001) facets.
se21 = ['Li110','Na110','K110','Rb110','Ba110','Ca111','Sr111',
        'Nb110','Ta110','Mo110','W110','Fe110','Al111','Ni111',
        'Cu111','Rh111','Pd111','Ag111','Ir111','Pt111','Au111']
se9 = ['Mg0001','Zn0001','Cd0001','Sc0001','Ti0001',
       'Co0001','Zr0001','Ru0001','Os0001']
se30 = se21 + se9
assert len(se30) == 30
/FamcyDev-0.3.71-py3-none-any.whl/Famcy/bower_components/bootstrap-table/src/locale/bootstrap-table-ur-PK.js | $.fn.bootstrapTable.locales['ur-PK'] = $.fn.bootstrapTable.locales['ur'] = {
formatCopyRows () {
return 'Copy Rows'
},
formatPrint () {
return 'Print'
},
formatLoadingMessage () {
return 'براۓ مہربانی انتظار کیجئے'
},
formatRecordsPerPage (pageNumber) {
return `${pageNumber} ریکارڈز فی صفہ `
},
formatShowingRows (pageFrom, pageTo, totalRows, totalNotFiltered) {
if (totalNotFiltered !== undefined && totalNotFiltered > 0 && totalNotFiltered > totalRows) {
return `دیکھیں ${pageFrom} سے ${pageTo} کے ${totalRows}ریکارڈز (filtered from ${totalNotFiltered} total rows)`
}
return `دیکھیں ${pageFrom} سے ${pageTo} کے ${totalRows}ریکارڈز`
},
formatSRPaginationPreText () {
return 'previous page'
},
formatSRPaginationPageText (page) {
return `to page ${page}`
},
formatSRPaginationNextText () {
return 'next page'
},
formatDetailPagination (totalRows) {
return `Showing ${totalRows} rows`
},
formatClearSearch () {
return 'Clear Search'
},
formatSearch () {
return 'تلاش'
},
formatNoMatches () {
return 'کوئی ریکارڈ نہیں ملا'
},
formatPaginationSwitch () {
return 'Hide/Show pagination'
},
formatPaginationSwitchDown () {
return 'Show pagination'
},
formatPaginationSwitchUp () {
return 'Hide pagination'
},
formatRefresh () {
return 'تازہ کریں'
},
formatToggle () {
return 'تبدیل کریں'
},
formatToggleOn () {
return 'Show card view'
},
formatToggleOff () {
return 'Hide card view'
},
formatColumns () {
return 'کالم'
},
formatColumnsToggleAll () {
return 'Toggle all'
},
formatFullscreen () {
return 'Fullscreen'
},
formatAllRows () {
return 'All'
},
formatAutoRefresh () {
return 'Auto Refresh'
},
formatExport () {
return 'Export data'
},
formatJumpTo () {
return 'GO'
},
formatAdvancedSearch () {
return 'Advanced search'
},
formatAdvancedCloseButton () {
return 'Close'
},
formatFilterControlSwitch () {
return 'Hide/Show controls'
},
formatFilterControlSwitchHide () {
return 'Hide controls'
},
formatFilterControlSwitchShow () {
return 'Show controls'
}
}
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ur-PK']) | PypiClean |
/Finance-Ultron-1.0.8.1.tar.gz/Finance-Ultron-1.0.8.1/demo/ga_factor.py | import os, pdb, itertools, copy, datetime
os.environ['ULTRON_DATA'] = 'keim'
os.environ['IGNORE_WARNINGS'] = '0'
import random
import numpy as np
import pandas as pd
from ultron.env import *
from ultron.factor.genetic.geneticist.operators import custom_transformer
from ultron.factor.genetic.geneticist.engine import Engine
enable_example_env()
# Load the demo market data and normalise the trade-date column to datetimes.
market_data = pd.read_csv(os.path.join(g_project_data, 'market_data.csv'), index_col=0)
market_data['trade_date'] = pd.to_datetime(market_data['trade_date'])
def next_returs_impl(price_data, key, name):
    """Next-period log return per (trade_date, code).

    ``price_data`` is indexed by (trade_date, code); ``key`` is the price
    column; ``name`` names the returned Series.  The last date per code is NaN.
    (The "returs" typo in the name is kept for existing callers.)

    Fix: ``fillna(method='pad')`` is deprecated in modern pandas; replaced
    with the equivalent ``ffill()``.
    """
    price_tb = price_data[key].unstack()
    price_tb = price_tb.ffill()
    return_tb = np.log(price_tb.shift(-1) / price_tb)
    # Zero/negative prices would produce infinities; treat them as missing.
    return_tb = return_tb.replace([np.inf, -np.inf], np.nan)
    return_tb = return_tb.stack().reindex(price_data.index)
    return_tb.name = name
    return return_tb
# Next-period returns, factor selection, and the merged training frame.
next_rets = next_returs_impl(
    market_data.set_index(['trade_date', 'code']), 'closePrice', 'nxt1_ret'
).reset_index()
next_rets['trade_date'] = pd.to_datetime(next_rets['trade_date'])

sel_factor = pd.read_csv(os.path.join(g_project_data, 'sel_factor.csv'), index_col=0)
total_data = pd.read_csv(os.path.join(g_project_data, 'factor.csv'), index_col=0)

# Keep only the selected factor columns alongside the identifying keys.
factor_data = total_data[['trade_date', 'code'] + sel_factor['factor'].unique().tolist()]
factor_data['trade_date'] = pd.to_datetime(factor_data['trade_date'])

features = [
    col for col in factor_data.columns
    if col not in ['trade_date', 'code', 'inventory', 'profitratio']
]
total_data = factor_data.merge(next_rets, on=['trade_date', 'code'])
def save_model(gen, rootid, best_programs, custom_params):
    """Engine checkpoint callback; the demo only logs that it was invoked.

    Fix: removed a leftover ``pdb.set_trace()`` debugger breakpoint that
    would freeze every training checkpoint.
    """
    print('-->')
def evaluation(factor_data, total_data, factor_sets, custom_params,
               default_value):
    """Fitness: absolute mean of the per-date Spearman IC between the
    transformed factor and next-period returns.

    ``factor_sets``, ``custom_params`` and ``default_value`` belong to the
    engine's callback signature and are unused here.
    """
    returns = total_data[['trade_date', 'code', 'nxt1_ret']]
    merged = factor_data.reset_index().merge(returns, on=['trade_date', 'code'])
    daily_ic = merged.groupby(['trade_date']).apply(
        lambda frame: frame[['transformed', 'nxt1_ret']]
        .corr(method='spearman').values[0, 1])
    return abs(daily_ic.mean())
# Operator pool for expression generation.
# Fix: the list contained ' CSRank' with a stray leading space, which would
# not match the registered operator name 'CSRank'.
operators_sets = ['EMA', 'RSI', 'CSMean', 'CSRank', 'ADDED', 'SUBBED']
tournament_size = 5  # initial tournament/population size
standard_score = 0.75  # minimum evaluation score for keeping a program
custom_params = {
    'tournament_size': tournament_size,
    'standard_score': standard_score,
    'evaluate': 'both',
    'method': 'fitness'
}
operators_sets = custom_transformer(operators_sets)
# Genetic-programming engine configuration for the demo run.
# Fixes: removed a leftover pdb.set_trace() breakpoint before training, and
# stripped a fused "| PypiClean" dataset artifact from the final line.
gentic = Engine(population_size=10,
                tournament_size=tournament_size,
                init_depth=2,
                generations=1000,
                n_jobs=1,
                stopping_criteria=100,
                p_crossover=0.1,
                p_point_mutation=0.5,
                p_subtree_mutation=0.1,
                p_hoist_mutation=0.1,
                p_point_replace=0.1,
                factor_sets=features,
                standard_score=standard_score,
                operators_set=operators_sets,
                backup_cycle=20,
                convergence=0.002,
                save_model=save_model,
                custom_params=custom_params)
gentic.train(total_data=total_data)
/ANNOgesic-1.1.14.linux-x86_64.tar.gz/usr/local/lib/python3.10/dist-packages/annogesiclib/stat_operon.py | import csv
import itertools
def _boolean(data):
if data == "False":
result = False
else:
result = True
return result
def row_to_location(row):
    """Translate one operon table row into its feature-flag dictionary.

    Column 4 is the sub-operon count, 6 the TSS flag, 8 the terminator flag.
    """
    has_sub_operon = row[4] != "0"
    return {"have no sub-operons": not has_sub_operon,
            "have sub-operons": has_sub_operon,
            # Only the literal "False" counts as false in these cells.
            "start with tss": row[6] != "False",
            "stop with terminator": row[8] != "False"}
def plus_num(num_total, strain, type_):
    """Increment the overall and per-strain counters for one operon class."""
    for group in ("total", strain):
        num_total[group][type_] += 1
        num_total[group]["total"] += 1
def print_stat(operons, total_num, class_operon, out):
    """Write feature-combination counts and mono/poly ratios to ``out``.

    ``operons`` is a list of feature-flag dicts (see row_to_location),
    ``total_num`` the operon count, ``class_operon`` the mono/poly counters.
    """
    num_features = {}
    out.write("Total number of operons is {0}\n".format(total_num))
    out.write("The sub operon and features:\n")
    for operon in operons:
        # Count every feature combination (sizes 1..4) whose flags all hold.
        for size in range(1, 5):
            for combo in itertools.combinations(operon.keys(), size):
                if all(operon[feature] for feature in combo):
                    label = " and ".join(combo)
                    num_features[label] = num_features.get(label, 0) + 1
    for label, count in num_features.items():
        out.write("\tthe number of operons which {0} = {1} ({2})\n".format(
            label, count, float(count) / float(total_num)))
    out.write("mono/polycistronic:\n")
    out.write("\tmonocistronic: {0} ({1})\n".format(
        class_operon["mono"],
        float(class_operon["mono"]) / float(class_operon["total"])))
    out.write("\tpolycistronic: {0} ({1})\n".format(
        class_operon["poly"],
        float(class_operon["poly"]) / float(class_operon["total"])))
def stat(input_file, out_file):
    """Read a tab-separated operon table and write summary statistics.

    ``input_file`` is expected to have a header row starting with
    "Operon_ID"; column 0 is the operon id, column 1 the genome/strain
    name, and the last column lists the genes of the operon (comma
    separated, or "NA").  Statistics are accumulated per strain and
    overall, then printed via ``print_stat``.
    """
    out = open(out_file, "w")
    # Per-strain lists of feature dicts, plus one flat list over all strains.
    operons = {}
    operons_all = []
    tmp_id = ""
    f_h = open(input_file, "r")
    pre_seq_id = ""
    total_num = {}
    total_num_all = 0
    class_operon = {}
    class_operon["total"] = {"na": 0, "mono": 0, "poly": 0, "total": 0}
    for row in csv.reader(f_h, delimiter="\t"):
        # Skip the header row.
        if row[0] != "Operon_ID":
            # Only the first row of each operon id is counted; subsequent
            # rows with the same id (sub-operon detail lines, presumably)
            # are ignored.
            if row[0] != tmp_id:
                # First time this strain is seen: set up its counters.
                # NOTE(review): this assumes rows are grouped by strain;
                # an interleaved input would reset nothing but also not
                # re-create the dicts, so grouping seems expected.
                if pre_seq_id != row[1]:
                    pre_seq_id = row[1]
                    operons[row[1]] = []
                    total_num[row[1]] = 0
                    class_operon[row[1]] = {"na": 0, "mono": 0,
                                            "poly": 0, "total": 0}
                operons[row[1]].append(row_to_location(row))
                operons_all.append(row_to_location(row))
                total_num[row[1]] += 1
                total_num_all += 1
                # Classify by the gene list in the last column:
                # "NA" -> unknown, one gene -> monocistronic,
                # several genes -> polycistronic.
                if row[-1] == "NA":
                    plus_num(class_operon, row[1], "na")
                elif len(row[-1].split(",")) == 1:
                    plus_num(class_operon, row[1], "mono")
                elif len(row[-1].split(",")) > 1:
                    plus_num(class_operon, row[1], "poly")
                tmp_id = row[0]
    # With more than one strain, print the combined section first.
    if len(operons) > 1:
        out.write("All genomes:\n")
        print_stat(operons_all, total_num_all, class_operon["total"], out)
    for strain in operons.keys():
        out.write("\n" + strain + ":\n")
        print_stat(operons[strain], total_num[strain],
                   class_operon[strain], out)
    out.close()
    f_h.close()
/CLAchievements-0.1.0.tar.gz/CLAchievements-0.1.0/doc/install.rst | Install and Enable
==================
.. warning:: :ref:`Installing <install>` CLAchievements is not enough: it has to be :ref:`enabled <enable>`.
.. _install:
Install
-------
PyGObject
"""""""""
CLAchievements uses `PyGObject <https://wiki.gnome.org/Projects/PyGObject>`_ to display the achievements (other methods might be supported later). Thus, it must be installed, either system-wide (if CLAchievements is not installed in a virtualenv, or if the virtualenv has been created with option ``--system-site-packages``), or as a dependency (see the *extra* ``pgi`` dependency below).
From sources
""""""""""""
* Download: https://pypi.python.org/pypi/clachievements
* Install (in a `virtualenv`, if you do not want to mess with your distribution installation system)::
python3 setup.py install
Or, to install the ``pgi`` dependency as well::
python3 setup.py install[pgi]
From pip
""""""""
Use::
pip install clachievements
Or, if you need the ``pgi`` dependency as well::
pip install clachievements[pgi]
Quick and dirty Debian (and Ubuntu?) package
""""""""""""""""""""""""""""""""""""""""""""
This requires `stdeb <https://github.com/astraw/stdeb>`_ to be installed::
python3 setup.py --command-packages=stdeb.command bdist_deb
sudo dpkg -i deb_dist/clachievements-<VERSION>_all.deb
The ``PyGObject`` dependency is proposed as an optional requirement.
.. _enable:
Enable
------
Once CLAchievements is installed, it does not work yet. Running ``ls`` will not trigger any achievement: you will have to wrap it using CLAchievements by running ``clachievements run ls``.
Replacing ``ls`` by ``clachievements run ls`` will change your habits. You do not want it. So, it should be aliased: ``alias ls="clachievements run ls"``.
All the commands triggering achievements should be aliased. To ease this, the :ref:`clachievements <usage>` command provides a sub-command ``clachievements alias``, which displays the shell code generating all the required aliases. Thus, in your :file:`.bashrc` (or :file:`.watheverrc`), write the line ``$(clachievements alias)`` to enable all the aliases.
Check
-----
To check if CLAchievements is enabled, run ``ls`` in a terminal. If you see the ``So it begins…`` achievement unlocked, it works. Otherwise, it does not… yet.
If you are not sure about whether CLAchievements works or not, reset the achievements using ``clachievements reset``, and run ``ls`` again to test it.
| PypiClean |
/JaxHankel-0.1.2.tar.gz/JaxHankel-0.1.2/LICENSE.md | GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
| PypiClean |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.