"""
Kravatte Achouffe Cipher Suite: Encryption, Decryption, and Authentication Tools based on the Farfalle modes
Copyright 2018 <NAME>
see LICENSE file
"""
from multiprocessing import Pool
from math import floor, ceil, log2
from typing import Tuple
from os import cpu_count
from ctypes import memset
import numpy as np
KravatteTagOutput = Tuple[bytes, bytes]
KravatteValidatedOutput = Tuple[bytes, bool]
class Kravatte(object):
    """Implementation of the Farfalle Pseudo-Random Function (PRF) construct utilizing the
    Keccak-1600 permutation.
    """
    KECCAK_BYTES = 200
    '''Number of Bytes in Keccak-1600 state'''
    KECCAK_LANES = 25
    '''Number of 8-Byte lanes in Keccak-1600 state'''
    KECCAK_PLANES_SLICES = 5
    ''' Size of x/y dimensions of Keccak lane array '''
    # Pre-computed x-index permutations used by the theta step (C[x-1] and D[x+1] neighbors)
    THETA_REORDER = ((4, 0, 1, 2, 3), (1, 2, 3, 4, 0))
    IOTA_CONSTANTS = np.array([0x000000000000800A, 0x800000008000000A, 0x8000000080008081,
                               0x8000000000008080, 0x0000000080000001, 0x8000000080008008],
                              dtype=np.uint64)
    '''Iota Step Round Constants For Keccak-p(1600, 4) and Keccak-p(1600, 6)'''
    RHO_SHIFTS = np.array([[0, 36, 3, 41, 18],
                           [1, 44, 10, 45, 2],
                           [62, 6, 43, 15, 61],
                           [28, 55, 25, 21, 56],
                           [27, 20, 39, 8, 14]], dtype=np.uint64)
    '''Lane Shifts for Rho Step'''
    CHI_REORDER = ((1, 2, 3, 4, 0), (2, 3, 4, 0, 1))
    '''Lane Re-order Mapping for Chi Step'''
    PI_ROW_REORDER = np.array([[0, 3, 1, 4, 2],
                               [1, 4, 2, 0, 3],
                               [2, 0, 3, 1, 4],
                               [3, 1, 4, 2, 0],
                               [4, 2, 0, 3, 1]])
    '''Row Re-order Mapping for Pi Step'''
    PI_COLUMN_REORDER = np.array([[0, 0, 0, 0, 0],
                                  [1, 1, 1, 1, 1],
                                  [2, 2, 2, 2, 2],
                                  [3, 3, 3, 3, 3],
                                  [4, 4, 4, 4, 4]])
    '''Column Re-order Mapping for Pi Step'''
    COMPRESS_ROW_REORDER = np.array([[0, 0, 0, 0, 1],
                                     [1, 1, 1, 1, 2],
                                     [2, 2, 2, 2, 3],
                                     [3, 3, 3, 3, 4],
                                     [4, 4, 4, 4, 0]])
    '''Row Re-order Mapping for Compress Step'''
    COMPRESS_COLUMN_REORDER = np.array([[0, 1, 2, 3, 4],
                                        [0, 1, 2, 3, 4],
                                        [0, 1, 2, 3, 4],
                                        [0, 1, 2, 3, 4],
                                        [0, 1, 2, 3, 4]])
    '''Column Re-order Mapping for Compress Step'''
    EXPAND_ROW_REORDER = np.array([[0, 0, 0, 1, 1],
                                   [1, 1, 1, 2, 2],
                                   [2, 2, 2, 3, 3],
                                   [3, 3, 3, 4, 4],
                                   [4, 4, 4, 0, 0]])
    '''Row Re-order Mapping for Expand Step'''
    EXPAND_COLUMN_REORDER = np.array([[0, 1, 2, 3, 4],
                                      [0, 1, 2, 3, 4],
                                      [0, 1, 2, 3, 4],
                                      [0, 1, 2, 3, 4],
                                      [0, 1, 2, 4, 4]])
    '''Column Re-order Mapping for Expand Step'''

    def __init__(self, key: bytes=b'', workers: int=None, mp_input: bool=True, mp_output: bool=True):
        """
        Initialize Kravatte with user key
        Inputs:
            key (bytes): encryption/authentication key
            workers (int): parallel processes to use in compression/expansion operations
            mp_input (bool): Enable multi-processing for calculations on input data
            mp_output (bool): Enable multi-processing for calculations on output data
        """
        self.update_key(key)
        self.reset_state()
        # Enable Standard or Optimized Multi-process codepaths
        if workers is not None:
            self.collect_message = self._collect_message_mp if mp_input else self._collect_message_sp
            self.generate_digest = self._generate_digest_mp if mp_output else self._generate_digest_sp
            # workers == 0 is shorthand for "one worker per available CPU core"
            self.workers = cpu_count() if workers == 0 else workers
        else:
            self.collect_message = self._collect_message_sp
            self.generate_digest = self._generate_digest_sp
            self.workers = None

    def update_key(self, key: bytes) -> None:
        """
        Pad and compute new Kravatte base key from bytes source.
        Inputs:
            key (bytes): user provided bytes to be padded (if necessary) and computed into Kravatte base key
        """
        key_pad = self._pad_10_append(key, self.KECCAK_BYTES)
        # Interpret the 200 padded bytes as 25 uint64 lanes in a 5x5 column-major (Fortran) layout
        key_array = np.frombuffer(key_pad, dtype=np.uint64, count=self.KECCAK_LANES,
                                  offset=0).reshape([self.KECCAK_PLANES_SLICES,
                                                     self.KECCAK_PLANES_SLICES], order='F')
        self.kra_key = self._keccak(key_array)

    def reset_state(self) -> None:
        """
        Clear existing Farfalle/Kravatte state and prepares for new input message collection.
        Elements reset include:
            - Message block collector
            - Rolling key
            - Currently stored output digest
            - Digest Active and New Collector Flags
        Inputs:
            None
        """
        self.roll_key = np.copy(self.kra_key)
        self.collector = np.zeros([5, 5], dtype=np.uint64)
        self.digest = bytearray(b'')
        self.digest_active = False
        self.new_collector = True

    def _generate_absorb_queue(self, absorb_steps: int, kra_msg: bytes):
        """
        Generator for Keccak-sized blocks of input message for Farfalle compression
        Inputs:
            absorb_steps (int): Number of blocks to generate for absorption
            kra_msg (bytes): padded input message ready for slicing into input blocks
        """
        for msg_block in range(absorb_steps):
            # Each yielded block is (message block XOR current rolling key); key rolls after each block
            yield (np.frombuffer(kra_msg, dtype=np.uint64, count=25, offset=msg_block * self.KECCAK_BYTES).reshape([5, 5], order='F') ^ self.roll_key)
            self.roll_key = self._kravatte_roll_compress(self.roll_key)

    def _collect_message_sp(self, message: bytes, append_bits: int=0, append_bit_count: int=0) -> None:
        """
        Pad and Process Blocks of Message into Kravatte collector state
        Inputs:
            message (bytes): arbitrary number of bytes to be padded into Keccak blocks and absorbed into the collector
            append_bits (int): bits to append to the message before padding. Required for more advanced Kravatte modes.
            append_bit_count (int): number of bits to append
        """
        # Starting a new message after squeezing implicitly begins a fresh session
        if self.digest_active:
            self.reset_state()
        if self.new_collector:
            self.new_collector = False
        else:
            # Subsequent messages roll the key once before absorbing
            self.roll_key = self._kravatte_roll_compress(self.roll_key)
        # Pad Message
        msg_len = len(message)
        kra_msg = self._pad_10_append(message, msg_len + (self.KECCAK_BYTES - (msg_len % self.KECCAK_BYTES)), append_bits, append_bit_count)
        absorb_steps = len(kra_msg) // self.KECCAK_BYTES
        # Absorb into Collector
        for msg_block in range(absorb_steps):
            m = np.frombuffer(kra_msg, dtype=np.uint64, count=25, offset=msg_block * self.KECCAK_BYTES).reshape([5, 5], order='F')
            m_k = m ^ self.roll_key
            self.roll_key = self._kravatte_roll_compress(self.roll_key)
            self.collector = self.collector ^ self._keccak(m_k)

    def _collect_message_mp(self, message: bytes, append_bits: int=0, append_bit_count: int=0) -> None:
        """
        Pad and Process Blocks of Message into Kravatte collector state - Multi-process Aware Variant
        Inputs:
            message (bytes): arbitrary number of bytes to be padded into Keccak blocks and absorbed into the collector
            append_bits (int): bits to append to the message before padding. Required for more advanced Kravatte modes.
            append_bit_count (int): number of bits to append
        """
        if self.digest_active:
            self.reset_state()
        if self.new_collector:
            self.new_collector = False
        else:
            self.roll_key = self._kravatte_roll_compress(self.roll_key)
        # Pad Message
        msg_len = len(message)
        kra_msg = self._pad_10_append(message, msg_len + (self.KECCAK_BYTES - (msg_len % self.KECCAK_BYTES)), append_bits, append_bit_count)
        absorb_steps = len(kra_msg) // self.KECCAK_BYTES
        # Chunk size per worker; at least one block per task
        workload = 1 if (absorb_steps // self.workers) == 0 else (absorb_steps // self.workers)
        # XOR accumulation is order-independent, so imap_unordered is safe here
        with Pool(processes=self.workers) as kravatte_pool:
            for output_element in kravatte_pool.imap_unordered(self._keccak, self._generate_absorb_queue(absorb_steps, kra_msg), chunksize=workload):
                self.collector ^= output_element

    def _generate_digest_sp(self, output_size: int, short_kravatte: bool=False) -> None:
        """
        Squeeze an arbitrary number of bytes from collector state
        Inputs:
            output_size (int): Number of bytes to generate and store in Kravatte digest parameter
            short_kravatte (bool): Enable disable short kravatte required for other Kravatte modes
        """
        if not self.digest_active:
            # One-time transition from absorbing to squeezing
            self.collector = self.collector if short_kravatte else self._keccak(self.collector)
            self.roll_key = self._kravatte_roll_compress(self.roll_key)
            self.digest_active = True
        self.digest = bytearray(b'')
        # Round the requested size up to whole 200-byte Keccak states
        full_output_size = output_size + (200 - (output_size % 200)) if output_size % 200 else output_size
        generate_steps = full_output_size // 200
        for _ in range(generate_steps):
            collector_squeeze = self._keccak(self.collector)
            self.collector = self._kravatte_roll_expand(self.collector)
            self.digest.extend((collector_squeeze ^ self.roll_key).tobytes('F'))
        # Truncate the over-generated bytes down to the requested length
        self.digest = self.digest[:output_size]

    def _generate_squeeze_queue(self, generate_steps: int):
        """
        Generator for Keccak-sized blocks of expanded collector state for output squeezing
        Inputs:
            generate_steps (int): Number of blocks to generate and for absorb
        """
        for _ in range(generate_steps):
            yield self.collector
            self.collector = self._kravatte_roll_expand(self.collector)

    def _generate_digest_mp(self, output_size: int, short_kravatte: bool=False) -> None:
        """
        Squeeze an arbitrary number of bytes from collector state - Multi-process Aware Variant
        Inputs:
            output_size (int): Number of bytes to generate and store in Kravatte digest parameter
            short_kravatte (bool): Enable disable short kravatte required for other Kravatte modes
        """
        if not self.digest_active:
            self.collector = self.collector if short_kravatte else self._keccak(self.collector)
            self.roll_key = self._kravatte_roll_compress(self.roll_key)
            self.digest_active = True
        self.digest = bytearray(b'')
        full_output_size = output_size + (200 - (output_size % 200)) if output_size % 200 else output_size
        generate_steps = full_output_size // 200
        workload = 1 if (generate_steps // self.workers) == 0 else (generate_steps // self.workers)
        # Output blocks must stay in sequence, so ordered imap is used (unlike absorb)
        with Pool(processes=self.workers) as kravatte_pool:
            for digest_block in kravatte_pool.imap(self._keccak_xor_key, self._generate_squeeze_queue(generate_steps), chunksize=workload):
                self.digest.extend(digest_block.tobytes('F'))
        self.digest = self.digest[:output_size]

    def _keccak(self, input_array):
        """
        Implementation of Keccak-1600 PRF defined in FIPS 202
        Inputs:
            input_array (numpy array): Keccak compatible state array: 200-byte as 5x5 64-bit lanes
        Return:
            numpy array: Keccak compatible state array: 200-byte as 5x5 64-bit lanes
        """
        state = np.copy(input_array)
        # Reduced-round permutation: 6 rounds with the matching tail of the iota constants
        for round_num in range(6):
            # theta_step:
            # Exclusive-or each slice-lane by state based permutation value
            array_shift = state << 1 | state >> 63
            state ^= np.bitwise_xor.reduce(state[self.THETA_REORDER[0], ], 1, keepdims=True) ^ np.bitwise_xor.reduce(array_shift[self.THETA_REORDER[1], ], 1, keepdims=True)
            # rho_step:
            # Left Rotate each lane by pre-calculated value
            state = state << self.RHO_SHIFTS | state >> np.uint64(64 - self.RHO_SHIFTS)
            # pi_step:
            # Shuffle lanes to pre-calculated positions
            state = state[self.PI_ROW_REORDER, self.PI_COLUMN_REORDER]
            # chi_step:
            # Exclusive-or each individual lane based on and/invert permutation
            state ^= ~state[self.CHI_REORDER[0], ] & state[self.CHI_REORDER[1], ]
            # iota_step:
            # Exclusive-or first lane of state with round constant
            state[0, 0] ^= self.IOTA_CONSTANTS[round_num]
        return state

    def _keccak_xor_key(self, input_array):
        """
        Implementation of Keccak-1600 PRF defined in FIPS 202 plus an XOR with the current key state
        Inputs:
            input_array (numpy array): Keccak compatible state array: 200-byte as 5x5 64-bit lanes
        Return:
            numpy array: Keccak compatible state array: 200-byte as 5x5 64-bit lanes
        """
        # NOTE: duplicates _keccak so the final key XOR happens inside the worker process
        state = np.copy(input_array)
        for round_num in range(6):
            # theta_step:
            # Exclusive-or each slice-lane by state based permutation value
            array_shift = state << 1 | state >> 63
            state ^= np.bitwise_xor.reduce(state[self.THETA_REORDER[0], ], 1, keepdims=True) ^ np.bitwise_xor.reduce(array_shift[self.THETA_REORDER[1], ], 1, keepdims=True)
            # rho_step:
            # Left Rotate each lane by pre-calculated value
            state = state << self.RHO_SHIFTS | state >> np.uint64(64 - self.RHO_SHIFTS)
            # pi_step:
            # Shuffle lanes to pre-calculated positions
            state = state[self.PI_ROW_REORDER, self.PI_COLUMN_REORDER]
            # chi_step:
            # Exclusive-or each individual lane based on and/invert permutation
            state ^= ~state[self.CHI_REORDER[0], ] & state[self.CHI_REORDER[1], ]
            # iota_step:
            # Exclusive-or first lane of state with round constant
            state[0, 0] ^= self.IOTA_CONSTANTS[round_num]
        return state ^ self.roll_key

    def scrub(self):
        """
        Explicitly zero out both the key and collector array states. Use prior to reinitialization of
        key or when finished with object to help avoid leaving secret/interim data in memory.
        WARNING: Does not guarantee other copies of these arrays are not present elsewhere in memory
        Not applicable in multi-process mode.
        Inputs:
            None
        Return:
            None
        """
        # Clear collector array
        collector_location = self.collector.ctypes.data
        memset(collector_location, 0x00, self.KECCAK_BYTES)
        # Clear Kravatte base key array
        key_location = self.kra_key.ctypes.data
        memset(key_location, 0x00, self.KECCAK_BYTES)
        # Clear Kravatte rolling key array
        key_location = self.roll_key.ctypes.data
        memset(key_location, 0x00, self.KECCAK_BYTES)

    def _kravatte_roll_compress(self, input_array):
        """
        Kravatte defined roll function for compression side of Farfalle PRF
        Inputs:
            input_array (numpy array): Keccak compatible state array: 200-byte as 5x5 64-bit lanes
        Return:
            numpy array: Keccak compatible state array: 200-byte as 5x5 64-bit lanes
        """
        state = input_array[self.COMPRESS_ROW_REORDER, self.COMPRESS_COLUMN_REORDER]
        state[4, 4] = ((state[4, 4] << np.uint64(7)) | (state[4, 4] >> np.uint64(57))) ^ \
                      (state[0, 4]) ^ \
                      (state[0, 4] >> np.uint64(3))
        return state

    def _kravatte_roll_expand(self, input_array):
        """
        Kravatte defined roll function for expansion side of Farfalle PRF
        Inputs:
            input_array (numpy array): Keccak compatible state array: 200-byte as 5x5 64-bit lanes
        Return:
            numpy array: Keccak compatible state array: 200-byte as 5x5 64-bit lanes
        """
        state = input_array[self.EXPAND_ROW_REORDER, self.EXPAND_COLUMN_REORDER]
        state[4, 4] = ((input_array[0, 3] << np.uint64(7)) | (input_array[0, 3] >> np.uint64(57))) ^ \
                      ((input_array[1, 3] << np.uint64(18)) | (input_array[1, 3] >> np.uint64(46))) ^ \
                      ((input_array[1, 3] >> np.uint64(1)) & input_array[2, 3])
        return state

    @staticmethod
    def _pad_10_append(input_bytes: bytes, desired_length: int, append_bits: int=0, append_bit_count: int=0) -> bytes:
        """
        Farfalle defined padding function. Limited to byte divisible inputs only
        Inputs:
            input_bytes (bytes): Collection of bytes
            desired_length (int): Number of bytes to pad input len out to
            append_bits (int): one or more bits to be inserted before the padding starts. Allows
                               "appending" bits as required by several Kravatte modes
            append_bit_count (int): number of bits to append
        Return:
            bytes: input bytes with padding applied
        """
        start_len = len(input_bytes)
        if start_len == desired_length:
            return input_bytes
        # First pad byte carries the appended mode bits followed by the mandatory '1' pad bit
        head_pad_byte = bytes([(0b01 << append_bit_count) | (((2**append_bit_count) - 1) & append_bits)])
        pad_len = desired_length - (start_len % desired_length)
        padded_bytes = input_bytes + head_pad_byte + (b'\x00' * (pad_len - 1))
        return padded_bytes

    @staticmethod
    def compare_bytes(a: bytes, b: bytes) -> bool:
        """
        Time Constant Byte Comparison Function
        Inputs:
            a (bytes): first set of bytes
            b (bytes): second set of bytes
        Return:
            boolean
        """
        # NOTE(review): `and` short-circuits once compare is False, so this is not strictly
        # constant-time; hmac.compare_digest would be the hardened alternative — confirm intent
        compare = True
        if len(a) != len(b):
            return False
        for (element_a, element_b) in zip(a, b):
            compare = compare and (element_a == element_b)
        return compare
def mac(key: bytes, message: bytes, output_size: int, workers: int=None, mp_input: bool=True,
        mp_output: bool=True) -> bytearray:
    """
    Compute a Kravatte Message Authentication Code over a single message.

    Absorbs the message under the supplied key, squeezes the requested number of
    digest bytes, and scrubs the internal key/collector state before returning.
    Args:
        key (bytes): User authentication key (0 - 200 bytes)
        message (bytes): User message of arbitrary length
        output_size (int): Size of authenticated digest in bytes
        workers (int): parallel processes to use in compression/expansion operations
        mp_input (bool): Enable multi-processing for calculations on input data
        mp_output (bool): Enable multi-processing for calculations on output data
    Returns:
        bytearray: message authentication bytes of length output_size
    """
    prf = Kravatte(key, workers=workers, mp_input=mp_input, mp_output=mp_output)
    prf.collect_message(message)
    prf.generate_digest(output_size)
    prf.scrub()
    return prf.digest
def siv_wrap(key: bytes, message: bytes, metadata: bytes, tag_size: int=32, workers: int=None,
             mp_input: bool=True, mp_output: bool=True) -> KravatteTagOutput:
    """
    Authenticated Encryption with Associated Data (AEAD) via the Synthetic Initialization
    Vector (SIV) construction from the Farfalle/Kravatte spec. Produces a ciphertext the
    same length as the plaintext plus an authentication tag. Inverse of siv_unwrap.
    Args:
        key (bytes): Encryption key; 0-200 bytes in length
        message (bytes): Plaintext message for encryption
        metadata (bytes): Nonce/Seed value for authenticated encryption
        tag_size (int, optional): The tag size in bytes. Defaults to 32 bytes as defined in the
                                  Kravatte spec
        workers (int): parallel processes to use in compression/expansion operations
        mp_input (bool): Enable multi-processing for calculations on input data
        mp_output (bool): Enable multi-processing for calculations on output data
    Returns:
        tuple (bytes, bytes): Bytes of ciphertext and tag
    """
    siv = Kravatte(key, workers=workers, mp_input=mp_input, mp_output=mp_output)

    # Tag is derived from the metadata and plaintext together
    siv.collect_message(metadata)
    siv.collect_message(message)
    siv.generate_digest(tag_size)
    siv_tag = siv.digest

    # Key stream is derived from the metadata and the freshly computed tag
    siv.collect_message(metadata)
    siv.collect_message(siv_tag)
    siv.generate_digest(len(message))
    ciphertext = bytes(plain ^ ks for plain, ks in zip(message, siv.digest))
    siv.scrub()
    return ciphertext, siv_tag
def siv_unwrap(key: bytes, ciphertext: bytes, siv_tag: bytes, metadata: bytes, workers: int=None,
               mp_input: bool=True, mp_output: bool=True) -> KravatteValidatedOutput:
    """
    Decrypt and authenticate a Synthetic Initialization Vector (SIV) message from the
    Farfalle/Kravatte spec. Recovers a plaintext the same length as the ciphertext and
    validates it against the provided tag. Inverse of siv_wrap.
    Args:
        key (bytes): Encryption key; 0-200 bytes in length
        ciphertext (bytes): Ciphertext SIV Message
        siv_tag (bytes): Authenticating byte string
        metadata (bytes): Metadata used to encrypt message and generate tag
        workers (int): parallel processes to use in compression/expansion operations
        mp_input (bool): Enable multi-processing for calculations on input data
        mp_output (bool): Enable multi-processing for calculations on output data
    Returns:
        tuple (bytes, boolean): Bytes of plaintext and message validation boolean
    """
    siv = Kravatte(key, workers=workers, mp_input=mp_input, mp_output=mp_output)

    # Rebuild the key stream from the metadata and the provided tag, then strip it off
    siv.collect_message(metadata)
    siv.collect_message(siv_tag)
    siv.generate_digest(len(ciphertext))
    siv_plaintext = bytes(cipher ^ ks for cipher, ks in zip(ciphertext, siv.digest))

    # Recompute the tag from the metadata and recovered plaintext
    siv.collect_message(metadata)
    siv.collect_message(siv_plaintext)
    siv.generate_digest(len(siv_tag))
    generated_tag = siv.digest

    # Message is authentic only if the regenerated tag matches the provided one
    valid_tag = siv.compare_bytes(siv_tag, generated_tag)
    siv.scrub()
    return siv_plaintext, valid_tag
class KravatteSAE(Kravatte):
    """
    An authenticated encryption mode designed to track a session consisting of a series of messages
    and an initialization nonce. ** DEPRECATED in favor of KravatteSANE **
    """
    TAG_SIZE = 16
    # First OFFSET bytes of each generated key stream are discarded (reserved for the tag)
    OFFSET = TAG_SIZE

    def __init__(self, nonce: bytes, key: bytes=b'', workers: int=None, mp_input: bool=True,
                 mp_output: bool=True):
        """
        Initialize KravatteSAE with user key and nonce
        Args:
            nonce (bytes) - random unique value to initialize the session with
            key (bytes) - secret key for encrypting session messages
            workers (int): parallel processes to use in compression/expansion operations
            mp_input (bool): Enable multi-processing for calculations on input data
            mp_output (bool): Enable multi-processing for calculations on output data
        """
        super(KravatteSAE, self).__init__(key, workers, mp_input, mp_output)
        self.initialize_history(nonce)

    def initialize_history(self, nonce: bytes) -> None:
        """
        Initialize session history by storing Keccak collector state and current internal key
        Args:
            nonce (bytes): user provided bytes absorbed as the first entry of the session history
        """
        self.collect_message(nonce)
        self.history_collector = np.copy(self.collector)
        self.history_key = np.copy(self.roll_key)
        self.generate_digest(self.TAG_SIZE)
        self.tag = self.digest.copy()

    def wrap(self, plaintext: bytes, metadata: bytes) -> KravatteTagOutput:
        """
        Encrypt an arbitrary plaintext message using the included metadata as part of an on-going
        session. Creates authentication tag for validation during decryption.
        Args:
            plaintext (bytes): user plaintext of arbitrary length
            metadata (bytes): associated data to ensure a unique encryption permutation
        Returns:
            (bytes, bytes): encrypted cipher text and authentication tag
        """
        # Restore Kravatte State to When Latest History was Absorbed
        self.collector = np.copy(self.history_collector)
        self.roll_key = np.copy(self.history_key)
        self.digest = bytearray(b'')
        self.digest_active = False
        # Generate/Apply Key Stream (skipping the first OFFSET bytes)
        self.generate_digest(len(plaintext) + self.OFFSET)
        ciphertext = bytes([p_text ^ key_stream for p_text, key_stream in zip(plaintext, self.digest[self.OFFSET:])])
        # Update History
        if len(metadata) > 0 or len(plaintext) == 0:
            self._append_to_history(metadata, 0)
        if len(plaintext) > 0:
            self._append_to_history(ciphertext, 1)
        self.history_collector = np.copy(self.collector)
        self.history_key = np.copy(self.roll_key)
        # Generate Tag
        self.generate_digest(self.TAG_SIZE)
        return ciphertext, self.digest

    def unwrap(self, ciphertext: bytes, metadata: bytes, validation_tag: bytes) -> KravatteValidatedOutput:
        """
        Decrypt an arbitrary ciphertext message using the included metadata as part of an on-going
        session. Creates authentication tag for validation during decryption.
        Args:
            ciphertext (bytes): user ciphertext of arbitrary length
            metadata (bytes): associated data from encryption
            validation_tag (bytes): collection of bytes that authenticates the decrypted plaintext as
                                    being encrypted with the same secret key
        Returns:
            (bytes, bool): decrypted plaintext and boolean indicating in decryption was authenticated against secret key
        """
        # Restore Kravatte State to When Latest History was Absorbed
        self.collector = np.copy(self.history_collector)
        self.roll_key = np.copy(self.history_key)
        self.digest = bytearray(b'')
        self.digest_active = False
        # Generate/Apply Key Stream (skipping the first OFFSET bytes)
        self.generate_digest(len(ciphertext) + self.OFFSET)
        plaintext = bytes([p_text ^ key_stream for p_text, key_stream in zip(ciphertext, self.digest[self.OFFSET:])])
        # Update History (the ciphertext, not the plaintext, enters the history)
        if len(metadata) > 0 or len(ciphertext) == 0:
            self._append_to_history(metadata, 0)
        if len(ciphertext) > 0:
            self._append_to_history(ciphertext, 1)
        self.history_collector = np.copy(self.collector)
        self.history_key = np.copy(self.roll_key)
        # Generate Tag
        self.generate_digest(self.TAG_SIZE)
        # Store Generated Tag and Validate
        self.tag = self.digest.copy()
        valid_tag = self.compare_bytes(self.tag, validation_tag)
        return plaintext, valid_tag

    def _append_to_history(self, message: bytes, pad_bit: int) -> None:
        """
        Update history collector state with provided message.
        Args:
            message (bytes): arbitrary number of bytes to be padded into Keccak blocks and absorbed into the collector
            pad_bit (int): Either 1 or 0 to append to the end of the regular message before padding
        """
        if self.digest_active:
            # Rewind to the stored history state before absorbing more input
            self.collector = np.copy(self.history_collector)
            self.roll_key = np.copy(self.history_key)
            self.digest = bytearray(b'')
            self.digest_active = False
        self.roll_key = self._kravatte_roll_compress(self.roll_key)
        # Pad Message with the domain-separation bit followed by standard 10* padding
        start_len = len(message)
        padded_len = start_len + (self.KECCAK_BYTES - (start_len % self.KECCAK_BYTES))
        padded_bytes = self._pad_10_append(message, padded_len, pad_bit, 1)
        absorb_steps = len(padded_bytes) // self.KECCAK_BYTES
        # Absorb into Collector
        for msg_block in range(absorb_steps):
            m = np.frombuffer(padded_bytes, dtype=np.uint64, count=25, offset=msg_block * self.KECCAK_BYTES).reshape([5, 5], order='F')
            m_k = m ^ self.roll_key
            self.roll_key = self._kravatte_roll_compress(self.roll_key)
            self.collector = self.collector ^ self._keccak(m_k)
class KravatteSANE(Kravatte):
    """
    An authenticated encryption mode designed to track a session consisting of a series of messages,
    metadata, and an initialization nonce. A replacement for KravatteSAE
    """
    TAG_SIZE = 16
    # First OFFSET bytes of each generated key stream are discarded (reserved for the tag)
    OFFSET = TAG_SIZE
    # NOTE: a second, redundant copy of the class description string previously followed here;
    # it was a no-op statement duplicating the docstring and has been removed.

    def __init__(self, nonce: bytes, key: bytes=b'', workers: int=None, mp_input: bool=True,
                 mp_output: bool=True):
        """
        Initialize KravatteSANE with user key and nonce
        Args:
            nonce (bytes) - random unique value to initialize the session with
            key (bytes) - secret key for encrypting session messages
            workers (int): parallel processes to use in compression/expansion operations
            mp_input (bool): Enable multi-processing for calculations on input data
            mp_output (bool): Enable multi-processing for calculations on output data
        """
        super(KravatteSANE, self).__init__(key, workers, mp_input, mp_output)
        self.initialize_history(nonce, False)

    def initialize_history(self, nonce: bytes, reinitialize: bool=True) -> None:
        """
        Initialize session history. Session history is stored pre-compressed within the Keccak collector
        and current matching internal key state. Kravatte-SANE session history starts with the user
        provided nonce.
        Args:
            nonce (bytes): user provided bytes to initialize the session history
            reinitialize (bool): perform a full reset of the Keccak state when manually restarting the history log
        """
        if reinitialize:
            self.reset_state()
        self.collect_message(nonce)
        self.history_collector = np.copy(self.collector)
        self.history_key = np.copy(self.roll_key)
        self.generate_digest(self.TAG_SIZE)
        self.tag = self.digest.copy()
        # Alternating "e" bit providing per-message domain separation in the padding
        self.e_attr = 0

    def wrap(self, plaintext: bytes, metadata: bytes) -> KravatteTagOutput:
        """
        Encrypt an arbitrary plaintext message using the included metadata as part of an on-going
        session. Creates authentication tag for validation during decryption.
        Args:
            plaintext (bytes): user plaintext of arbitrary length
            metadata (bytes): associated data to ensure a unique encryption permutation
        Returns:
            (bytes, bytes): encrypted cipher text and authentication tag
        """
        # Restore Kravatte State to When Latest History was Absorbed
        self.collector = np.copy(self.history_collector)
        self.roll_key = np.copy(self.history_key)
        self.digest = bytearray(b'')
        self.digest_active = False
        # Generate/Apply Key Stream (skipping the first OFFSET bytes)
        self.generate_digest(len(plaintext) + self.OFFSET)
        ciphertext = bytes([p_text ^ key_stream for p_text, key_stream in zip(plaintext, self.digest[self.OFFSET:])])
        # Restore/Update History States if required
        self._restore_history()
        if len(metadata) > 0 or len(plaintext) == 0:
            self._append_to_history(metadata, (self.e_attr << 1) | 0, 2)
        if len(plaintext) > 0:
            self._append_to_history(ciphertext, (self.e_attr << 1) | 1, 2)
        # Toggle e attribute for the next wrap/unwrap in the session
        self.e_attr ^= 1
        # Generate Tag
        self.generate_digest(self.TAG_SIZE)
        return ciphertext, self.digest

    def unwrap(self, ciphertext: bytes, metadata: bytes, validation_tag: bytes) -> KravatteValidatedOutput:
        """
        Decrypt an arbitrary ciphertext message using the included metadata as part of an on-going
        session. Validates decryption based on the provided authentication tag.
        Args:
            ciphertext (bytes): user ciphertext of arbitrary length
            metadata (bytes): associated data from encryption
            validation_tag (bytes): collection of bytes that authenticates the decrypted plaintext as
                                    being encrypted with the same secret key
        Returns:
            (bytes, bool): decrypted plaintext and boolean indicating in decryption was authenticated against secret key
        """
        # Restore Kravatte State to When Latest History was Absorbed
        self.collector = np.copy(self.history_collector)
        self.roll_key = np.copy(self.history_key)
        self.digest = bytearray(b'')
        self.digest_active = False
        # Generate/Apply Key Stream (skipping the first OFFSET bytes)
        self.generate_digest(len(ciphertext) + self.OFFSET)
        plaintext = bytes([p_text ^ key_stream for p_text, key_stream in zip(ciphertext, self.digest[self.OFFSET:])])
        # Restore/Update History States if required (ciphertext, not plaintext, enters history)
        self._restore_history()
        if len(metadata) > 0 or len(ciphertext) == 0:
            self._append_to_history(metadata, (self.e_attr << 1) | 0, 2)
        if len(ciphertext) > 0:
            self._append_to_history(ciphertext, (self.e_attr << 1) | 1, 2)
        # Toggle e attribute for the next wrap/unwrap in the session
        self.e_attr ^= 1
        # Generate Tag
        self.generate_digest(self.TAG_SIZE)
        # Store Generated Tag and Validate
        self.tag = self.digest.copy()
        valid_tag = self.compare_bytes(self.tag, validation_tag)
        return plaintext, valid_tag

    def _append_to_history(self, message: bytes, pad_bits: int, pad_size: int) -> None:
        """
        Update history collector state with provided message.
        Args:
            message (bytes): arbitrary number of bytes to be padded into Keccak blocks and absorbed into the collector
            pad_bits (int): Up to 6 additional bits added to the end of the regular message before padding
            pad_size (int): Number of bits to append
        """
        self.collect_message(message, pad_bits, pad_size)
        self.history_collector = np.copy(self.collector)
        self.history_key = np.copy(self.roll_key)

    def _restore_history(self) -> None:
        """
        Restore the internal kravatte state to the previously saved history state
        Args:
            None
        """
        self.collector = np.copy(self.history_collector)
        self.roll_key = np.copy(self.history_key)
        self.digest = bytearray(b'')
        self.digest_active = False
class KravatteSANSE(Kravatte):
"""
A nonce-less authenticated encryption mode designed to track a session consisting of a series of
messages and metadata. A replacement for Kravatte-SIV
"""
TAG_SIZE = 32
def __init__(self, key: bytes=b'', workers: int=None, mp_input: bool=True, mp_output: bool=True):
"""
Initialize KravatteSANSE with user key
Args:
key (bytes) - secret key for encrypting/decrypting session messages
workers (int): parallel processes to use in compression/expansion operations
mp_input (bool): Enable multi-processing for calculations on input data
mp_output (bool): Enable multi-processing for calculations on output data
"""
super(KravatteSANSE, self).__init__(key, workers, mp_input, mp_output)
self.initialize_history(False)
def initialize_history(self, reinitialize: bool=True) -> None:
"""
Initialize session history. Session history is stored pre-compressed within the Keccak collector
and current matching internal key state. Kravatte-SANSE session history starts empty.
Args:
reinitialize (bool): perform a full reset of the Keccak state when manually restarting the history log
"""
if reinitialize:
self.reset_state()
self.history_collector = np.copy(self.collector)
self.history_key = np.copy(self.roll_key)
self.history_collector_state = np.copy(self.new_collector)
self.e_attr = 0
def wrap(self, plaintext: bytes, metadata: bytes) -> KravatteTagOutput:
"""
Encrypt an arbitrary plaintext message using the included metadata as part of an on-going
session. Creates authentication tag for validation during decryption.
Args:
plaintext (bytes): user plaintext of arbitrary length
metadata (bytes): associated data to ensure a unique encryption permutation
Returns:
(bytes, bytes): encrypted cipher text and authentication tag
"""
# Restore Kravatte State to When Latest History was Absorbed
self._restore_history()
# Update History
if len(metadata) > 0 or len(plaintext) == 0:
self._append_to_history(metadata, (self.e_attr << 1) | 0, 2)
if len(plaintext) > 0:
# Generate Tag
self.collect_message(plaintext, (self.e_attr << 2) | 0b10, 3)
self.generate_digest(self.TAG_SIZE)
tag = self.digest
# Reset History State and Generate/Apply Key Stream
self._restore_history()
self.collect_message(tag, ((self.e_attr << 2) | 0b11), 3)
self.generate_digest(len(plaintext))
ciphertext = bytes([p_text ^ key_stream for p_text, key_stream in zip(plaintext, self.digest)])
# Reset History State and Update it with Plaintext and Padding
self._restore_history()
self._append_to_history(plaintext, (self.e_attr << 2) | 0b10, 3)
else:
ciphertext = b''
self.generate_digest(self.TAG_SIZE)
tag = self.digest
self.e_attr ^= 1
return ciphertext, tag
    def unwrap(self, ciphertext: bytes, metadata: bytes, validation_tag: bytes) -> KravatteValidatedOutput:
        """
        Decrypt an arbitrary ciphertext message using the included metadata as part of an on-going
        session. Validates decryption based on the provided authentication tag.

        Args:
            ciphertext (bytes): user ciphertext of arbitrary length
            metadata (bytes): associated data from encryption
            validation_tag (bytes): collection of bytes that authenticates the decrypted plaintext as
                being encrypted with the same secret key

        Returns:
            (bytes, bool): decrypted plaintext and boolean indicating if the decryption was
                authenticated against the secret key
        """
        # Restore Kravatte state to the point where the latest history was absorbed
        self._restore_history()
        # Metadata is absorbed exactly as in wrap() so both sides stay in sync
        if len(metadata) > 0 or len(ciphertext) == 0:
            self._append_to_history(metadata, (self.e_attr << 1) | 0, 2)
        if len(ciphertext) > 0:
            # Re-derive the key stream from the received tag (pad (e << 2) | 0b11)
            self.collect_message(validation_tag, ((self.e_attr << 2) | 0b11), 3)
            self.generate_digest(len(ciphertext))
            plaintext = bytes([p_text ^ key_stream for p_text, key_stream in zip(ciphertext, self.digest)])
            # Rewind and commit the recovered plaintext (with padding) to the history
            self._restore_history()
            self._append_to_history(plaintext, (self.e_attr << 2) | 0b10, 3)
        else:
            plaintext = b''
        # Generate the expected tag from the updated history
        self.generate_digest(self.TAG_SIZE)
        # Flip the parity bit so the next wrap/unwrap uses distinct padding
        self.e_attr ^= 1
        # Store the generated tag and compare it against the received tag
        self.tag = self.digest.copy()
        valid_tag = self.compare_bytes(self.tag, validation_tag)
        return plaintext, valid_tag
def _append_to_history(self, message: bytes, pad_bits: int, pad_size: int) -> None:
"""
Update history collector state with provided message. Save the new history state.
Args:
message (bytes): arbitrary number of bytes to be padded into Keccak blocks and absorbed into the collector
pad_bits (int): Up to 6 additional bits added to the end of the regular message before padding
pad_size (int): Number of bits to append
"""
self.collect_message(message, pad_bits, pad_size)
self.history_collector = np.copy(self.collector)
self.history_key = np.copy(self.roll_key)
self.history_collector_state = np.copy(self.new_collector)
def _restore_history(self) -> None:
"""
Restore the internal kravatte state to the previously saved history state
Args:
None
"""
self.collector = np.copy(self.history_collector)
self.roll_key = np.copy(self.history_key)
self.new_collector = np.copy(self.history_collector_state)
self.digest = bytearray(b'')
self.digest_active = False
class KravatteWBC(Kravatte):
    """Kravatte-WBC: configurable Wide Block Cipher encryption mode with a customization tweak.

    The message is split into a "left" and "right" part and passed through a
    4-round Feistel-like network built from the Kravatte PRF, per the Farfalle
    specification.
    """
    # Block sizes at or below this many bytes are split evenly; larger blocks
    # are split on a Keccak-width boundary (see split_bytes)
    SPLIT_THRESHOLD = 398

    def __init__(self, block_cipher_size: int, tweak: bytes=b'', key: bytes=b'', workers: int=None,
                 mp_input: bool=True, mp_output: bool=True):
        """
        Initialize KravatteWBC object

        Args:
            block_cipher_size (int): size of block cipher in bytes
            tweak (bytes): arbitrary value to customize cipher output
            key (bytes): secret key for encrypting message blocks
            workers (int): parallel processes to use in compression/expansion operations
            mp_input (bool): Enable multi-processing for calculations on input data
            mp_output (bool): Enable multi-processing for calculations on output data
        """
        super(KravatteWBC, self).__init__(key, workers, mp_input, mp_output)
        self.split_bytes(block_cipher_size)
        self.tweak = tweak

    def split_bytes(self, message_size_bytes: int) -> None:
        """
        Calculate the sizes (in bytes) of the "left" and "right" components of the
        block encryption/decryption process, per the algorithm in the Farfalle spec.

        Args:
            message_size_bytes (int): user defined block size for this instance of KravatteWBC
        """
        if message_size_bytes <= self.SPLIT_THRESHOLD:
            # Even split with the left half rounding up: equivalent to ceil(n / 2)
            nL = (message_size_bytes + 1) // 2
        else:
            # Exact integer arithmetic replaces the former floor()/log2() float
            # math, which can misround for very large block sizes
            q = ((message_size_bytes + 1) // self.KECCAK_BYTES) + 1
            x = (q - 1).bit_length() - 1  # == floor(log2(q - 1)) for q >= 2
            nL = ((q - (2 ** x)) * self.KECCAK_BYTES) - 1
        self.size_L = nL
        self.size_R = message_size_bytes - nL

    def encrypt(self, message: bytes) -> bytes:
        """
        Encrypt a user message using KravatteWBC mode.

        Args:
            message (bytes): plaintext message to encrypt. Length should be <= the block
                cipher size defined in the KravatteWBC object

        Returns:
            bytes: encrypted block same length as message
        """
        L = message[0:self.size_L]
        R = message[self.size_L:]
        # R0 <- R0 + HK(L||0), with R0 the first min(b, |R|) bits of R
        self.collect_message(L, append_bits=0b0, append_bit_count=1)
        self.generate_digest(min(self.KECCAK_BYTES, self.size_R), short_kravatte=True)
        extended_digest = self.digest + ((self.size_R - len(self.digest)) * b'\x00')
        R = bytes([p_text ^ key_stream for p_text, key_stream in zip(R, extended_digest)])
        # L <- L + GK(R||1 . W)
        self.collect_message(self.tweak)
        self.collect_message(R, append_bits=0b1, append_bit_count=1)
        self.generate_digest(self.size_L)
        L = bytes([p_text ^ key_stream for p_text, key_stream in zip(L, self.digest)])
        # R <- R + GK(L||0 . W)
        self.collect_message(self.tweak)
        self.collect_message(L, append_bits=0b0, append_bit_count=1)
        self.generate_digest(self.size_R)
        R = bytes([p_text ^ key_stream for p_text, key_stream in zip(R, self.digest)])
        # L0 <- L0 + HK(R||1), with L0 the first min(b, |L|) bits of L
        self.collect_message(R, append_bits=0b1, append_bit_count=1)
        self.generate_digest(min(self.KECCAK_BYTES, self.size_L), short_kravatte=True)
        extended_digest = self.digest + ((self.size_L - len(self.digest)) * b'\x00')
        L = bytes([p_text ^ key_stream for p_text, key_stream in zip(L, extended_digest)])
        # C <- the concatenation of L and R
        return L + R

    def decrypt(self, ciphertext: bytes) -> bytes:
        """
        Decrypt a user message using KravatteWBC mode (inverse of encrypt).

        Args:
            ciphertext (bytes): ciphertext message to decrypt

        Returns:
            bytes: decrypted block same length as ciphertext
        """
        L = ciphertext[0:self.size_L]
        R = ciphertext[self.size_L:]
        # L0 <- L0 + HK(R||1), with L0 the first min(b, |L|) bits of L
        self.collect_message(R, append_bits=0b1, append_bit_count=1)
        self.generate_digest(min(self.KECCAK_BYTES, self.size_L), short_kravatte=True)
        extended_digest = self.digest + ((self.size_L - len(self.digest)) * b'\x00')
        L = bytes([c_text ^ key_stream for c_text, key_stream in zip(L, extended_digest)])
        # R <- R + GK(L||0 . W)
        self.collect_message(self.tweak)
        self.collect_message(L, append_bits=0b0, append_bit_count=1)
        self.generate_digest(self.size_R)
        R = bytes([c_text ^ key_stream for c_text, key_stream in zip(R, self.digest)])
        # L <- L + GK(R||1 . W)
        self.collect_message(self.tweak)
        self.collect_message(R, append_bits=0b1, append_bit_count=1)
        self.generate_digest(self.size_L)
        L = bytes([c_text ^ key_stream for c_text, key_stream in zip(L, self.digest)])
        # R0 <- R0 + HK(L||0), with R0 the first min(b, |R|) bits of R
        self.collect_message(L, append_bits=0b0, append_bit_count=1)
        self.generate_digest(min(self.KECCAK_BYTES, self.size_R), short_kravatte=True)
        extended_digest = self.digest + ((self.size_R - len(self.digest)) * b'\x00')
        R = bytes([c_text ^ key_stream for c_text, key_stream in zip(R, extended_digest)])
        # P <- the concatenation of L and R
        return L + R
class KravatteWBC_AE(KravatteWBC):
    """ Authentication with associated metadata version of the Kravatte Wide Block Cipher
    encryption mode. The block is padded with WBC_AE_TAG_LEN zero bytes before encryption;
    their presence after decryption authenticates the message. """
    # Number of zero bytes appended to the plaintext as an in-band authentication tag
    WBC_AE_TAG_LEN = 16

    def __init__(self, block_cipher_size: int, key: bytes=b'', workers: int=None,
                 mp_input: bool=True, mp_output: bool=True):
        """
        Initialize KravatteWBC_AE object

        Args:
            block_cipher_size (int): size of block cipher in bytes
            key (bytes): secret key for encrypting message blocks
            workers (int): parallel processes to use in compression/expansion operations
            mp_input (bool): Enable multi-processing for calculations on input data
            mp_output (bool): Enable multi-processing for calculations on output data
        """
        # The underlying WBC block is enlarged by the tag length; the tweak slot is
        # left empty here because the metadata supplied to wrap/unwrap fills it
        super(KravatteWBC_AE, self).__init__(block_cipher_size + self.WBC_AE_TAG_LEN, b'', key=key,
                                             workers=workers, mp_input=mp_input,
                                             mp_output=mp_output)

    def wrap(self, message: bytes, metadata: bytes) -> bytes:
        """
        Encrypt a user message and generate included authenticated data. Requires metadata
        input in lieu of a customization tweak.

        Args:
            message (bytes): user message same length as configured object block size
            metadata (bytes): associated metadata to ensure unique output

        Returns:
            bytes: authenticated encrypted block
        """
        self.tweak = metadata  # metadata treated as tweak
        # The trailing zero bytes act as the authentication tag after decryption
        padded_message = message + (self.WBC_AE_TAG_LEN * b'\x00')
        return self.encrypt(padded_message)

    def unwrap(self, ciphertext: bytes, metadata: bytes) -> KravatteValidatedOutput:
        """
        Decrypt a ciphertext block and validate included authenticated data. Requires
        metadata input in lieu of a customization tweak.

        Args:
            ciphertext (bytes): ciphertext same length as configured object block size
            metadata (bytes): associated metadata to ensure unique output

        Returns:
            (bytes, bool): plaintext bytes and decryption-valid flag
        """
        L = ciphertext[0:self.size_L]
        R = ciphertext[self.size_L:]
        self.tweak = metadata
        # L0 <- L0 + HK(R||1), with L0 the first min(b, |L|) bits of L
        self.collect_message(R, append_bits=0b1, append_bit_count=1)
        self.generate_digest(min(self.KECCAK_BYTES, self.size_L), short_kravatte=True)
        extended_digest = self.digest + ((self.size_L - len(self.digest)) * b'\x00')
        L = bytes([c_text ^ key_stream for c_text, key_stream in zip(L, extended_digest)])
        # R <- R + GK(L||0 . A)
        self.collect_message(self.tweak)
        self.collect_message(L, append_bits=0b0, append_bit_count=1)
        self.generate_digest(self.size_R)
        R = bytes([c_text ^ key_stream for c_text, key_stream in zip(R, self.digest)])
        # |R| >= b+t: the tag can be checked early, before the final two rounds
        if self.size_R >= self.KECCAK_BYTES + self.WBC_AE_TAG_LEN:
            # if the last t bytes of R != 0^t then flag an authentication error
            valid_plaintext = True if R[-self.WBC_AE_TAG_LEN:] == (self.WBC_AE_TAG_LEN * b'\x00') else False
            # L <- L + GK(R||1 . A)
            self.collect_message(self.tweak)
            self.collect_message(R, append_bits=0b1, append_bit_count=1)
            self.generate_digest(self.size_L)
            L = bytes([c_text ^ key_stream for c_text, key_stream in zip(L, self.digest)])
            # R0 <- R0 + HK(L||0), with R0 the first b bytes of R
            self.collect_message(L, append_bits=0b0, append_bit_count=1)
            self.generate_digest(self.KECCAK_BYTES, short_kravatte=True)
            extended_digest = self.digest + ((self.size_R - len(self.digest)) * b'\x00')
            R = bytes([c_text ^ key_stream for c_text, key_stream in zip(R, extended_digest)])
        else:
            # Short-R variant: finish all rounds, then check the tag on L||R
            # L <- L + GK(R||1 . A)
            self.collect_message(self.tweak)
            self.collect_message(R, append_bits=0b1, append_bit_count=1)
            self.generate_digest(self.size_L)
            L = bytes([c_text ^ key_stream for c_text, key_stream in zip(L, self.digest)])
            # R0 <- R0 + HK(L||0), with R0 the first min(b, |R|) bytes of R
            self.collect_message(L, append_bits=0b0, append_bit_count=1)
            self.generate_digest(min(self.KECCAK_BYTES, self.size_R), short_kravatte=True)
            extended_digest = self.digest + ((self.size_R - len(self.digest)) * b'\x00')
            R = bytes([c_text ^ key_stream for c_text, key_stream in zip(R, extended_digest)])
            # if the last t bytes of L||R != 0^t then flag an authentication error
            valid_plaintext = True if (L + R)[-self.WBC_AE_TAG_LEN:] == (self.WBC_AE_TAG_LEN * b'\x00') else False
        # P' <- L||R with the tag bytes stripped
        return (L + R)[:-self.WBC_AE_TAG_LEN], valid_plaintext
class KravatteOracle(Kravatte):
    """Pseudo-random byte stream generator. Accepts an authentication key and arbitrary sized
    seed input. Once initialized, the random method can return an arbitrary amount of random
    output bytes for each call. The generator collector state can be reinitialized at any time
    with the seed_generator method.
    """
    def __init__(self, seed: bytes=b'', key: bytes=b'', workers: int=None, mp_input: bool=True,
                 mp_output: bool=True):
        """
        Initialize KravatteOracle with user key and seed.

        Args:
            seed (bytes): random unique value to initialize the oracle object with
            key (bytes): secret key for authenticating generator
            workers (int): parallel processes to use in compression/expansion operations
            mp_input (bool): Enable multi-processing for calculations on input data
            mp_output (bool): Enable multi-processing for calculations on output data
        """
        # BUG FIX: mp_input was previously passed twice, silently discarding the
        # caller's mp_output setting
        super(KravatteOracle, self).__init__(key, workers, mp_input, mp_output)
        self.seed_generator(seed)

    def seed_generator(self, seed: bytes) -> None:
        """
        Re-seed the Kravatte collector state with new seed data.

        Args:
            seed (bytes): collection of seed bytes absorbed as a single message
        """
        self.collect_message(seed)

    def random(self, output_size: int) -> bytearray:
        """
        Generate a stream of pseudo-random bytes from the current state of the
        Kravatte collector.

        Args:
            output_size (int): number of bytes to return

        Returns:
            bytearray: pseudo-random Kravatte squeezed collector output
        """
        self.generate_digest(output_size)
        return self.digest
if __name__ == "__main__":
    from time import perf_counter
    import hashlib
    from binascii import hexlify
    import os

    # 32-byte all-ones key and a 4 MiB repeating-counter message
    my_key = b'\xFF' * 32
    my_message = bytes(x % 256 for x in range(4 * 1024 * 1024))

    print("Normal Message MAC Generation")
    t_start = perf_counter()
    my_kra = mac(my_key, my_message, 1024 * 1024 * 4)
    t_stop = perf_counter()
    print("Process Time:", t_stop - t_start)
    a1 = hashlib.md5()
    a1.update(my_kra)
    print(hexlify(a1.digest()))

    print("%d Process/Core Message MAC Generation" % os.cpu_count())
    t_start = perf_counter()
    my_kra = mac(my_key, my_message, 1024 * 1024 * 4, workers=os.cpu_count())
    t_stop = perf_counter()
    print("Process Time:", t_stop - t_start)
    a2 = hashlib.md5()
    a2.update(my_kra)
    print(hexlify(a2.digest()))

    # Serial and multi-process MAC computations must agree
    assert a1.digest() == a2.digest()
| [
"numpy.copy",
"math.ceil",
"hashlib.md5",
"numpy.bitwise_xor.reduce",
"math.floor",
"math.log2",
"time.perf_counter",
"numpy.array",
"numpy.zeros",
"numpy.uint64",
"multiprocessing.Pool",
"os.cpu_count",
"numpy.frombuffer",
"ctypes.memset"
] | [((870, 1005), 'numpy.array', 'np.array', (['[32778, 9223372039002259466, 9223372039002292353, 9223372036854808704, \n 2147483649, 9223372039002292232]'], {'dtype': 'np.uint64'}), '([32778, 9223372039002259466, 9223372039002292353, \n 9223372036854808704, 2147483649, 9223372039002292232], dtype=np.uint64)\n', (878, 1005), True, 'import numpy as np\n'), ((1177, 1312), 'numpy.array', 'np.array', (['[[0, 36, 3, 41, 18], [1, 44, 10, 45, 2], [62, 6, 43, 15, 61], [28, 55, 25, \n 21, 56], [27, 20, 39, 8, 14]]'], {'dtype': 'np.uint64'}), '([[0, 36, 3, 41, 18], [1, 44, 10, 45, 2], [62, 6, 43, 15, 61], [28,\n 55, 25, 21, 56], [27, 20, 39, 8, 14]], dtype=np.uint64)\n', (1185, 1312), True, 'import numpy as np\n'), ((1573, 1673), 'numpy.array', 'np.array', (['[[0, 3, 1, 4, 2], [1, 4, 2, 0, 3], [2, 0, 3, 1, 4], [3, 1, 4, 2, 0], [4, 2,\n 0, 3, 1]]'], {}), '([[0, 3, 1, 4, 2], [1, 4, 2, 0, 3], [2, 0, 3, 1, 4], [3, 1, 4, 2, 0\n ], [4, 2, 0, 3, 1]])\n', (1581, 1673), True, 'import numpy as np\n'), ((1861, 1961), 'numpy.array', 'np.array', (['[[0, 0, 0, 0, 0], [1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3], [4, 4,\n 4, 4, 4]]'], {}), '([[0, 0, 0, 0, 0], [1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3\n ], [4, 4, 4, 4, 4]])\n', (1869, 1961), True, 'import numpy as np\n'), ((2167, 2267), 'numpy.array', 'np.array', (['[[0, 0, 0, 0, 1], [1, 1, 1, 1, 2], [2, 2, 2, 2, 3], [3, 3, 3, 3, 4], [4, 4,\n 4, 4, 0]]'], {}), '([[0, 0, 0, 0, 1], [1, 1, 1, 1, 2], [2, 2, 2, 2, 3], [3, 3, 3, 3, 4\n ], [4, 4, 4, 4, 0]])\n', (2175, 2267), True, 'import numpy as np\n'), ((2491, 2591), 'numpy.array', 'np.array', (['[[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1,\n 2, 3, 4]]'], {}), '([[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4\n ], [0, 1, 2, 3, 4]])\n', (2499, 2591), True, 'import numpy as np\n'), ((2825, 2925), 'numpy.array', 'np.array', (['[[0, 0, 0, 1, 1], [1, 1, 1, 2, 2], [2, 2, 2, 3, 3], [3, 3, 3, 4, 4], [4, 4,\n 4, 0, 0]]'], {}), '([[0, 0, 0, 
1, 1], [1, 1, 1, 2, 2], [2, 2, 2, 3, 3], [3, 3, 3, 4, 4\n ], [4, 4, 4, 0, 0]])\n', (2833, 2925), True, 'import numpy as np\n'), ((3137, 3237), 'numpy.array', 'np.array', (['[[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1,\n 2, 4, 4]]'], {}), '([[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4\n ], [0, 1, 2, 4, 4]])\n', (3145, 3237), True, 'import numpy as np\n'), ((54652, 54666), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (54664, 54666), False, 'from time import perf_counter\n'), ((54732, 54746), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (54744, 54746), False, 'from time import perf_counter\n'), ((54797, 54810), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (54808, 54810), False, 'import hashlib\n'), ((54947, 54961), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (54959, 54961), False, 'from time import perf_counter\n'), ((55051, 55065), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (55063, 55065), False, 'from time import perf_counter\n'), ((55116, 55129), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (55127, 55129), False, 'import hashlib\n'), ((5558, 5579), 'numpy.copy', 'np.copy', (['self.kra_key'], {}), '(self.kra_key)\n', (5565, 5579), True, 'import numpy as np\n'), ((5605, 5638), 'numpy.zeros', 'np.zeros', (['[5, 5]'], {'dtype': 'np.uint64'}), '([5, 5], dtype=np.uint64)\n', (5613, 5638), True, 'import numpy as np\n'), ((12299, 12319), 'numpy.copy', 'np.copy', (['input_array'], {}), '(input_array)\n', (12306, 12319), True, 'import numpy as np\n'), ((13771, 13791), 'numpy.copy', 'np.copy', (['input_array'], {}), '(input_array)\n', (13778, 13791), True, 'import numpy as np\n'), ((15407, 15455), 'ctypes.memset', 'memset', (['collector_location', '(0)', 'self.KECCAK_BYTES'], {}), '(collector_location, 0, self.KECCAK_BYTES)\n', (15413, 15455), False, 'from ctypes import memset\n'), ((15556, 15598), 'ctypes.memset', 'memset', (['key_location', '(0)', 'self.KECCAK_BYTES'], {}), 
'(key_location, 0, self.KECCAK_BYTES)\n', (15562, 15598), False, 'from ctypes import memset\n'), ((15703, 15745), 'ctypes.memset', 'memset', (['key_location', '(0)', 'self.KECCAK_BYTES'], {}), '(key_location, 0, self.KECCAK_BYTES)\n', (15709, 15745), False, 'from ctypes import memset\n'), ((25002, 25025), 'numpy.copy', 'np.copy', (['self.collector'], {}), '(self.collector)\n', (25009, 25025), True, 'import numpy as np\n'), ((25053, 25075), 'numpy.copy', 'np.copy', (['self.roll_key'], {}), '(self.roll_key)\n', (25060, 25075), True, 'import numpy as np\n'), ((25789, 25820), 'numpy.copy', 'np.copy', (['self.history_collector'], {}), '(self.history_collector)\n', (25796, 25820), True, 'import numpy as np\n'), ((25845, 25870), 'numpy.copy', 'np.copy', (['self.history_key'], {}), '(self.history_key)\n', (25852, 25870), True, 'import numpy as np\n'), ((26402, 26425), 'numpy.copy', 'np.copy', (['self.collector'], {}), '(self.collector)\n', (26409, 26425), True, 'import numpy as np\n'), ((26453, 26475), 'numpy.copy', 'np.copy', (['self.roll_key'], {}), '(self.roll_key)\n', (26460, 26475), True, 'import numpy as np\n'), ((27451, 27482), 'numpy.copy', 'np.copy', (['self.history_collector'], {}), '(self.history_collector)\n', (27458, 27482), True, 'import numpy as np\n'), ((27507, 27532), 'numpy.copy', 'np.copy', (['self.history_key'], {}), '(self.history_key)\n', (27514, 27532), True, 'import numpy as np\n'), ((28067, 28090), 'numpy.copy', 'np.copy', (['self.collector'], {}), '(self.collector)\n', (28074, 28090), True, 'import numpy as np\n'), ((28118, 28140), 'numpy.copy', 'np.copy', (['self.roll_key'], {}), '(self.roll_key)\n', (28125, 28140), True, 'import numpy as np\n'), ((31648, 31671), 'numpy.copy', 'np.copy', (['self.collector'], {}), '(self.collector)\n', (31655, 31671), True, 'import numpy as np\n'), ((31699, 31721), 'numpy.copy', 'np.copy', (['self.roll_key'], {}), '(self.roll_key)\n', (31706, 31721), True, 'import numpy as np\n'), ((32459, 32490), 'numpy.copy', 
'np.copy', (['self.history_collector'], {}), '(self.history_collector)\n', (32466, 32490), True, 'import numpy as np\n'), ((32515, 32540), 'numpy.copy', 'np.copy', (['self.history_key'], {}), '(self.history_key)\n', (32522, 32540), True, 'import numpy as np\n'), ((34187, 34218), 'numpy.copy', 'np.copy', (['self.history_collector'], {}), '(self.history_collector)\n', (34194, 34218), True, 'import numpy as np\n'), ((34243, 34268), 'numpy.copy', 'np.copy', (['self.history_key'], {}), '(self.history_key)\n', (34250, 34268), True, 'import numpy as np\n'), ((35753, 35776), 'numpy.copy', 'np.copy', (['self.collector'], {}), '(self.collector)\n', (35760, 35776), True, 'import numpy as np\n'), ((35804, 35826), 'numpy.copy', 'np.copy', (['self.roll_key'], {}), '(self.roll_key)\n', (35811, 35826), True, 'import numpy as np\n'), ((36031, 36062), 'numpy.copy', 'np.copy', (['self.history_collector'], {}), '(self.history_collector)\n', (36038, 36062), True, 'import numpy as np\n'), ((36087, 36112), 'numpy.copy', 'np.copy', (['self.history_key'], {}), '(self.history_key)\n', (36094, 36112), True, 'import numpy as np\n'), ((37567, 37590), 'numpy.copy', 'np.copy', (['self.collector'], {}), '(self.collector)\n', (37574, 37590), True, 'import numpy as np\n'), ((37618, 37640), 'numpy.copy', 'np.copy', (['self.roll_key'], {}), '(self.roll_key)\n', (37625, 37640), True, 'import numpy as np\n'), ((37680, 37707), 'numpy.copy', 'np.copy', (['self.new_collector'], {}), '(self.new_collector)\n', (37687, 37707), True, 'import numpy as np\n'), ((41753, 41776), 'numpy.copy', 'np.copy', (['self.collector'], {}), '(self.collector)\n', (41760, 41776), True, 'import numpy as np\n'), ((41804, 41826), 'numpy.copy', 'np.copy', (['self.roll_key'], {}), '(self.roll_key)\n', (41811, 41826), True, 'import numpy as np\n'), ((41866, 41893), 'numpy.copy', 'np.copy', (['self.new_collector'], {}), '(self.new_collector)\n', (41873, 41893), True, 'import numpy as np\n'), ((42098, 42129), 'numpy.copy', 'np.copy', 
(['self.history_collector'], {}), '(self.history_collector)\n', (42105, 42129), True, 'import numpy as np\n'), ((42154, 42179), 'numpy.copy', 'np.copy', (['self.history_key'], {}), '(self.history_key)\n', (42161, 42179), True, 'import numpy as np\n'), ((42209, 42246), 'numpy.copy', 'np.copy', (['self.history_collector_state'], {}), '(self.history_collector_state)\n', (42216, 42246), True, 'import numpy as np\n'), ((8874, 8902), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'self.workers'}), '(processes=self.workers)\n', (8878, 8902), False, 'from multiprocessing import Pool\n'), ((11644, 11672), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'self.workers'}), '(processes=self.workers)\n', (11648, 11672), False, 'from multiprocessing import Pool\n'), ((28846, 28877), 'numpy.copy', 'np.copy', (['self.history_collector'], {}), '(self.history_collector)\n', (28853, 28877), True, 'import numpy as np\n'), ((28906, 28931), 'numpy.copy', 'np.copy', (['self.history_key'], {}), '(self.history_key)\n', (28913, 28931), True, 'import numpy as np\n'), ((43752, 43780), 'math.ceil', 'ceil', (['(message_size_bytes / 2)'], {}), '(message_size_bytes / 2)\n', (43756, 43780), False, 'from math import floor, ceil, log2\n'), ((54919, 54933), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (54931, 54933), False, 'import os\n'), ((55024, 55038), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (55036, 55038), False, 'import os\n'), ((4318, 4329), 'os.cpu_count', 'cpu_count', ([], {}), '()\n', (4327, 4329), False, 'from os import cpu_count\n'), ((4869, 4943), 'numpy.frombuffer', 'np.frombuffer', (['key_pad'], {'dtype': 'np.uint64', 'count': 'self.KECCAK_LANES', 'offset': '(0)'}), '(key_pad, dtype=np.uint64, count=self.KECCAK_LANES, offset=0)\n', (4882, 4943), True, 'import numpy as np\n'), ((12531, 12601), 'numpy.bitwise_xor.reduce', 'np.bitwise_xor.reduce', (['state[self.THETA_REORDER[0],]', '(1)'], {'keepdims': '(True)'}), '(state[self.THETA_REORDER[0],], 1, keepdims=True)\n', 
(12552, 12601), True, 'import numpy as np\n'), ((12605, 12681), 'numpy.bitwise_xor.reduce', 'np.bitwise_xor.reduce', (['array_shift[self.THETA_REORDER[1],]', '(1)'], {'keepdims': '(True)'}), '(array_shift[self.THETA_REORDER[1],], 1, keepdims=True)\n', (12626, 12681), True, 'import numpy as np\n'), ((14003, 14073), 'numpy.bitwise_xor.reduce', 'np.bitwise_xor.reduce', (['state[self.THETA_REORDER[0],]', '(1)'], {'keepdims': '(True)'}), '(state[self.THETA_REORDER[0],], 1, keepdims=True)\n', (14024, 14073), True, 'import numpy as np\n'), ((14077, 14153), 'numpy.bitwise_xor.reduce', 'np.bitwise_xor.reduce', (['array_shift[self.THETA_REORDER[1],]', '(1)'], {'keepdims': '(True)'}), '(array_shift[self.THETA_REORDER[1],], 1, keepdims=True)\n', (14098, 14153), True, 'import numpy as np\n'), ((16373, 16385), 'numpy.uint64', 'np.uint64', (['(3)'], {}), '(3)\n', (16382, 16385), True, 'import numpy as np\n'), ((43811, 43862), 'math.floor', 'floor', (['((message_size_bytes + 1) / self.KECCAK_BYTES)'], {}), '((message_size_bytes + 1) / self.KECCAK_BYTES)\n', (43816, 43862), False, 'from math import floor, ceil, log2\n'), ((43891, 43902), 'math.log2', 'log2', (['(q - 1)'], {}), '(q - 1)\n', (43895, 43902), False, 'from math import floor, ceil, log2\n'), ((7459, 7551), 'numpy.frombuffer', 'np.frombuffer', (['kra_msg'], {'dtype': 'np.uint64', 'count': '(25)', 'offset': '(msg_block * self.KECCAK_BYTES)'}), '(kra_msg, dtype=np.uint64, count=25, offset=msg_block * self.\n KECCAK_BYTES)\n', (7472, 7551), True, 'import numpy as np\n'), ((12824, 12855), 'numpy.uint64', 'np.uint64', (['(64 - self.RHO_SHIFTS)'], {}), '(64 - self.RHO_SHIFTS)\n', (12833, 12855), True, 'import numpy as np\n'), ((14296, 14327), 'numpy.uint64', 'np.uint64', (['(64 - self.RHO_SHIFTS)'], {}), '(64 - self.RHO_SHIFTS)\n', (14305, 14327), True, 'import numpy as np\n'), ((17107, 17119), 'numpy.uint64', 'np.uint64', (['(1)'], {}), '(1)\n', (17116, 17119), True, 'import numpy as np\n'), ((29484, 29580), 
'numpy.frombuffer', 'np.frombuffer', (['padded_bytes'], {'dtype': 'np.uint64', 'count': '(25)', 'offset': '(msg_block * self.KECCAK_BYTES)'}), '(padded_bytes, dtype=np.uint64, count=25, offset=msg_block *\n self.KECCAK_BYTES)\n', (29497, 29580), True, 'import numpy as np\n'), ((16243, 16255), 'numpy.uint64', 'np.uint64', (['(7)'], {}), '(7)\n', (16252, 16255), True, 'import numpy as np\n'), ((16275, 16288), 'numpy.uint64', 'np.uint64', (['(57)'], {}), '(57)\n', (16284, 16288), True, 'import numpy as np\n'), ((16900, 16912), 'numpy.uint64', 'np.uint64', (['(7)'], {}), '(7)\n', (16909, 16912), True, 'import numpy as np\n'), ((16938, 16951), 'numpy.uint64', 'np.uint64', (['(57)'], {}), '(57)\n', (16947, 16951), True, 'import numpy as np\n'), ((17003, 17016), 'numpy.uint64', 'np.uint64', (['(18)'], {}), '(18)\n', (17012, 17016), True, 'import numpy as np\n'), ((17042, 17055), 'numpy.uint64', 'np.uint64', (['(46)'], {}), '(46)\n', (17051, 17055), True, 'import numpy as np\n'), ((6171, 6263), 'numpy.frombuffer', 'np.frombuffer', (['kra_msg'], {'dtype': 'np.uint64', 'count': '(25)', 'offset': '(msg_block * self.KECCAK_BYTES)'}), '(kra_msg, dtype=np.uint64, count=25, offset=msg_block * self.\n KECCAK_BYTES)\n', (6184, 6263), True, 'import numpy as np\n')] |
from ics import Calendar, Event
from datetime import date, timedelta
from db import fetchall_dict
from rich import print
from flag import flag
c = Calendar()
def add_allday_event(c, event_start, event_name, event_description):
    """Append a single all-day event to calendar *c*.

    Args:
        c: ics Calendar to add the event to
        event_start (date): day the event takes place
        event_name (str): event title
        event_description (str): event body text
    """
    evt = Event()
    # Span exactly one day, then mark the event as all-day
    evt.begin = event_start.isoformat()
    evt.end = (event_start + timedelta(days=1)).isoformat()
    evt.name = event_name
    evt.description = event_description
    evt.make_all_day()
    c.events.add(evt)
# Fetch one video per city, preferring larger cities and spreading countries
cities = fetchall_dict(
    """
select
u
, c.n
, concat_ws(', ', c.s,c.c) s
, v.video
from cities c
JOIN videos v ON v.id = c.id
order by (rank < 3500 and c.id < 3500) DESC
, ROW_NUMBER() OVER ( PARTITION BY u ) -- prefer to show many countries
, random()
--limit 2
"""
)
# One all-day event every 2 days, starting today
START_DATE = date.today()
for city in cities:
    print(city)
    add_allday_event(
        c,
        event_start=START_DATE,
        event_name=flag(city["u"]) + " " + city["n"],
        event_description=f"""{city["n"]} welcomes you !
{city["video"]}
{city["s"]}
""",
    )
    START_DATE += timedelta(2)
# NOTE: a bare `c.events` expression statement was removed here -- it evaluated
# the attribute and discarded the result (a no-op)
with open("my.ics", "w") as my_file:
    my_file.writelines(c)
| [
"db.fetchall_dict",
"flag.flag",
"ics.Event",
"datetime.timedelta",
"ics.Calendar",
"rich.print",
"datetime.date.today"
] | [((149, 159), 'ics.Calendar', 'Calendar', ([], {}), '()\n', (157, 159), False, 'from ics import Calendar, Event\n'), ((457, 747), 'db.fetchall_dict', 'fetchall_dict', (['"""\nselect\n u\n , c.n\n , concat_ws(\', \', c.s,c.c) s\n , v.video\nfrom cities c\nJOIN videos v ON v.id = c.id\norder by (rank < 3500 and c.id < 3500) DESC\n , ROW_NUMBER() OVER ( PARTITION BY u ) -- prefer to show many countries\n , random()\n--limit 2\n"""'], {}), '(\n """\nselect\n u\n , c.n\n , concat_ws(\', \', c.s,c.c) s\n , v.video\nfrom cities c\nJOIN videos v ON v.id = c.id\norder by (rank < 3500 and c.id < 3500) DESC\n , ROW_NUMBER() OVER ( PARTITION BY u ) -- prefer to show many countries\n , random()\n--limit 2\n"""\n )\n', (470, 747), False, 'from db import fetchall_dict\n'), ((758, 770), 'datetime.date.today', 'date.today', ([], {}), '()\n', (768, 770), False, 'from datetime import date, timedelta\n'), ((239, 246), 'ics.Event', 'Event', ([], {}), '()\n', (244, 246), False, 'from ics import Calendar, Event\n'), ((795, 806), 'rich.print', 'print', (['city'], {}), '(city)\n', (800, 806), False, 'from rich import print\n'), ((1048, 1060), 'datetime.timedelta', 'timedelta', (['(2)'], {}), '(2)\n', (1057, 1060), False, 'from datetime import date, timedelta\n'), ((374, 391), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (383, 391), False, 'from datetime import date, timedelta\n'), ((892, 907), 'flag.flag', 'flag', (["city['u']"], {}), "(city['u'])\n", (896, 907), False, 'from flag import flag\n')] |
import logging
import requests
from settings_csv import ALGO_NFDOMAINS
# API documentation: https://editor.swagger.io/?url=https://api.testnet.nf.domains/info/openapi3.yaml
class NFDomainsAPI:
    """Minimal client for the NFDomains (NFD) name-resolution API."""

    # Shared session so TCP connections are reused across lookups
    session = requests.Session()

    def get_address(self, name):
        """Resolve an NFD name to an Algorand address, or None on lookup failure.

        https://docs.nf.domains/docs/faq#how-do-i-set-my-address-to-resolve-my-nfd
        If present, use the primary/deposit address ("caAlgo"), otherwise fall
        back to the owner address.

        Args:
            name: NFD name to resolve (e.g. "example.algo")

        Returns:
            str or None: resolved Algorand address, or None on a non-200 response
        """
        endpoint = f"nfd/{name}"
        params = {"view": "brief"}
        data, status_code = self._query(ALGO_NFDOMAINS, endpoint, params)

        if status_code != 200:
            return None
        # Truthy check guards against a present-but-empty "caAlgo" list, which
        # previously raised IndexError on data["caAlgo"][0]
        ca_algo = data.get("caAlgo")
        if ca_algo:
            return ca_algo[0]
        return data["owner"]

    def _query(self, base_url, endpoint, params=None):
        """GET {base_url}/{endpoint} and return (parsed JSON body, HTTP status code)."""
        logging.info("Querying NFDomains endpoint %s...", endpoint)
        url = f"{base_url}/{endpoint}"
        response = self.session.get(url, params=params)
        return response.json(), response.status_code
| [
"logging.info",
"requests.Session"
] | [((218, 236), 'requests.Session', 'requests.Session', ([], {}), '()\n', (234, 236), False, 'import requests\n'), ((883, 942), 'logging.info', 'logging.info', (['"""Querying NFDomains endpoint %s..."""', 'endpoint'], {}), "('Querying NFDomains endpoint %s...', endpoint)\n", (895, 942), False, 'import logging\n')] |
# -*- coding: utf-8
from django.contrib import admin
from ancfindersite.models import *
@admin.register(CommissionerInfo)
class CommissionerInfoAdmin(admin.ModelAdmin):
    """Admin list view for CommissionerInfo showing the supersession chain."""
    list_display = ['id', 'latest', 'created', 'author', 'anc', 'smd', 'field_name', 'field_value', 'linkage']
    raw_id_fields = ['author']
    readonly_fields = ['author', 'superseded_by', 'anc', 'smd', 'field_name']

    def latest(self, obj):
        # A record is "latest" when nothing supersedes it
        return obj.superseded_by is None
    latest.boolean = True

    def linkage(self, obj):
        """Render the next/prev record ids in the supersession chain."""
        ret = []
        if obj.superseded_by is not None:
            ret.append("next: %d" % obj.superseded_by.id)
        try:
            ret.append("prev: %d" % obj.supersedes.id)
        except Exception:
            # obj.supersedes doesn't return None; it raises a DoesNotExist
            # exception when there is no predecessor. Catching Exception
            # (instead of the former bare except) avoids swallowing
            # SystemExit/KeyboardInterrupt.
            pass
        return "; ".join(ret)
admin.site.register(Document)
| [
"django.contrib.admin.register",
"django.contrib.admin.site.register"
] | [((92, 124), 'django.contrib.admin.register', 'admin.register', (['CommissionerInfo'], {}), '(CommissionerInfo)\n', (106, 124), False, 'from django.contrib import admin\n'), ((810, 839), 'django.contrib.admin.site.register', 'admin.site.register', (['Document'], {}), '(Document)\n', (829, 839), False, 'from django.contrib import admin\n')] |
# Author: <NAME>
# Date: January 29, 2017
import tornado.ioloop
import tornado.web
import tornado.httpserver
import hashlib
import base64
import json
import mysql.connector as sql
dbuser = 'csse'
# Register a new user
class UserHandler(tornado.web.RequestHandler):
    """Create (POST) and authenticate (GET) users in the `userinfo` table.

    SECURITY FIX: all queries are now parameterized; they were previously built
    by string concatenation of user-supplied values (SQL injection).
    NOTE(review): passwords are still stored and compared in plaintext --
    hashing (hashlib is already imported at module level) should be added.
    """

    def set_default_headers(self):
        # Permissive CORS headers so a browser front-end can call this API
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET')
        self.set_header('Cache-Control', 'max-age=0,must-revalidate')

    def post(self, username, password, person_name):
        """Register a new user; responds with the new row id, or id -1 if the user exists.

        NOTE(review): the response key 'messsage' is misspelled; preserved here
        for client compatibility.
        """
        db = sql.connect(user=dbuser, database='wireshark', host='127.0.0.1')
        cursor = db.cursor(buffered=True)
        # %s placeholders are bound by mysql.connector at execute() time
        checkquery = "SELECT * FROM userinfo WHERE username = %s"
        insertquery = "INSERT INTO userinfo(username, password, person_name) VALUES(%s, %s, %s)"
        selectquery = "SELECT id FROM userinfo WHERE username = %s"
        try:
            cursor.execute(checkquery, (username,))
            res = cursor.fetchone()
            if res is None:
                cursor.execute(insertquery, (username, password, person_name))
                db.commit()
                cursor.execute(selectquery, (username,))
                idd = cursor.fetchone()[0]
                print(idd)
                resobj = {'id': idd}
                db.commit()
                self.write({ 'result': True, 'messsage': 'Successfully created user', 'data' : resobj})
            else:
                db.commit()
                resobj = {'id': -1 }
                self.write({ 'result': False, 'messsage': 'User already exists', 'data' : resobj})
        except sql.Error as err:
            db.rollback()
            self.write({'result': False, 'message': 'Some weird error occured' })
        db.close()

    def get(self, username, password):
        """Log a user in; responds with the user id, or id -1 when credentials don't match."""
        db = sql.connect(user=dbuser, database='wireshark', host='127.0.0.1')
        cursor = db.cursor(buffered=True)
        # Parameterized to prevent SQL injection (was string-concatenated);
        # the debug print no longer leaks the password
        query = "SELECT * FROM userinfo WHERE username=%s AND password=%s"
        print(query)
        try:
            cursor.execute(query, (username, password))
            res = cursor.fetchone()
            db.commit()
            if res is not None:
                # res[3] is assumed to be the id column of userinfo -- verify schema
                resobj = {'id': res[3]}
                self.write({ 'result': True, 'message': 'Successfully logged in', 'data': resobj })
            else:
                resobj = {'id': -1 }
                self.write({ 'result': False, 'message': 'Couldnt logged in. error.', 'data': resobj })
        except sql.Error as err:
            db.rollback()
            self.write({'result': False, 'message': 'Some weird error occured' })
        db.close()
# Post a new message
class MessageHandler(tornado.web.RequestHandler):
    """Create chat posts (POST) and fetch the latest posts (GET)."""

    def set_default_headers(self):
        # Permissive CORS so the browser client can call this API cross-origin.
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET')
        self.set_header('Cache-Control', 'max-age=0,must-revalidate')

    def post(self, uid, message):
        """Store `message` as a post authored by user `uid` (looked up in `userinfo`)."""
        db = sql.connect(user=dbuser, database='wireshark', host='127.0.0.1')
        cursor = db.cursor(buffered=True)
        try:
            # Parameterized queries replace the previous string-concatenated SQL,
            # which was injectable from the URL path.
            cursor.execute(
                "SELECT username, password, person_name FROM userinfo WHERE id = %s",
                (uid,))
            name = cursor.fetchone()
            print(name)
            if name is not None:
                cursor.execute(
                    "INSERT INTO posts(person_name, id, message) VALUES(%s, %s, %s)",
                    (name[2], uid, message))
                db.commit()
                # NOTE(review): echoing the password back to the client looks unsafe;
                # kept only for response-shape compatibility with existing clients.
                resobj = { 'username': name[0], 'password': name[1] }
                self.write({ 'result': True, 'messsage': 'Successfully created post', 'data': resobj })
            else:
                db.commit()
                self.write({ 'result': False, 'messsage': 'wrong id breh!!' })
        except sql.Error as err:
            db.rollback()
            self.write({'result': False, 'message': 'Some weird error occured' })
        db.close()

    def get(self):
        """Return up to 20 posts as a JSON list of {person_name, post} objects."""
        db = sql.connect(user=dbuser, database='wireshark', host='127.0.0.1')
        cursor = db.cursor(buffered=True)
        query = "SELECT * FROM posts LIMIT 20"
        print(query)
        try:
            cursor.execute(query)
            res = cursor.fetchall()
            retobj = []
            for item in res:
                data = { 'person_name': item[0], 'post': item[2] }
                retobj.append(data)
            db.commit()
            if res is not None:
                self.write({ 'result': True, 'message': 'Got these posts.', 'data': retobj })
            else:
                self.write({ 'result': False, 'message': 'Couldnt logged in. error.' })
        except sql.Error as err:
            db.rollback()
            self.write({'result': False, 'message': 'Some weird error occured' })
        db.close()
class ProxyMainHandler(tornado.web.RequestHandler):
    """Easter-egg endpoint: returns a base64 blob only for the magic number 47."""

    def set_default_headers(self):
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET')

    def get(self, numb):
        print('input was: ' + numb)
        num = int(numb)
        if num == 47:
            decodedmsg = 'It is a period of civil war. Rebel spaceships, striking from a hidden base, have won their first victory against the evil Galactic Empire.\nDuring the battle, Rebel spies managed to steal secret plans to the Empire\'s ultimate weapon, the DEATH STAR, an armored space station with enough power to destroy an entire planet.\nPursued by the Empire\'s sinister agents, <NAME> races home aboard her starship, custodian of the stolen plans that can save her people and restore freedom to the galaxy....'
            # BUGFIX: base64.b64encode requires bytes on Python 3 (passing a str
            # raised TypeError), and the result must be decoded back to str so
            # the JSON response body is serializable.
            msg = base64.b64encode(decodedmsg.encode('utf-8')).decode('ascii')
            self.write({ "result": True, "key": msg })
        else:
            msg = base64.b64encode(b'Not the secret buddy! Haha try again!').decode('ascii')
            self.write({ "result": False, "key": msg })
class SharkHandler(tornado.web.RequestHandler):
    """Serves the chat ("shark") client page."""
    def get(self):
        self.render('sharkclient.html')
class ProxyHandler(tornado.web.RequestHandler):
    """Serves the proxy-exercise client page."""
    def get(self):
        self.render('client.html')
class ClubHandler(tornado.web.RequestHandler):
    """Serves the club web-activity page with a custom puzzle header."""
    def get(self):
        # NOTE(review): the header value looks like ROT13-encoded easter-egg
        # text -- left byte-for-byte intact on purpose.
        self.add_header('CSSE290_CLASS', 'Nu! Lbh znqr vg guvf sne. Jryy, gurerf abguvat zber. Gur frperg vf: v nz njrfbzr.')
        self.render('webactivity.html')
def make_app():
    """Assemble the Tornado application with every URL route."""
    routes = [
        (r"/shark", SharkHandler),
        (r"/shark/signup/username/([^/]*)/password/([^/]*)/person_name/([^/]*)", UserHandler),
        (r"/shark/login/username/([^/]*)/password/([^/]*)", UserHandler),
        (r"/shark/message/id/([^/]*)/message/([^/]*)", MessageHandler),
        (r"/shark/message/get20", MessageHandler),
        (r"/proxy", ProxyHandler),
        (r"/webactivity", ClubHandler),
        (r"/proxy/([^/]*)", ProxyMainHandler),
    ]
    return tornado.web.Application(routes)
# Script entry point: build the app and serve it on port 8888 until interrupted.
if __name__ == "__main__":
    app = tornado.httpserver.HTTPServer(make_app())
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()
| [
"mysql.connector.connect",
"base64.b64encode"
] | [((621, 685), 'mysql.connector.connect', 'sql.connect', ([], {'user': 'dbuser', 'database': '"""wireshark"""', 'host': '"""127.0.0.1"""'}), "(user=dbuser, database='wireshark', host='127.0.0.1')\n", (632, 685), True, 'import mysql.connector as sql\n'), ((1802, 1866), 'mysql.connector.connect', 'sql.connect', ([], {'user': 'dbuser', 'database': '"""wireshark"""', 'host': '"""127.0.0.1"""'}), "(user=dbuser, database='wireshark', host='127.0.0.1')\n", (1813, 1866), True, 'import mysql.connector as sql\n'), ((2951, 3015), 'mysql.connector.connect', 'sql.connect', ([], {'user': 'dbuser', 'database': '"""wireshark"""', 'host': '"""127.0.0.1"""'}), "(user=dbuser, database='wireshark', host='127.0.0.1')\n", (2962, 3015), True, 'import mysql.connector as sql\n'), ((3954, 4018), 'mysql.connector.connect', 'sql.connect', ([], {'user': 'dbuser', 'database': '"""wireshark"""', 'host': '"""127.0.0.1"""'}), "(user=dbuser, database='wireshark', host='127.0.0.1')\n", (3965, 4018), True, 'import mysql.connector as sql\n'), ((5651, 5679), 'base64.b64encode', 'base64.b64encode', (['decodedmsg'], {}), '(decodedmsg)\n', (5667, 5679), False, 'import base64\n'), ((5763, 5820), 'base64.b64encode', 'base64.b64encode', (['"""Not the secret buddy! Haha try again!"""'], {}), "('Not the secret buddy! Haha try again!')\n", (5779, 5820), False, 'import base64\n')] |
from typing import List, Tuple
from objects.TypedDicts import TypedPresence, TypedReadMatch
from objects.constants.GameModes import GameModes
from objects.constants.Modificators import Mods
from objects.constants.Slots import SlotStatus, SlotTeams
from objects.constants.multiplayer import MatchTypes, MatchScoringTypes, MatchTeamTypes, MultiSpecialModes
from objects.Multiplayer import Slot
from objects.BanchoObjects import Message
from packets.Reader.index import KurisoBuffer
class PacketResolver:
    """Static helpers that decode raw client packet payloads into typed objects.

    Every reader loads `data` into a fresh KurisoBuffer and consumes the
    fields in wire order. Several packet types are a single int32; those all
    share the `_read_single_i32` helper instead of repeating the setup.
    """

    @staticmethod
    async def _buffer_from(data: bytes) -> KurisoBuffer:
        # Shared setup: every packet reader starts from a buffer loaded with `data`.
        buffer = KurisoBuffer(None)
        await buffer.write_to_buffer(data)
        return buffer

    @staticmethod
    async def _read_single_i32(data: bytes) -> int:
        # Many packets carry exactly one int32 payload; dedupes the old
        # copy-pasted reader bodies.
        buffer = await PacketResolver._buffer_from(data)
        return await buffer.read_int_32()

    @staticmethod
    async def read_new_presence(data: bytes) -> TypedPresence:
        """Decode a client presence/status update packet."""
        buffer = await PacketResolver._buffer_from(data)
        return {
            'action': await buffer.read_byte(),
            'action_text': await buffer.read_osu_string(),
            'map_md5': await buffer.read_osu_string(),
            'mods': await buffer.read_u_int_32(),
            'mode': await buffer.read_byte(),
            'map_id': await buffer.read_int_32()
        }

    @staticmethod
    async def read_request_users_stats(data: bytes) -> List[int]:
        """Decode the list of user ids whose stats the client requests."""
        buffer = await PacketResolver._buffer_from(data)
        return await buffer.read_i32_list()

    @staticmethod
    async def read_pr_filter(data: bytes) -> int:
        return await PacketResolver._read_single_i32(data)

    @staticmethod
    async def read_slot_index(data: bytes) -> int:
        return await PacketResolver._read_single_i32(data)

    @staticmethod
    async def read_message(data: bytes) -> Message:
        """Decode a chat message packet."""
        buffer = await PacketResolver._buffer_from(data)
        return Message(
            sender=await buffer.read_osu_string(),
            body=await buffer.read_osu_string(),
            to=await buffer.read_osu_string(),
            client_id=await buffer.read_int_32()
        )

    @staticmethod
    async def read_channel_name(data: bytes) -> str:
        buffer = await PacketResolver._buffer_from(data)
        return await buffer.read_osu_string()

    @staticmethod
    async def read_specatator_id(data: bytes) -> int:
        # NOTE: the method name keeps its historical "specatator" typo because
        # callers reference it by this name.
        return await PacketResolver._read_single_i32(data)

    @staticmethod
    async def read_friend_id(data: bytes) -> int:
        return await PacketResolver._read_single_i32(data)

    @staticmethod
    async def read_match(data: bytes) -> TypedReadMatch:
        """Decode a full multiplayer match description packet."""
        buffer = await PacketResolver._buffer_from(data)
        await buffer.read_int_16()  # skip 3 bytes for id and inProgress because default is False
        await buffer.read_byte()
        match_type = MatchTypes(await buffer.read_byte())
        mods = Mods(await buffer.read_int_32())
        name = await buffer.read_osu_string()
        password = await buffer.read_osu_string()
        beatmap_name = await buffer.read_osu_string()
        beatmap_id = await buffer.read_int_32()
        beatmap_md5 = await buffer.read_osu_string()
        slots = [Slot() for _ in range(0, 16)]  # make slots
        for slot in slots:
            slot.status = SlotStatus(await buffer.read_byte())
        for slot in slots:
            slot.team = SlotTeams(await buffer.read_byte())
        for slot in slots:
            # Occupied slots are followed by a player id on the wire; it is
            # consumed here but not stored.
            if slot.status.value & SlotStatus.HasPlayer:
                await buffer.read_int_32()
        host_id = await buffer.read_int_32()
        play_mode = GameModes(await buffer.read_byte())
        scoring_type = MatchScoringTypes(await buffer.read_byte())
        team_type = MatchTeamTypes(await buffer.read_byte())
        is_freemod = await buffer.read_bool()
        match_freemod = MultiSpecialModes(int(is_freemod))
        # In freemod, each slot carries its own mods after the shared fields.
        if is_freemod:
            for slot in slots:
                slot.mods = Mods(await buffer.read_int_32())
        seed = await buffer.read_int_32()
        t_dict = {
            'match_type': match_type,
            'mods': mods,
            'name': name,
            'password': password,
            'beatmap_name': beatmap_name,
            'beatmap_id': beatmap_id,
            'beatmap_md5': beatmap_md5,
            'slots': slots,
            'host_id': host_id,
            'play_mode': play_mode,
            'scoring_type': scoring_type,
            'team_type': team_type,
            'match_freemod': match_freemod,
            'seed': seed
        }
        return t_dict

    @staticmethod
    async def read_mp_join_data(data: bytes) -> Tuple[int, str]:
        """Decode a match-join request: (match id, password)."""
        buffer = await PacketResolver._buffer_from(data)
        return await buffer.read_int_32(), await buffer.read_osu_string()

    @staticmethod
    async def read_mods(data: bytes) -> int:
        return await PacketResolver._read_single_i32(data)

    @staticmethod
    async def read_user_id(data: bytes) -> int:
        return await PacketResolver._read_single_i32(data)

    @staticmethod
    async def read_match_id(data: bytes) -> int:
        return await PacketResolver._read_single_i32(data)
| [
"packets.Reader.index.KurisoBuffer",
"objects.Multiplayer.Slot"
] | [((605, 623), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (617, 623), False, 'from packets.Reader.index import KurisoBuffer\n'), ((1103, 1121), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (1115, 1121), False, 'from packets.Reader.index import KurisoBuffer\n'), ((1295, 1313), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (1307, 1313), False, 'from packets.Reader.index import KurisoBuffer\n'), ((1486, 1504), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (1498, 1504), False, 'from packets.Reader.index import KurisoBuffer\n'), ((1678, 1696), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (1690, 1696), False, 'from packets.Reader.index import KurisoBuffer\n'), ((2059, 2077), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (2071, 2077), False, 'from packets.Reader.index import KurisoBuffer\n'), ((2257, 2275), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (2269, 2275), False, 'from packets.Reader.index import KurisoBuffer\n'), ((2447, 2465), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (2459, 2465), False, 'from packets.Reader.index import KurisoBuffer\n'), ((2644, 2662), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (2656, 2662), False, 'from packets.Reader.index import KurisoBuffer\n'), ((4701, 4719), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (4713, 4719), False, 'from packets.Reader.index import KurisoBuffer\n'), ((4918, 4936), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (4930, 4936), False, 'from packets.Reader.index import KurisoBuffer\n'), ((5106, 5124), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (5118, 5124), False, 'from 
packets.Reader.index import KurisoBuffer\n'), ((5295, 5313), 'packets.Reader.index.KurisoBuffer', 'KurisoBuffer', (['None'], {}), '(None)\n', (5307, 5313), False, 'from packets.Reader.index import KurisoBuffer\n'), ((3216, 3222), 'objects.Multiplayer.Slot', 'Slot', ([], {}), '()\n', (3220, 3222), False, 'from objects.Multiplayer import Slot\n')] |
# coding: utf-8
import re
def preg_replace_callback(pattern, callback, subject):
    """PHP-style preg_replace_callback: replace every match of `pattern` in
    `subject` with the value returned by `callback(match)`."""
    return re.compile(pattern).sub(callback, subject)
if __name__ == '__main__':
    # Demo: bump the year in each mm/dd/yyyy date by one.
    text = 'April fools day is 04/01/2002\n' + \
           'Last christmas was 12/24/2001\n'
    print(preg_replace_callback(r'(\d{2}/\d{2}/)(\d{4})', lambda m: m.group(1) + str(int(m.group(2)) + 1), text))
| [
"re.sub"
] | [((95, 129), 're.sub', 're.sub', (['pattern', 'callback', 'subject'], {}), '(pattern, callback, subject)\n', (101, 129), False, 'import re\n')] |
"""Example of a Web Scraper using Regular Expressions"""
# Dependencies
import re
# Data
text_1 = "crypto-bot that is trading Bitcoin and other currencies"
text_2 = "cryptographic encryption methods that can be cracked easily with quantum computers"
# One-Liner
pattern = re.compile("crypto(.{1,30})coin")
# Result
print(pattern.match(text_1))
print(pattern.match(text_2)) | [
"re.compile"
] | [((275, 308), 're.compile', 're.compile', (['"""crypto(.{1,30})coin"""'], {}), "('crypto(.{1,30})coin')\n", (285, 308), False, 'import re\n')] |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404, redirect
from django.http import HttpResponse, JsonResponse
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.common.action_chains import ActionChains
from app.models import ItalianName, City
import csv
import os
@login_required(login_url="/login/")
def scrape_data(request):
scrape_name = request.GET.get('scrape_name', None)
start_id = request.GET.get('start_id', None)
end_id = request.GET.get('end_id', None)
option = webdriver.ChromeOptions()
option.add_argument("window-size=1280,800")
# option.add_argument("--headless")
# Setup wait for later
# wait = WebDriverWait(driver, 10)
driver = ''
url = ''
id = int(start_id)
while(id <= int(end_id)):
csv_file = "csv_files/"+scrape_name+"_"+str(id)+".csv"
if os.path.exists(csv_file):
os.remove(csv_file)
try:
CLICK_NEXT_BUTTON = True
CLICK_NEXT_BUTTON_NUM = 0;
aaaaaaaaaa = 0
while (CLICK_NEXT_BUTTON):
driver = webdriver.Chrome(ChromeDriverManager().install(), options=option)
driver.set_window_position(0, 0)
# driver.set_window_size(0, 0)
url = 'https://sfera.sferabit.com/servizi/alboonlineBoot/index.php?id='+str(id)
driver.get(url)
original_window = driver.current_window_handle
# Check we don't have other windows open already
assert len(driver.window_handles) == 1
driver.switch_to.window(driver.window_handles[0])
driver.implicitly_wait(10)
name_input = driver.find_element(By.ID, 'filtroRagioneSociale')
name_input.send_keys(scrape_name)
# ActionChains(driver).move_to_element(name_input).key_up('Francesco', name_input).perform()
# ActionChains(driver).move_to_element(name_input).send_keys(name_input, 'Francesco').perform()
# ActionChains(driver).move_to_element(name_input).key_down(Keys.CONTROL).send_keys('Francesco').key_up(Keys.CONTROL).perform()
send_button = driver.find_element(By.CSS_SELECTOR, 'button.btn-primary')
ActionChains(driver).move_to_element(send_button).click(send_button).perform()
driver.implicitly_wait(10)
if (CLICK_NEXT_BUTTON):
for i in range(CLICK_NEXT_BUTTON_NUM):
next_button = driver.find_elements(By.CSS_SELECTOR, '#risultatoRicerca a')[2]
ActionChains(driver).move_to_element(next_button).click(next_button).perform()
driver.implicitly_wait(10)
pros_num_table = driver.find_element(By.CSS_SELECTOR, '#risultatoRicerca>table')
pros_num_td = pros_num_table.find_elements(By.TAG_NAME, 'td')[1].get_attribute('innerHTML').strip().split(' ')
pros_now_num = pros_num_td[0].strip()
pros_total_num = pros_num_td[2].strip()
print(pros_now_num)
print(pros_total_num)
if(pros_total_num in pros_now_num):
CLICK_NEXT_BUTTON = False
else:
CLICK_NEXT_BUTTON = True
modal_buttons = driver.find_elements(By.CSS_SELECTOR, 'button.buttonAnagrafica')
for modal_button in modal_buttons:
ActionChains(driver).move_to_element(modal_button).click(modal_button).perform()
driver.implicitly_wait(30)
fonts = driver.find_elements(By.CSS_SELECTOR, '#modalPersona td')
address = name = birth = email = pec = tel = phone = ''
if(len(fonts) > 5):
use_nums = [2, 3, 5]
first_pros = []
result_pros = []
for use_num in use_nums:
first_pros.extend(fonts[use_num].get_attribute("innerHTML").strip().split("<br>"))
for first_pro in first_pros:
temp = first_pro.replace(" ", " ").strip()
temp1 = temp.lower()
temp_flag = ('foro di appartenenza' in temp1) or (('data' in temp1) and ('nascita' in temp1)) or ('email' in temp1) or ('pec' in temp1) or ('tel' in temp1) or ('cell' in temp1)
if(temp_flag):
result_pros.append(temp)
for result_pro in result_pros:
if(":" in result_pro):
rows = result_pro.split(":")
temp = rows[0].lower()
if("foro di appartenenza" in temp):
address = rows[1].strip()
if("data" in temp):
birth = rows[1].strip()
if("email" in temp):
email = result_pro.split(">")[1].strip()[:-3]
if("pec" in temp):
pec = result_pro.split(">")[1].strip()[:-3]
if("tel" in temp):
tel = rows[1].strip()
if("cell" in temp):
phone = rows[1].strip()
name = fonts[1].get_attribute("innerHTML").split("<b>")[1].strip().split("</b>")[0].strip()
print(address)
print(name)
print(birth)
print(email)
print(pec)
print(tel)
print(phone)
with open(csv_file, mode='a') as employee_file:
employee_writer = csv.writer(employee_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
employee_writer.writerow([address, name, birth, email, pec, tel, phone])
aaaaaaaaaa = aaaaaaaaaa + 1
print(aaaaaaaaaa)
close_buttons = driver.find_element(By.CSS_SELECTOR, "button.btn-secondary")
driver.implicitly_wait(10)
ActionChains(driver).move_to_element(close_buttons).click(close_buttons).perform()
driver.implicitly_wait(10)
CLICK_NEXT_BUTTON_NUM = CLICK_NEXT_BUTTON_NUM + 1
print(CLICK_NEXT_BUTTON)
print(CLICK_NEXT_BUTTON_NUM)
driver.implicitly_wait(10)
driver.quit()
except:
driver.quit()
print("id="+str(id)+" is not support.")
continue
finally:
# driver.implicitly_wait(10)
# driver.quit()
id = id + 1
print(str(id))
data = {
'is_taken': 'sdfsdfsdfsdfsdf'
}
return JsonResponse(data)
@login_required(login_url="/login/")
def scrape_aaa(request):
ItalianName.objects.all().delete()
# username = request.GET.get('username', None)
option = webdriver.ChromeOptions()
option.add_argument("window-size=1280,800")
# option.add_argument("--headless")
driver = webdriver.Chrome(ChromeDriverManager().install(), options=option)
driver.set_window_position(0, 0)
driver.get('https://www.nomix.it/nomi-italiani-maschili-e-femminili.php')
urls = {'https://www.nomix.it/nomi-italiani-maschili-e-femminili.php', 'https://www.nomix.it/nomi-italiani-lettera-B.php', 'https://www.nomix.it/nomi-italiani-lettera-C.php', 'https://www.nomix.it/nomi-italiani-lettera-D.php', 'https://www.nomix.it/nomi-italiani-lettera-E.php', 'https://www.nomix.it/nomi-italiani-lettera-F.php', 'https://www.nomix.it/nomi-italiani-lettera-G.php', 'https://www.nomix.it/nomi-italiani-lettera-I.php', 'https://www.nomix.it/nomi-italiani-lettera-L.php', 'https://www.nomix.it/nomi-italiani-lettera-M.php', 'https://www.nomix.it/nomi-italiani-lettera-NO.php', 'https://www.nomix.it/nomi-italiani-lettera-PQ.php', 'https://www.nomix.it/nomi-italiani-lettera-R.php', 'https://www.nomix.it/nomi-italiani-lettera-S.php', 'https://www.nomix.it/nomi-italiani-lettera-TUV.php', 'https://www.nomix.it/nomi-italiani-lettera-WZ.php'}
for url in urls:
driver.get(url)
driver.implicitly_wait(10)
original_window = driver.current_window_handle
# Check we don't have other windows open already
assert len(driver.window_handles) == 1
driver.switch_to.window(driver.window_handles[0])
tables = driver.find_elements(By.TAG_NAME, 'table')
male_tds = tables[2].find_elements(By.TAG_NAME, 'td')
for male_td in male_tds:
td_content = male_td.get_attribute('innerHTML')
if('<div' not in td_content):
if('strong' in td_content):
td_content = td_content.replace(' ', '')
td_content = td_content.replace('<strong>', '')
td_content = td_content.replace('</strong>', '').strip()
else:
td_content = td_content.replace(' ', '')
td_content = td_content.strip()
print(td_content)
italy_name = ItalianName(names_col=td_content, gender_col='male')
italy_name.save()
# italy_name.names_col = td_content
# italy_name.gender_col = 'male'
# italy_name.save()
male_tds = tables[3].find_elements(By.TAG_NAME, 'td')
for male_td in male_tds:
td_content = male_td.get_attribute('innerHTML')
if('<div' not in td_content):
if('strong' in td_content):
td_content = td_content.replace(' ', '')
td_content = td_content.replace('<strong>', '')
td_content = td_content.replace('</strong>', '').strip()
else:
td_content = td_content.replace(' ', '')
td_content = td_content.strip()
print(td_content)
italy_name = ItalianName(names_col=td_content, gender_col='male')
italy_name.save()
driver.implicitly_wait(10)
driver.quit()
data = {
'is_taken': 'sdfsdfsdfsdfsdf'
}
return JsonResponse(data)
@login_required(login_url="/login/")
def scrape_name(request):
# username = request.GET.get('username', None)
i = 1080
driver = ''
while(i<=1086):
try:
option = webdriver.ChromeOptions()
option.add_argument("window-size=1280,800")
# option.add_argument("--headless")
driver = webdriver.Chrome(ChromeDriverManager().install(), options=option)
driver.get('https://sfera.sferabit.com/servizi/alboonlineBoot/index.php?id='+str(i))
original_window = driver.current_window_handle
# Check we don't have other windows open already
assert len(driver.window_handles) == 1
driver.switch_to.window(driver.window_handles[0])
name_input = driver.find_element(By.ID, 'filtroRagioneSociale')
name_input.send_keys('an')
send_button = driver.find_element(By.CSS_SELECTOR, 'button.btn-primary')
ActionChains(driver).move_to_element(send_button).click(send_button).perform()
driver.implicitly_wait(10)
td = driver.find_element(By.CSS_SELECTOR, '#risultatoRicerca>table.table tr>td:last-child').get_attribute('innerHTML')
td = td.split(" ")[-2].strip()
city = City(url_id=i, city_name=td)
city.save()
except:
print(str(i)+" does not support")
finally:
i = i + 1
driver.quit()
data = {
'is_taken': 'sdfsdfsdfsdfsdf'
}
return JsonResponse(data) | [
"os.path.exists",
"selenium.webdriver.ChromeOptions",
"app.models.ItalianName.objects.all",
"app.models.ItalianName",
"django.http.JsonResponse",
"app.models.City",
"csv.writer",
"selenium.webdriver.common.action_chains.ActionChains",
"django.contrib.auth.decorators.login_required",
"webdriver_man... | [((617, 652), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login/"""'}), "(login_url='/login/')\n", (631, 652), False, 'from django.contrib.auth.decorators import login_required\n'), ((7585, 7620), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login/"""'}), "(login_url='/login/')\n", (7599, 7620), False, 'from django.contrib.auth.decorators import login_required\n'), ((11036, 11071), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login/"""'}), "(login_url='/login/')\n", (11050, 11071), False, 'from django.contrib.auth.decorators import login_required\n'), ((841, 866), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (864, 866), False, 'from selenium import webdriver\n'), ((7563, 7581), 'django.http.JsonResponse', 'JsonResponse', (['data'], {}), '(data)\n', (7575, 7581), False, 'from django.http import HttpResponse, JsonResponse\n'), ((7749, 7774), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (7772, 7774), False, 'from selenium import webdriver\n'), ((11016, 11034), 'django.http.JsonResponse', 'JsonResponse', (['data'], {}), '(data)\n', (11028, 11034), False, 'from django.http import HttpResponse, JsonResponse\n'), ((12551, 12569), 'django.http.JsonResponse', 'JsonResponse', (['data'], {}), '(data)\n', (12563, 12569), False, 'from django.http import HttpResponse, JsonResponse\n'), ((1182, 1206), 'os.path.exists', 'os.path.exists', (['csv_file'], {}), '(csv_file)\n', (1196, 1206), False, 'import os\n'), ((1220, 1239), 'os.remove', 'os.remove', (['csv_file'], {}), '(csv_file)\n', (1229, 1239), False, 'import os\n'), ((7650, 7675), 'app.models.ItalianName.objects.all', 'ItalianName.objects.all', ([], {}), '()\n', (7673, 7675), False, 'from app.models import ItalianName, City\n'), ((11232, 11257), 'selenium.webdriver.ChromeOptions', 
'webdriver.ChromeOptions', ([], {}), '()\n', (11255, 11257), False, 'from selenium import webdriver\n'), ((12303, 12331), 'app.models.City', 'City', ([], {'url_id': 'i', 'city_name': 'td'}), '(url_id=i, city_name=td)\n', (12307, 12331), False, 'from app.models import ItalianName, City\n'), ((7893, 7914), 'webdriver_manager.chrome.ChromeDriverManager', 'ChromeDriverManager', ([], {}), '()\n', (7912, 7914), False, 'from webdriver_manager.chrome import ChromeDriverManager\n'), ((9929, 9981), 'app.models.ItalianName', 'ItalianName', ([], {'names_col': 'td_content', 'gender_col': '"""male"""'}), "(names_col=td_content, gender_col='male')\n", (9940, 9981), False, 'from app.models import ItalianName, City\n'), ((10808, 10860), 'app.models.ItalianName', 'ItalianName', ([], {'names_col': 'td_content', 'gender_col': '"""male"""'}), "(names_col=td_content, gender_col='male')\n", (10819, 10860), False, 'from app.models import ItalianName, City\n'), ((11400, 11421), 'webdriver_manager.chrome.ChromeDriverManager', 'ChromeDriverManager', ([], {}), '()\n', (11419, 11421), False, 'from webdriver_manager.chrome import ChromeDriverManager\n'), ((1437, 1458), 'webdriver_manager.chrome.ChromeDriverManager', 'ChromeDriverManager', ([], {}), '()\n', (1456, 1458), False, 'from webdriver_manager.chrome import ChromeDriverManager\n'), ((6416, 6503), 'csv.writer', 'csv.writer', (['employee_file'], {'delimiter': '""","""', 'quotechar': '"""\\""""', 'quoting': 'csv.QUOTE_MINIMAL'}), '(employee_file, delimiter=\',\', quotechar=\'"\', quoting=csv.\n QUOTE_MINIMAL)\n', (6426, 6503), False, 'import csv\n'), ((11992, 12012), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['driver'], {}), '(driver)\n', (12004, 12012), False, 'from selenium.webdriver.common.action_chains import ActionChains\n'), ((2602, 2622), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['driver'], {}), '(driver)\n', (2614, 2622), False, 'from 
selenium.webdriver.common.action_chains import ActionChains\n'), ((3876, 3896), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['driver'], {}), '(driver)\n', (3888, 3896), False, 'from selenium.webdriver.common.action_chains import ActionChains\n'), ((2966, 2986), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['driver'], {}), '(driver)\n', (2978, 2986), False, 'from selenium.webdriver.common.action_chains import ActionChains\n'), ((6870, 6890), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['driver'], {}), '(driver)\n', (6882, 6890), False, 'from selenium.webdriver.common.action_chains import ActionChains\n')] |
from pyntcloud import PyntCloud
import pyembree
import numpy as np
import trimesh
from trimesh import sample, ray, triangles
from trimesh.ray.ray_pyembree import RayMeshIntersector
import pandas as pd
# Load the point cloud and draw 1024 random points from its mesh surface.
cloud = PyntCloud.from_file("/home/mjia/Documents/ShapeCompletion/test.ply")
# Renamed from `sample`: that name shadowed `trimesh.sample` imported above.
sampled_cloud = cloud.get_sample(name='mesh_random_sampling', as_PyntCloud=True, n=1024)
sampled_cloud.plot(mesh=True)
mesh = trimesh.load('../ModelNet40/desk/train/desk_0008.off')
mesh.show()
"trimesh.load",
"trimesh.sample.plot",
"pyntcloud.PyntCloud.from_file"
] | [((211, 279), 'pyntcloud.PyntCloud.from_file', 'PyntCloud.from_file', (['"""/home/mjia/Documents/ShapeCompletion/test.ply"""'], {}), "('/home/mjia/Documents/ShapeCompletion/test.ply')\n", (230, 279), False, 'from pyntcloud import PyntCloud\n'), ((362, 384), 'trimesh.sample.plot', 'sample.plot', ([], {'mesh': '(True)'}), '(mesh=True)\n', (373, 384), False, 'from trimesh import sample, ray, triangles\n'), ((393, 447), 'trimesh.load', 'trimesh.load', (['"""../ModelNet40/desk/train/desk_0008.off"""'], {}), "('../ModelNet40/desk/train/desk_0008.off')\n", (405, 447), False, 'import trimesh\n')] |
"""
# License
Each contributor holds copyright over their contributions to Caffe-Tensorflow. In particular:
- Any included network model is provided under its original license.
- Any portion derived from Caffe is provided under its original license.
- Caffe-tensorflow is provided under the MIT license, as specified below.
# The MIT License (MIT)
Copyright (c) 2016 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import sys
SHARED_CAFFE_RESOLVER = None
class CaffeResolver(object):
    """Locate a usable Caffe protobuf implementation.

    Prefers an installed PyCaffe; falls back to the bundled pure-protobuf
    module (much slower) when PyCaffe is absent.
    """
    def __init__(self):
        self.import_caffe()
    def import_caffe(self):
        """Bind self.caffe / self.caffepb / self.NetParameter."""
        self.caffe = None
        try:
            # Try to import PyCaffe first
            import caffe
            self.caffe = caffe
        except ImportError:
            # Fall back to the protobuf implementation
            from . import caffepb
            self.caffepb = caffepb
            show_fallback_warning()
        if self.caffe:
            # Use the protobuf code from the imported distribution.
            # This way, Caffe variants with custom layers will work.
            self.caffepb = self.caffe.proto.caffe_pb2
        self.NetParameter = self.caffepb.NetParameter
    def has_pycaffe(self):
        # True only when the real PyCaffe module was importable.
        return self.caffe is not None
def get_caffe_resolver():
global SHARED_CAFFE_RESOLVER
if SHARED_CAFFE_RESOLVER is None:
SHARED_CAFFE_RESOLVER = CaffeResolver()
return SHARED_CAFFE_RESOLVER
def has_pycaffe():
return get_caffe_resolver().has_pycaffe()
def show_fallback_warning():
msg = '''
------------------------------------------------------------
WARNING: PyCaffe not found!
Falling back to a pure protocol buffer implementation.
* Conversions will be drastically slower.
* This backend is UNTESTED!
------------------------------------------------------------
'''
sys.stderr.write(msg)
| [
"sys.stderr.write"
] | [((2809, 2830), 'sys.stderr.write', 'sys.stderr.write', (['msg'], {}), '(msg)\n', (2825, 2830), False, 'import sys\n')] |
import numpy as np
import gym
poleThetaSpace = np.linspace(-0.209, 0.209, 10)
poleThetaVelSpace = np.linspace(-4, 4, 10)
cartPosSpace = np.linspace(-2.4, 2.4, 10)
cartVelSpace = np.linspace(-4, 4, 10)
def get_state(observation):
cartX, cartXdot, cartTheta, cartThetaDot = observation
cartX = int(np.digitize(cartX, cartPosSpace))
cartXdot = int(np.digitize(cartXdot, cartVelSpace))
cartTheta = int(np.digitize(cartTheta, poleThetaSpace))
cartThetaDot = int(np.digitize(cartThetaDot, poleThetaVelSpace))
return (cartX, cartXdot, cartTheta, cartThetaDot)
def choose_action(q, obs, eps, n_actions=2):
state = get_state(obs)
if np.random.random() < eps:
action = np.random.choice([i for i in range(n_actions)])
else:
action_values = [q[(state, a)] for a in range(n_actions)]
action = np.argmax(action_values)
return action
if __name__ == '__main__':
env = gym.make('CartPole-v0')
alpha = 0.1
gamma = 0.9
epsilon = 1.0
states = []
for i in range(len(cartPosSpace)+1):
for j in range(len(cartVelSpace)+1):
for k in range(len(poleThetaSpace)+1):
for l in range(len(poleThetaVelSpace)+1):
states.append((i,j,k,l))
Q = {}
for s in states:
for a in range(2):
Q[(s, a)] = 0.0
n = 16
state_memory = np.zeros((n, 4))
action_memory = np.zeros(n)
reward_memory = np.zeros(n)
scores = []
n_episodes = 50000
for i in range(n_episodes):
done = False
score = 0
t = 0
T = np.inf
observation = env.reset()
action = choose_action(Q, observation, epsilon)
action_memory[t%n] = action
state_memory[t%n] = observation
while not done:
observation, reward, done, info = env.step(action)
score += reward
state_memory[(t+1)%n] = observation
reward_memory[(t+1)%n] = reward
if done:
T = t + 1
#print('episode ends at step', t)
action = choose_action(Q, observation, epsilon)
action_memory[(t+1)%n] = action
tau = t - n + 1
if tau >= 0:
G = [gamma**(j-tau-1)*reward_memory[j%n] \
for j in range(tau+1, min(tau+n, T)+1)]
G = np.sum(G)
if tau + n < T:
s = get_state(state_memory[(tau+n)%n])
a = int(action_memory[(tau+n)%n])
G += gamma**n * Q[(s,a)]
s = get_state(state_memory[tau%n])
a = action_memory[tau%n]
Q[(s,a)] += alpha*(G-Q[(s,a)])
#print('tau ', tau, '| Q %.2f' % \
# Q[(get_state(state_memory[tau%n]), action_memory[tau%n])])
t += 1
for tau in range(t-n+1, T):
G = [gamma**(j-tau-1)*reward_memory[j%n] \
for j in range(tau+1, min(tau+n, T)+1)]
G = np.sum(G)
if tau + n < T:
s = get_state(state_memory[(tau+n)%n])
a = int(action_memory[(tau+n)%n])
G += gamma**n * Q[(s,a)]
s = get_state(state_memory[tau%n])
a = action_memory[tau%n]
Q[(s,a)] += alpha*(G-Q[(s,a)])
#print('tau ', tau, '| Q %.2f' % \
# Q[(get_state(state_memory[tau%n]), action_memory[tau%n])])
scores.append(score)
avg_score = np.mean(scores[-1000:])
epsilon = epsilon -2 / n_episodes if epsilon > 0 else 0
if i % 1000 == 0:
print('episode ', i, 'avg_score %.1f' % avg_score,
'epsilon %.2f' % epsilon)
| [
"numpy.mean",
"numpy.digitize",
"numpy.random.random",
"numpy.argmax",
"numpy.sum",
"numpy.linspace",
"numpy.zeros",
"gym.make"
] | [((48, 78), 'numpy.linspace', 'np.linspace', (['(-0.209)', '(0.209)', '(10)'], {}), '(-0.209, 0.209, 10)\n', (59, 78), True, 'import numpy as np\n'), ((99, 121), 'numpy.linspace', 'np.linspace', (['(-4)', '(4)', '(10)'], {}), '(-4, 4, 10)\n', (110, 121), True, 'import numpy as np\n'), ((137, 163), 'numpy.linspace', 'np.linspace', (['(-2.4)', '(2.4)', '(10)'], {}), '(-2.4, 2.4, 10)\n', (148, 163), True, 'import numpy as np\n'), ((179, 201), 'numpy.linspace', 'np.linspace', (['(-4)', '(4)', '(10)'], {}), '(-4, 4, 10)\n', (190, 201), True, 'import numpy as np\n'), ((925, 948), 'gym.make', 'gym.make', (['"""CartPole-v0"""'], {}), "('CartPole-v0')\n", (933, 948), False, 'import gym\n'), ((1375, 1391), 'numpy.zeros', 'np.zeros', (['(n, 4)'], {}), '((n, 4))\n', (1383, 1391), True, 'import numpy as np\n'), ((1412, 1423), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (1420, 1423), True, 'import numpy as np\n'), ((1444, 1455), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (1452, 1455), True, 'import numpy as np\n'), ((306, 338), 'numpy.digitize', 'np.digitize', (['cartX', 'cartPosSpace'], {}), '(cartX, cartPosSpace)\n', (317, 338), True, 'import numpy as np\n'), ((359, 394), 'numpy.digitize', 'np.digitize', (['cartXdot', 'cartVelSpace'], {}), '(cartXdot, cartVelSpace)\n', (370, 394), True, 'import numpy as np\n'), ((416, 454), 'numpy.digitize', 'np.digitize', (['cartTheta', 'poleThetaSpace'], {}), '(cartTheta, poleThetaSpace)\n', (427, 454), True, 'import numpy as np\n'), ((479, 523), 'numpy.digitize', 'np.digitize', (['cartThetaDot', 'poleThetaVelSpace'], {}), '(cartThetaDot, poleThetaVelSpace)\n', (490, 523), True, 'import numpy as np\n'), ((660, 678), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (676, 678), True, 'import numpy as np\n'), ((844, 868), 'numpy.argmax', 'np.argmax', (['action_values'], {}), '(action_values)\n', (853, 868), True, 'import numpy as np\n'), ((3507, 3530), 'numpy.mean', 'np.mean', (['scores[-1000:]'], {}), 
'(scores[-1000:])\n', (3514, 3530), True, 'import numpy as np\n'), ((3024, 3033), 'numpy.sum', 'np.sum', (['G'], {}), '(G)\n', (3030, 3033), True, 'import numpy as np\n'), ((2370, 2379), 'numpy.sum', 'np.sum', (['G'], {}), '(G)\n', (2376, 2379), True, 'import numpy as np\n')] |
import urllib.request, urllib.parse, urllib.error
import xml.etree.ElementTree as ET
url= input('Enter - ')
data= urllib.request.urlopen(url).read().decode()
#print(type('data'))
#data ='''<commentinfo>tag</commentinfo>'''
#print("~~~",data)
c=0
commentsinfo = ET.fromstring(data)#starting tag. ie is "commentinfo" in this case
lst = commentsinfo.findall('comments/comment')#and thge path.ie "commentinfo/comments/comment"
#print("$$",type('lst'))
print('User count:', len(lst))
for item in lst:
#print('Name', item.find('name').text) #get the txt btw <name> tag
#print('count', item.find('count').text) #get the txt btw <count> tag
c=c+int(item.find('count').text)
print("sum :",c)
#http://py4e-data.dr-chuck.net/comments_42.xml in this case 2553
#http://py4e-data.dr-chuck.net/comments_967204.xml in this case 2212
| [
"xml.etree.ElementTree.fromstring"
] | [((263, 282), 'xml.etree.ElementTree.fromstring', 'ET.fromstring', (['data'], {}), '(data)\n', (276, 282), True, 'import xml.etree.ElementTree as ET\n')] |
"""
spaceshooter.py
Author: emBrileg08
Credit: Spacewar Source Code
www.pythoncentral.io for information on random number generation
Assignment:
Write and submit a program that implements the spacewar game:
https://github.com/HHS-IntroProgramming/Spacewar
"""
from ggame import App, Sprite, ImageAsset, Frame
import random
import math
print("Directions: Player 1 uses the left and right arrows to move the Spaceship, and the spacebar to fire. Player 2 uses the a and s keys to move the spaceship, and the z key to fire")
class Bullet1(Sprite):
def __init__(self,position,vx,vy):
asset=ImageAsset("images/blast.png", Frame(0,0,8,8), 8)
super().__init__(asset,position)
self.setImage(0)
self.fxcenter=self.fycenter=0.5
self.vx=vx
self.vy=vy
def step(self):
if self.visible!=False:
self.x+=self.vx
self.y+=self.vy
collision=self.collidingWithSprites(Sun)
collision2=self.collidingWithSprites(Spaceship2)
if collision:
self.visible=False
explosion(self.position)
if collision2:
self.visible=False
class Bullet2(Sprite):
def __init__(self,position,vx,vy):
asset=ImageAsset("images/blast.png", Frame(0,0,8,8), 8)
super().__init__(asset,position)
self.setImage(0)
self.fxcenter=self.fycenter=0.5
self.vx=vx
self.vy=vy
def step(self):
if self.visible!=False:
self.x+=self.vx
self.y+=self.vy
collision=self.collidingWithSprites(Sun)
collision2=self.collidingWithSprites(Spaceship)
if collision:
self.visible=False
explosion(self.position)
if collision2:
self.visible=False
class Background(Sprite):
def __init__(self,position):
asset=ImageAsset("images/starfield.jpg")
super().__init__(asset,position)
width=512
height=512
class Sun(Sprite):
def __init__(self,position):
asset=ImageAsset("images/sun.png")
super().__init__(asset,position)
if 0<self.x<200 and 0<self.y<200:
self.destroy()
elif 872<self.x and 350<self.y:
self.destroy()
class Spaceship2(Sprite):
def __init__(self, position):
asset=ImageAsset("images/four_spaceship_by_albertov_with_thrust.png",
Frame(227,0,65,125), 4, 'vertical')
super().__init__(asset, position)
self.fxcenter=self.fycenter=0.5
self.thrust = 0
self.right=0
self.left=0
self.shoot=0
self.angle=math.pi/2
self.thrustframe = 1
Spacewar.listenKeyEvent("keydown","a", self.rightOn)
Spacewar.listenKeyEvent("keyup","a", self.rightOff)
Spacewar.listenKeyEvent("keydown","s", self.leftOn)
Spacewar.listenKeyEvent("keyup","s", self.leftOff)
Spacewar.listenKeyEvent("keydown", "z", self.bulletOn)
Spacewar.listenKeyEvent("keyup","z",self.bulletOff)
self.visible=True
def step(self):
if self.visible!=False:
self.vx=-math.cos(math.pi-self.angle)
self.vy=-math.sin(math.pi-self.angle)
self.x+=self.vx
self.y+=self.vy
if self.thrust == 1:
self.setImage(self.thrustframe)
self.thrustframe += 1
if self.thrustframe == 4:
self.thrustframe = 1
else:
self.setImage(0)
if self.right==1:
self.rotation+=.02
self.angle+=.02
if self.left==1:
self.rotation-=.02
self.angle-=.02
if self.shoot==1:
Bullet2((self.x,self.y),2*self.vx,2*self.vy)
if self.visible:
collision=self.collidingWithSprites(Sun)
collision2=self.collidingWithSprites(Spaceship)
collision3=self.collidingWithSprites(Bullet1)
if collision or collision2 or collision3:
self.visible=False
explosion(self.position)
def rightOn(self, event):
self.thrust = 1
self.right=1
def rightOff(self, event):
self.thrust = 0
self.right=0
def leftOn(self, event):
self.thrust = 1
self.left=1
def leftOff(self, event):
self.thrust = 0
self.left=0
def bulletOn(self,event):
self.shoot=1
def bulletOff(self,event):
self.shoot=0
class Spaceship(Sprite):
def __init__(self, position):
asset=ImageAsset("images/four_spaceship_by_albertov_with_thrust.png",
Frame(227,0,65,125), 4, 'vertical')
super().__init__(asset, position)
self.fxcenter=self.fycenter=0.5
self.thrust = 0
self.right=0
self.left=0
self.shoot=0
self.angle=math.pi/2
self.thrustframe = 1
Spacewar.listenKeyEvent("keydown","right arrow", self.rightOn)
Spacewar.listenKeyEvent("keyup","right arrow", self.rightOff)
Spacewar.listenKeyEvent("keydown","left arrow", self.leftOn)
Spacewar.listenKeyEvent("keyup","left arrow", self.leftOff)
Spacewar.listenKeyEvent("keydown", "space", self.bulletOn)
Spacewar.listenKeyEvent("keyup","space",self.bulletOff)
self.visible=True
def step(self):
if self.visible!=False:
self.vx=-math.cos(math.pi-self.angle)
self.vy=-math.sin(math.pi-self.angle)
self.x+=self.vx
self.y+=self.vy
if self.thrust == 1:
self.setImage(self.thrustframe)
self.thrustframe += 1
if self.thrustframe == 4:
self.thrustframe = 1
else:
self.setImage(0)
if self.right==1:
self.rotation-=.02
self.angle-=.02
if self.left==1:
self.rotation+=.02
self.angle+=.02
if self.shoot==1:
Bullet1((self.x,self.y),2*self.vx,2*self.vy)
if self.visible:
collision=self.collidingWithSprites(Sun)
collision2=self.collidingWithSprites(Spaceship2)
collision3=self.collidingWithSprites(Bullet2)
if collision or collision2 or collision3:
self.visible=False
explosion(self.position)
def rightOn(self, event):
self.thrust = 1
self.right=1
def rightOff(self, event):
self.thrust = 0
self.right=0
def leftOn(self, event):
self.thrust = 1
self.left=1
def leftOff(self, event):
self.thrust = 0
self.left=0
def bulletOn(self,event):
self.shoot=1
def bulletOff(self,event):
self.shoot=0
class explosion(Sprite):
def __init__(self, position):
asset=ImageAsset("images/explosion2.png", Frame(0,0,4800/25,195), 25)
super().__init__(asset, position)
self.fxcenter=self.fycenter=0.5
self.expframe=1
def step(self):
self.setImage(self.expframe)
self.expframe += 1
if self.expframe == 26:
self.destroy()
class Spacewar(App):
def __init__(self):
super().__init__()
z=0
a=0
while z<self.width:
Background((z,0))
while a<self.height:
Background((z,a))
a+=Background.height
a=0
z+=Background.width
self.go=1
self.sp = Spaceship((100,100))
self.sp2 = Spaceship2((self.width-100,self.height-100))
for x in range(int(input("Set difficulty level, 0-10: "))):
Sun((random.randint(0,self.width),random.randint(0,self.height)))
self.ste=True
def step(self):
for exp in self.getSpritesbyClass(explosion):
exp.step()
for bullet in self.getSpritesbyClass(Bullet1):
bullet.step()
for bull in self.getSpritesbyClass(Bullet2):
bull.step()
if self.ste==True:
if self.sp.visible==False:
self.go=0
elif self.sp.x<-75:
self.go=0
elif self.sp.x>self.width+75:
self.go=0
elif self.sp.y<-75:
self.go=0
elif self.sp.y>self.height+75:
self.go=0
elif self.sp2.visible==False:
self.go=2
elif self.sp2.x<-75:
self.go=2
elif self.sp2.x>self.width+75:
self.go=2
elif self.sp2.y<-75:
self.go=2
elif self.sp2.y>self.height+75:
self.go=2
if self.go==1:
for ship in self.getSpritesbyClass(Spaceship):
ship.step()
for ship2 in self.getSpritesbyClass(Spaceship2):
ship2.step()
if self.go==0:
print("Player 2 wins!")
self.ste=False
if self.go==2:
print("Player 1 wins!")
self.ste=False
myapp=Spacewar()
myapp.run() | [
"ggame.ImageAsset",
"math.cos",
"math.sin",
"random.randint",
"ggame.Frame"
] | [((1939, 1973), 'ggame.ImageAsset', 'ImageAsset', (['"""images/starfield.jpg"""'], {}), "('images/starfield.jpg')\n", (1949, 1973), False, 'from ggame import App, Sprite, ImageAsset, Frame\n'), ((2111, 2139), 'ggame.ImageAsset', 'ImageAsset', (['"""images/sun.png"""'], {}), "('images/sun.png')\n", (2121, 2139), False, 'from ggame import App, Sprite, ImageAsset, Frame\n'), ((630, 647), 'ggame.Frame', 'Frame', (['(0)', '(0)', '(8)', '(8)'], {}), '(0, 0, 8, 8)\n', (635, 647), False, 'from ggame import App, Sprite, ImageAsset, Frame\n'), ((1304, 1321), 'ggame.Frame', 'Frame', (['(0)', '(0)', '(8)', '(8)'], {}), '(0, 0, 8, 8)\n', (1309, 1321), False, 'from ggame import App, Sprite, ImageAsset, Frame\n'), ((2474, 2496), 'ggame.Frame', 'Frame', (['(227)', '(0)', '(65)', '(125)'], {}), '(227, 0, 65, 125)\n', (2479, 2496), False, 'from ggame import App, Sprite, ImageAsset, Frame\n'), ((4762, 4784), 'ggame.Frame', 'Frame', (['(227)', '(0)', '(65)', '(125)'], {}), '(227, 0, 65, 125)\n', (4767, 4784), False, 'from ggame import App, Sprite, ImageAsset, Frame\n'), ((7056, 7083), 'ggame.Frame', 'Frame', (['(0)', '(0)', '(4800 / 25)', '(195)'], {}), '(0, 0, 4800 / 25, 195)\n', (7061, 7083), False, 'from ggame import App, Sprite, ImageAsset, Frame\n'), ((3207, 3237), 'math.cos', 'math.cos', (['(math.pi - self.angle)'], {}), '(math.pi - self.angle)\n', (3215, 3237), False, 'import math\n'), ((3257, 3287), 'math.sin', 'math.sin', (['(math.pi - self.angle)'], {}), '(math.pi - self.angle)\n', (3265, 3287), False, 'import math\n'), ((5541, 5571), 'math.cos', 'math.cos', (['(math.pi - self.angle)'], {}), '(math.pi - self.angle)\n', (5549, 5571), False, 'import math\n'), ((5591, 5621), 'math.sin', 'math.sin', (['(math.pi - self.angle)'], {}), '(math.pi - self.angle)\n', (5599, 5621), False, 'import math\n'), ((7846, 7875), 'random.randint', 'random.randint', (['(0)', 'self.width'], {}), '(0, self.width)\n', (7860, 7875), False, 'import random\n'), ((7875, 7905), 'random.randint', 
'random.randint', (['(0)', 'self.height'], {}), '(0, self.height)\n', (7889, 7905), False, 'import random\n')] |
import normalize_sentences
import spacy
nlp = spacy.load('de_core_news_sm')
test_sentence = 'Der schlaue Fuchs sagte "Treffen um 16:20 Uhr!" aber war schon 20 Minuten früher da. Im Jahre 1995 schuf er das Gedicht.'
def test_sent(test_sentence):
result = normalize_sentences.normalize(nlp, test_sentence)
print(test_sentence, '->', result)
test_sent('Der schlaue Fuchs sagte "Treffen um 16:20 Uhr!" aber war schon 20 Minuten früher da. Im Jahre 1995 schuf er das Gedicht.')
test_sent('Er war von 1920 bis 1988 durchgehend beschäftigt.')
| [
"spacy.load",
"normalize_sentences.normalize"
] | [((47, 76), 'spacy.load', 'spacy.load', (['"""de_core_news_sm"""'], {}), "('de_core_news_sm')\n", (57, 76), False, 'import spacy\n'), ((261, 310), 'normalize_sentences.normalize', 'normalize_sentences.normalize', (['nlp', 'test_sentence'], {}), '(nlp, test_sentence)\n', (290, 310), False, 'import normalize_sentences\n')] |
from app import app
import unittest
import base64
import json
class TestLogin(unittest.TestCase):
def setUp(self):
app.config['TESTING'] = True
self.app = app.test_client()
self.user_name = "<EMAIL>"
self.password = "<PASSWORD>"
self.valid_credentials = base64.b64encode(b'<EMAIL>:123@Abcd').decode('utf-8')
self.invalid_password = base64.b64encode(b'<EMAIL>:<PASSWORD>').decode('utf-8')
self.invalid_username = base64.b64encode(b'<EMAIL>:<PASSWORD>').decode('utf-8')
def test_user_create_recipe_invalid_credentials(self):
datajson=json.dumps({
"cook_time_in_min": 15,
"prep_time_in_min": 15,
"title": "Creamy Cajun Chicken Pasta",
"cuisine": "Italian",
"servings": 2,
"ingredients": [
"4 ounces linguine pasta",
"2 boneless, skinless chicken breast halves, sliced into thin strips",
"2 teaspoons Cajun seasoning",
"2 tablespoons butter"
],
"steps": [
{
"position": 1,
"items": "some text here"
}
],
"nutrition_information": {
"calories": 100,
"cholesterol_in_mg": 4,
"sodium_in_mg": 100,
"carbohydrates_in_grams": 53.7,
"protein_in_grams": 53.7
}
})
response = self.app.post(
'/v1/recipe/', data=datajson, content_type='application/json',
headers={'Authorization': 'Basic ' + self.invalid_password})
self.assertEqual(response.status_code, 401)
def test_user_recipe_get(self):
response = self.app.get(
'/v1/recipe/f5e02bd4-55da-4243-b7fb-980b230a1138')
self.assertEqual(response.status_code, 404)
# def test_user_recipe_delete(self):
# response = self.app.delete(
# '/v1/recipe/f5e02bd4-55da-4243-b7fb-980b230a1138', headers={'Authorization': 'Basic ' + self.valid_credentials})
# self.assertEqual(response.status_code, 403)
| [
"base64.b64encode",
"json.dumps",
"app.app.test_client"
] | [((177, 194), 'app.app.test_client', 'app.test_client', ([], {}), '()\n', (192, 194), False, 'from app import app\n'), ((607, 1145), 'json.dumps', 'json.dumps', (["{'cook_time_in_min': 15, 'prep_time_in_min': 15, 'title':\n 'Creamy Cajun Chicken Pasta', 'cuisine': 'Italian', 'servings': 2,\n 'ingredients': ['4 ounces linguine pasta',\n '2 boneless, skinless chicken breast halves, sliced into thin strips',\n '2 teaspoons Cajun seasoning', '2 tablespoons butter'], 'steps': [{\n 'position': 1, 'items': 'some text here'}], 'nutrition_information': {\n 'calories': 100, 'cholesterol_in_mg': 4, 'sodium_in_mg': 100,\n 'carbohydrates_in_grams': 53.7, 'protein_in_grams': 53.7}}"], {}), "({'cook_time_in_min': 15, 'prep_time_in_min': 15, 'title':\n 'Creamy Cajun Chicken Pasta', 'cuisine': 'Italian', 'servings': 2,\n 'ingredients': ['4 ounces linguine pasta',\n '2 boneless, skinless chicken breast halves, sliced into thin strips',\n '2 teaspoons Cajun seasoning', '2 tablespoons butter'], 'steps': [{\n 'position': 1, 'items': 'some text here'}], 'nutrition_information': {\n 'calories': 100, 'cholesterol_in_mg': 4, 'sodium_in_mg': 100,\n 'carbohydrates_in_grams': 53.7, 'protein_in_grams': 53.7}})\n", (617, 1145), False, 'import json\n'), ((300, 337), 'base64.b64encode', 'base64.b64encode', (["b'<EMAIL>:123@Abcd'"], {}), "(b'<EMAIL>:123@Abcd')\n", (316, 337), False, 'import base64\n'), ((386, 425), 'base64.b64encode', 'base64.b64encode', (["b'<EMAIL>:<PASSWORD>'"], {}), "(b'<EMAIL>:<PASSWORD>')\n", (402, 425), False, 'import base64\n'), ((474, 513), 'base64.b64encode', 'base64.b64encode', (["b'<EMAIL>:<PASSWORD>'"], {}), "(b'<EMAIL>:<PASSWORD>')\n", (490, 513), False, 'import base64\n')] |
from __future__ import division
import copy
from functools import reduce
import numpy
import six
from mpilot import params
from mpilot.commands import Command
from mpilot.libraries.eems.exceptions import (
MismatchedWeights,
MixedArrayLengths,
DuplicateRawValues,
)
from mpilot.libraries.eems.mixins import SameArrayShapeMixin
from mpilot.utils import insure_fuzzy
class Copy(Command):
"""Copies the data from another field"""
display_name = "Copy"
inputs = {"InFieldName": params.ResultParameter(params.DataParameter())}
output = params.DataParameter()
def execute(self, **kwargs):
return numpy.copy(kwargs["InFieldName"].result)
class AMinusB(SameArrayShapeMixin, Command):
"""Performs A - B"""
display_name = "A Minus B"
inputs = {
"A": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
"B": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
}
output = params.DataParameter()
def execute(self, **kwargs):
a = kwargs["A"].result
b = kwargs["B"].result
self.validate_array_shapes([a, b], lineno=self.lineno)
return a - b
class Sum(SameArrayShapeMixin, Command):
"""Sums input variables"""
display_name = "Sum"
inputs = {
"InFieldNames": params.ListParameter(
params.ResultParameter(params.DataParameter(), is_fuzzy=False)
)
}
output = params.DataParameter()
def execute(self, **kwargs):
arrays = [c.result for c in kwargs["InFieldNames"]]
self.validate_array_shapes(arrays, lineno=self.lineno)
result = arrays[0].copy()
for arr in arrays[1:]:
result += arr
return result
class WeightedSum(SameArrayShapeMixin, Command):
"""Takes the weighted sum of input variables"""
display_name = "Weighted Sum"
inputs = {
"InFieldNames": params.ListParameter(
params.ResultParameter(params.DataParameter(), is_fuzzy=False)
),
"Weights": params.ListParameter(params.NumberParameter()),
}
output = params.DataParameter()
def execute(self, **kwargs):
weights = kwargs["Weights"]
arrays = [c.result for c in kwargs["InFieldNames"]]
if len(weights) != len(arrays):
raise MismatchedWeights(len(weights), len(arrays))
self.validate_array_shapes(arrays, lineno=self.lineno)
result = arrays[0] * weights[0]
for weight, arr in zip(weights[1:], arrays[1:]):
result += arr * weight
return result
class Multiply(SameArrayShapeMixin, Command):
"""Multiplies input variables"""
display_name = "Multiply"
inputs = {
"InFieldNames": params.ListParameter(
params.ResultParameter(params.DataParameter(), is_fuzzy=False)
)
}
output = params.DataParameter()
def execute(self, **kwargs):
arrays = [c.result for c in kwargs["InFieldNames"]]
self.validate_array_shapes(arrays, lineno=self.lineno)
result = numpy.copy(arrays[0])
for arr in arrays[1:]:
result *= arr
return result
class ADividedByB(SameArrayShapeMixin, Command):
"""Performs A / B"""
display_name = "A Divided By B"
inputs = {
"A": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
"B": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
}
output = params.DataParameter()
def execute(self, **kwargs):
a = kwargs["A"].result
b = kwargs["B"].result
self.validate_array_shapes([a, b], lineno=self.lineno)
return a / b
class Minimum(SameArrayShapeMixin, Command):
"""Takes the minimum input variables"""
display_name = "Minimum"
inputs = {
"InFieldNames": params.ListParameter(
params.ResultParameter(params.DataParameter(), is_fuzzy=False)
)
}
output = params.DataParameter()
def execute(self, **kwargs):
arrays = [c.result for c in kwargs["InFieldNames"]]
self.validate_array_shapes(arrays, lineno=self.lineno)
return reduce(lambda x, y: numpy.ma.minimum(x, y), arrays)
class Maximum(SameArrayShapeMixin, Command):
"""Takes the maximum input variables"""
display_name = "Maximum"
inputs = {
"InFieldNames": params.ListParameter(
params.ResultParameter(params.DataParameter(), is_fuzzy=False)
)
}
output = params.DataParameter()
def execute(self, **kwargs):
arrays = [c.result for c in kwargs["InFieldNames"]]
self.validate_array_shapes(arrays, lineno=self.lineno)
return reduce(lambda x, y: numpy.ma.maximum(x, y), arrays)
class Mean(SameArrayShapeMixin, Command):
"""Mean of input variables"""
display_name = "Mean"
inputs = {
"InFieldNames": params.ListParameter(
params.ResultParameter(params.DataParameter(), is_fuzzy=False)
)
}
output = params.DataParameter()
def execute(self, **kwargs):
arrays = [c.result for c in kwargs["InFieldNames"]]
self.validate_array_shapes(arrays, lineno=self.lineno)
return sum(arrays) / len(arrays)
class WeightedMean(SameArrayShapeMixin, Command):
"""Takes the weighted mean of input variables"""
display_name = "Weighted Mean"
inputs = {
"InFieldNames": params.ListParameter(
params.ResultParameter(params.DataParameter(), is_fuzzy=False)
),
"Weights": params.ListParameter(params.NumberParameter()),
}
output = params.DataParameter()
def execute(self, **kwargs):
weights = kwargs["Weights"]
arrays = [c.result for c in kwargs["InFieldNames"]]
if len(weights) != len(arrays):
raise MismatchedWeights(len(weights), len(arrays))
self.validate_array_shapes(arrays, lineno=self.lineno)
result = arrays[0] * weights[0]
for weight, arr in zip(weights[1:], arrays[1:]):
result += arr * weight
return result / sum(weights)
class Normalize(Command):
"""Normalizes the data from another field to range (default 0:1)"""
display_name = "Normalize"
inputs = {
"InFieldName": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
"StartVal": params.NumberParameter(required=False),
"EndVal": params.NumberParameter(required=False),
}
output = params.DataParameter()
def execute(self, **kwargs):
arr = kwargs["InFieldName"].result
start = kwargs.get("StartVal", 0)
end = kwargs.get("EndVal", 1)
arr_min = arr.min()
arr_max = arr.max()
return (arr - arr_min) * (start - end) / (arr_min - arr_max) + start
class NormalizeZScore(Command):
"""Converts input values into normalized values using linear interpolation based on Z Score"""
display_name = "Normalize by Z Score"
inputs = {
"InFieldName": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
"TrueThresholdZScore": params.NumberParameter(required=False),
"FalseThresholdZScore": params.NumberParameter(required=False),
"StartVal": params.NumberParameter(required=False),
"EndVal": params.NumberParameter(required=False),
}
output = params.DataParameter()
def execute(self, **kwargs):
arr = kwargs["InFieldName"].result
true_threshold = float(kwargs.get("TrueThresholdZScore", 0))
false_threshold = float(kwargs.get("FalseThresholdZScore", 1))
start = kwargs.get("StartVal", 0)
end = kwargs.get("EndVal", 1)
raw_mean = numpy.ma.mean(arr)
raw_std = numpy.ma.std(arr)
x1 = raw_mean + raw_std * true_threshold
x2 = raw_mean + raw_std * false_threshold
y1 = end
y2 = start
result = arr.copy()
result -= x1
result *= y2 - y1
result /= x2 - x1
result += y1
# despite the name, `insure_fuzzy` works to constrain values to any range
return insure_fuzzy(result, start, end)
class NormalizeCat(Command):
"""Converts integer input values into narmalized values based on user specification"""
display_name = "Normalize by Category"
inputs = {
"InFieldName": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
"RawValues": params.ListParameter(params.NumberParameter()),
"NormalValues": params.ListParameter(params.NumberParameter()),
"DefaultNormalValue": params.NumberParameter(),
}
output = params.DataParameter()
def execute(self, **kwargs):
arr = kwargs["InFieldName"].result
raw_values = kwargs["RawValues"]
normal_values = kwargs["NormalValues"]
default_normal_value = kwargs["DefaultNormalValue"]
if len(raw_values) != len(normal_values):
raise MixedArrayLengths(
len(raw_values), len(normal_values), lineno=self.lineno
)
if len(raw_values) != len(set(raw_values)):
raise DuplicateRawValues(lineno=self.argument_lines.get("RawValues"))
result = numpy.ma.array(
numpy.full(arr.shape, default_normal_value, dtype=float)
)
for raw, normal in zip(raw_values, normal_values):
result[arr.data == raw] = normal
return result
class NormalizeCurve(Command):
"""Converts input values into normalized values based on user-defined curve"""
display_name = "Normalize Curve"
inputs = {
"InFieldName": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
"RawValues": params.ListParameter(params.NumberParameter()),
"NormalValues": params.ListParameter(params.NumberParameter()),
}
output = params.DataParameter()
def execute(self, **kwargs):
arr = kwargs["InFieldName"].result
raw_values = kwargs["RawValues"]
normal_values = kwargs["NormalValues"]
if len(raw_values) != len(normal_values):
raise MixedArrayLengths(
len(raw_values), len(normal_values), lineno=self.lineno
)
if len(raw_values) != len(set(raw_values)):
raise DuplicateRawValues(lineno=self.argument_lines.get("RawValues"))
result = numpy.ma.empty(arr.shape, dtype=float)
value_pairs = sorted(zip(raw_values, normal_values))
# For raw values less than the lowest raw value, set them to the corresponding normal value
result[arr <= value_pairs[0][0]] = value_pairs[0][1]
# Assign normal values for each of the line segments that approximate the curve
for i, (raw, normal) in list(enumerate(value_pairs))[1:]:
prev_raw = value_pairs[i - 1][0]
prev_normal = value_pairs[i - 1][1]
m = (normal - prev_normal) / (raw - prev_raw)
b = prev_normal - m * prev_raw
where_idx = numpy.where(
numpy.logical_and(arr.data > prev_raw, arr.data <= raw)
)
result[where_idx] = arr.data[where_idx]
result[where_idx] *= m
result[where_idx] += b
# For raw values greater than the highest raw value, set them to the corresponding normal value
result[arr > value_pairs[-1][0]] = value_pairs[-1][1]
result.mask = arr.mask.copy()
return result
class NormalizeMeanToMid(NormalizeCurve):
"""Uses "NormalizeCurve" to create a non-linear transformation that is a good match for the input data"""
display_name = "Mean to Mid"
inputs = {
"InFieldName": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
"IgnoreZeros": params.BooleanParameter(),
"NormalValues": params.ListParameter(params.NumberParameter()),
}
output = params.DataParameter()
def execute(self, **kwargs):
arr = kwargs["InFieldName"].result
ignore_zeros = kwargs["IgnoreZeros"]
low_value = arr.min()
high_value = arr.max()
if ignore_zeros:
arr = arr[arr != 0]
mean_value = arr.mean()
below_mean = arr[arr <= mean_value]
above_mean = arr[arr > mean_value]
high_mean = above_mean.compressed().mean()
low_mean = below_mean.compressed().mean()
raw_values = [low_value, low_mean, mean_value, high_mean, high_value]
normal_values = kwargs["NormalValues"][:]
if raw_values[-1] == raw_values[-2]:
del raw_values[-2]
del normal_values[-2]
if raw_values[0] == raw_values[1]:
del raw_values[1]
del normal_values[1]
kwargs = copy.copy(kwargs)
kwargs["RawValues"] = raw_values
kwargs["NormalValues"] = normal_values
return super(NormalizeMeanToMid, self).execute(**kwargs)
class NormalizeCurveZScore(Command):
    """Converts input values into normalized values based on a user-defined
    curve whose raw breakpoints are expressed as z-scores of the input."""
    display_name = "Normalize Curve by Z Score"
    inputs = {
        "InFieldName": params.ResultParameter(params.DataParameter(), is_fuzzy=False),
        "ZScoreValues": params.ListParameter(params.NumberParameter()),
        "NormalValues": params.ListParameter(params.NumberParameter()),
    }
    output = params.DataParameter()

    def execute(self, **kwargs):
        arr = kwargs["InFieldName"].result
        z_scores = kwargs["ZScoreValues"]
        normals = kwargs["NormalValues"]
        if len(z_scores) != len(normals):
            raise MixedArrayLengths(len(z_scores), len(normals), lineno=self.lineno)
        # Translate z-scores into raw breakpoints via the array's own statistics.
        mean = numpy.ma.mean(arr)
        std = numpy.ma.std(arr)
        raw_points = [mean + z * std for z in z_scores]
        result = numpy.ma.empty(arr.shape, dtype=float)
        pairs = sorted(zip(raw_points, normals))
        # Clamp everything below the first breakpoint to its normal value.
        result[arr <= pairs[0][0]] = pairs[0][1]
        # Piecewise-linear interpolation between consecutive breakpoints.
        for i in range(1, len(pairs)):
            raw, normal = pairs[i]
            prev_raw, prev_normal = pairs[i - 1]
            slope = (normal - prev_normal) / (raw - prev_raw)
            intercept = prev_normal - slope * prev_raw
            segment = numpy.where(
                numpy.logical_and(arr.data > prev_raw, arr.data <= raw)
            )
            result[segment] = arr.data[segment] * slope + intercept
        # Clamp everything above the last breakpoint to its normal value.
        result[arr > pairs[-1][0]] = pairs[-1][1]
        result.mask = arr.mask.copy()
        return result
class PrintVars(Command):
    """Prints each variable in a list of variable names."""
    display_name = "Print variable(s) to screen or file"
    inputs = {
        "InFieldNames": params.ListParameter(params.ResultParameter()),
        "OutFileName": params.PathParameter(must_exist=False, required=False),
    }
    output = params.BooleanParameter()

    def execute(self, **kwargs):
        """Write each command's ``name: result`` line to OutFileName if given,
        otherwise print them to stdout. Always returns True."""
        commands = kwargs["InFieldNames"]
        out_path = kwargs.get("OutFileName")
        if out_path:
            body = "\n".join(
                "{}: {}".format(c.result_name, c.result) for c in commands
            )
            with open(out_path, "w") as f_out:
                f_out.write(body)
        else:
            for c in commands:
                print("{}: {}".format(c.result_name, c.result))
        return True
| [
"numpy.copy",
"mpilot.params.ResultParameter",
"numpy.ma.std",
"numpy.ma.mean",
"mpilot.utils.insure_fuzzy",
"numpy.full",
"mpilot.params.PathParameter",
"mpilot.params.NumberParameter",
"numpy.ma.minimum",
"numpy.ma.maximum",
"numpy.logical_and",
"mpilot.params.DataParameter",
"numpy.ma.emp... | [((565, 587), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (585, 587), False, 'from mpilot import params\n'), ((970, 992), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (990, 992), False, 'from mpilot import params\n'), ((1439, 1461), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (1459, 1461), False, 'from mpilot import params\n'), ((2106, 2128), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (2126, 2128), False, 'from mpilot import params\n'), ((2864, 2886), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (2884, 2886), False, 'from mpilot import params\n'), ((3465, 3487), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (3485, 3487), False, 'from mpilot import params\n'), ((3955, 3977), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (3975, 3977), False, 'from mpilot import params\n'), ((4489, 4511), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (4509, 4511), False, 'from mpilot import params\n'), ((5007, 5029), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (5027, 5029), False, 'from mpilot import params\n'), ((5603, 5625), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (5623, 5625), False, 'from mpilot import params\n'), ((6466, 6488), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (6486, 6488), False, 'from mpilot import params\n'), ((7339, 7361), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (7359, 7361), False, 'from mpilot import params\n'), ((8608, 8630), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (8628, 8630), False, 'from mpilot import params\n'), ((9822, 9844), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (9842, 9844), False, 'from mpilot import params\n'), ((11855, 
11877), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (11875, 11877), False, 'from mpilot import params\n'), ((13314, 13336), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (13334, 13336), False, 'from mpilot import params\n'), ((15280, 15305), 'mpilot.params.BooleanParameter', 'params.BooleanParameter', ([], {}), '()\n', (15303, 15305), False, 'from mpilot import params\n'), ((637, 677), 'numpy.copy', 'numpy.copy', (["kwargs['InFieldName'].result"], {}), "(kwargs['InFieldName'].result)\n", (647, 677), False, 'import numpy\n'), ((3062, 3083), 'numpy.copy', 'numpy.copy', (['arrays[0]'], {}), '(arrays[0])\n', (3072, 3083), False, 'import numpy\n'), ((6349, 6387), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {'required': '(False)'}), '(required=False)\n', (6371, 6387), False, 'from mpilot import params\n'), ((6407, 6445), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {'required': '(False)'}), '(required=False)\n', (6429, 6445), False, 'from mpilot import params\n'), ((7090, 7128), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {'required': '(False)'}), '(required=False)\n', (7112, 7128), False, 'from mpilot import params\n'), ((7162, 7200), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {'required': '(False)'}), '(required=False)\n', (7184, 7200), False, 'from mpilot import params\n'), ((7222, 7260), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {'required': '(False)'}), '(required=False)\n', (7244, 7260), False, 'from mpilot import params\n'), ((7280, 7318), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {'required': '(False)'}), '(required=False)\n', (7302, 7318), False, 'from mpilot import params\n'), ((7679, 7697), 'numpy.ma.mean', 'numpy.ma.mean', (['arr'], {}), '(arr)\n', (7692, 7697), False, 'import numpy\n'), ((7716, 7733), 'numpy.ma.std', 'numpy.ma.std', (['arr'], {}), '(arr)\n', (7728, 7733), False, 
'import numpy\n'), ((8091, 8123), 'mpilot.utils.insure_fuzzy', 'insure_fuzzy', (['result', 'start', 'end'], {}), '(result, start, end)\n', (8103, 8123), False, 'from mpilot.utils import insure_fuzzy\n'), ((8563, 8587), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (8585, 8587), False, 'from mpilot import params\n'), ((10337, 10375), 'numpy.ma.empty', 'numpy.ma.empty', (['arr.shape'], {'dtype': 'float'}), '(arr.shape, dtype=float)\n', (10351, 10375), False, 'import numpy\n'), ((11737, 11762), 'mpilot.params.BooleanParameter', 'params.BooleanParameter', ([], {}), '()\n', (11760, 11762), False, 'from mpilot import params\n'), ((12706, 12723), 'copy.copy', 'copy.copy', (['kwargs'], {}), '(kwargs)\n', (12715, 12723), False, 'import copy\n'), ((13711, 13729), 'numpy.ma.mean', 'numpy.ma.mean', (['arr'], {}), '(arr)\n', (13724, 13729), False, 'import numpy\n'), ((13748, 13765), 'numpy.ma.std', 'numpy.ma.std', (['arr'], {}), '(arr)\n', (13760, 13765), False, 'import numpy\n'), ((13863, 13901), 'numpy.ma.empty', 'numpy.ma.empty', (['arr.shape'], {'dtype': 'float'}), '(arr.shape, dtype=float)\n', (13877, 13901), False, 'import numpy\n'), ((15205, 15259), 'mpilot.params.PathParameter', 'params.PathParameter', ([], {'must_exist': '(False)', 'required': '(False)'}), '(must_exist=False, required=False)\n', (15225, 15259), False, 'from mpilot import params\n'), ((527, 549), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (547, 549), False, 'from mpilot import params\n'), ((833, 855), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (853, 855), False, 'from mpilot import params\n'), ((910, 932), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (930, 932), False, 'from mpilot import params\n'), ((2060, 2084), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (2082, 2084), False, 'from mpilot import params\n'), ((3328, 3350), 
'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (3348, 3350), False, 'from mpilot import params\n'), ((3405, 3427), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (3425, 3427), False, 'from mpilot import params\n'), ((5557, 5581), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (5579, 5581), False, 'from mpilot import params\n'), ((6288, 6310), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (6308, 6310), False, 'from mpilot import params\n'), ((7018, 7040), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (7038, 7040), False, 'from mpilot import params\n'), ((8351, 8373), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (8371, 8373), False, 'from mpilot import params\n'), ((8434, 8458), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (8456, 8458), False, 'from mpilot import params\n'), ((8506, 8530), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (8528, 8530), False, 'from mpilot import params\n'), ((9211, 9267), 'numpy.full', 'numpy.full', (['arr.shape', 'default_normal_value'], {'dtype': 'float'}), '(arr.shape, default_normal_value, dtype=float)\n', (9221, 9267), False, 'import numpy\n'), ((9621, 9643), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (9641, 9643), False, 'from mpilot import params\n'), ((9704, 9728), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (9726, 9728), False, 'from mpilot import params\n'), ((9776, 9800), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (9798, 9800), False, 'from mpilot import params\n'), ((11673, 11695), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (11693, 11695), False, 'from mpilot import params\n'), ((11809, 11833), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (11831, 
11833), False, 'from mpilot import params\n'), ((13110, 13132), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (13130, 13132), False, 'from mpilot import params\n'), ((13196, 13220), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (13218, 13220), False, 'from mpilot import params\n'), ((13268, 13292), 'mpilot.params.NumberParameter', 'params.NumberParameter', ([], {}), '()\n', (13290, 13292), False, 'from mpilot import params\n'), ((15155, 15179), 'mpilot.params.ResultParameter', 'params.ResultParameter', ([], {}), '()\n', (15177, 15179), False, 'from mpilot import params\n'), ((1370, 1392), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (1390, 1392), False, 'from mpilot import params\n'), ((1969, 1991), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (1989, 1991), False, 'from mpilot import params\n'), ((2795, 2817), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (2815, 2817), False, 'from mpilot import params\n'), ((3886, 3908), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (3906, 3908), False, 'from mpilot import params\n'), ((4171, 4193), 'numpy.ma.minimum', 'numpy.ma.minimum', (['x', 'y'], {}), '(x, y)\n', (4187, 4193), False, 'import numpy\n'), ((4420, 4442), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (4440, 4442), False, 'from mpilot import params\n'), ((4705, 4727), 'numpy.ma.maximum', 'numpy.ma.maximum', (['x', 'y'], {}), '(x, y)\n', (4721, 4727), False, 'import numpy\n'), ((4938, 4960), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (4958, 4960), False, 'from mpilot import params\n'), ((5466, 5488), 'mpilot.params.DataParameter', 'params.DataParameter', ([], {}), '()\n', (5486, 5488), False, 'from mpilot import params\n'), ((11003, 11058), 'numpy.logical_and', 'numpy.logical_and', (['(arr.data > prev_raw)', '(arr.data <= raw)'], {}), '(arr.data 
> prev_raw, arr.data <= raw)\n', (11020, 11058), False, 'import numpy\n'), ((14529, 14584), 'numpy.logical_and', 'numpy.logical_and', (['(arr.data > prev_raw)', '(arr.data <= raw)'], {}), '(arr.data > prev_raw, arr.data <= raw)\n', (14546, 14584), False, 'import numpy\n')] |
# Generated by Django 3.0.5 on 2020-06-10 06:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: makes endereco 'bairro'/'complemento' optional and
    defines the full set of order-status choices on pedido."""

    dependencies = [
        ('api', '0021_auto_20200606_0449'),
    ]
    operations = [
        migrations.AlterField(
            model_name='endereco',
            name='bairro',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='endereco',
            name='complemento',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='pedido',
            name='status',
            field=models.CharField(choices=[('em_analise', 'em análise'), ('preparando', 'preparando envio'), ('despachado', 'despachado'), ('entregue', 'entregue'), ('suspenso', 'suspenso'), ('cancelado', 'cancelado')], default='em_analise', max_length=20),
        ),
    ]
| [
"django.db.models.CharField"
] | [((334, 389), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (350, 389), False, 'from django.db import migrations, models\n'), ((518, 573), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (534, 573), False, 'from django.db import migrations, models\n'), ((695, 946), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('em_analise', 'em análise'), ('preparando', 'preparando envio'), (\n 'despachado', 'despachado'), ('entregue', 'entregue'), ('suspenso',\n 'suspenso'), ('cancelado', 'cancelado')]", 'default': '"""em_analise"""', 'max_length': '(20)'}), "(choices=[('em_analise', 'em análise'), ('preparando',\n 'preparando envio'), ('despachado', 'despachado'), ('entregue',\n 'entregue'), ('suspenso', 'suspenso'), ('cancelado', 'cancelado')],\n default='em_analise', max_length=20)\n", (711, 946), False, 'from django.db import migrations, models\n')] |
from flask import render_template, redirect, url_for, flash, request
from werkzeug.urls import url_parse
from flask_login import login_user, logout_user, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo
from flask_babel import _, lazy_gettext as _l
from werkzeug.datastructures import MultiDict
from .models.user import User
from .models.messages import *
from flask import Blueprint
# One blueprint per view group so each route set can be registered independently.
messages_bp = Blueprint('messages', __name__)
messages_email_bp = Blueprint('messages_email', __name__)
message_details_bp = Blueprint('message_details', __name__)
class MessageForm(FlaskForm):
    """Compose-message form: recipient address, subject, and message body."""
    recipient_email = StringField(_l('Recipient Email'), validators=[DataRequired(), Email()])
    subject = StringField(_l('Subject'), validators=[DataRequired()])
    message = TextAreaField('Message', validators=[DataRequired()])
    submit = SubmitField(_l('Send'))

    def validate_recipient_email(self, recipient_email):
        """Reject submission when the recipient address belongs to no user.

        WTForms only discovers inline validators named ``validate_<fieldname>``;
        the previous name ``validate_email`` targeted a nonexistent ``email``
        field, so this check was never executed.
        """
        if not User.email_exists(recipient_email.data):
            raise ValidationError(_('Please check your email - recipient not found!'))
@messages_bp.route('/messages', methods=['GET', 'POST'])
def messages():
    """Inbox view: list received/sent messages and handle sending a new one."""
    messages = get_messages()
    unread = num_unread()
    sent_msgs = get_sent_msgs()
    form = MessageForm()
    if form.validate_on_submit():
        recipient_id = User.get_id_by_email(form.recipient_email.data)
        # PEP 8: compare to None with ``is`` (was ``== None``).
        if recipient_id is None:
            flash('That email is not associated with any user.')
            return redirect(url_for('messages.messages'))
        elif form.recipient_email.data == current_user.email:
            flash('You cannot send yourself a message!')
            return redirect(url_for('messages.messages'))
        elif send_message(recipient_id, form.subject.data, form.message.data):
            flash('Your message has been sent!')
            return redirect(url_for('messages.messages'))
    return render_template('messages.html', title='My Messages', unread=unread, messages=messages, sent_msgs=sent_msgs, form=form)
@messages_email_bp.route('/messages/email/<email>', methods=['GET', 'POST'])
def text(email):
    """Compose view pre-filled with a recipient address taken from the URL."""
    messages = get_messages()
    unread = num_unread()
    sent_msgs = get_sent_msgs()
    form = MessageForm()
    if request.method == 'GET':
        # Pre-populate the recipient field from the path parameter.
        form = MessageForm(formdata=MultiDict({
            'recipient_email': email
        }))
    if form.validate_on_submit():
        recipient_id = User.get_id_by_email(form.recipient_email.data)
        # PEP 8: compare to None with ``is`` (was ``== None``).
        if recipient_id is None:
            flash('That email is not associated with any user.')
            return redirect(url_for('messages.messages'))
        elif form.recipient_email.data == current_user.email:
            flash('You cannot send yourself a message!')
            return redirect(url_for('messages.messages'))
        elif send_message(recipient_id, form.subject.data, form.message.data):
            flash('Your message has been sent!')
            return redirect(url_for('messages.messages'))
    return render_template('messages.html', title='My Messages', messages=messages, unread=unread, sent_msgs=sent_msgs, form=form)
@message_details_bp.route('/messages/msg/<mid>')
def detailed_messages(mid):
    """Show a single message; mark it read when its recipient opens it."""
    message = get_message_by_mid(mid)
    unread = num_unread()
    # NOTE(review): ``message[1]`` is assumed to be the read-status column of
    # the fetched row, while ``message.recipient_id`` is attribute access on
    # the same object -- confirm the row type supports both access styles.
    # Only a visit by the recipient flips the status to read.
    if message[1] == 'Unread' and current_user.id == message.recipient_id:
        mark_message_read(mid)
    return render_template('message_detail.html', unread= unread,title = 'Your Message', message=message)
| [
"flask.render_template",
"flask_babel._",
"werkzeug.datastructures.MultiDict",
"flask.flash",
"wtforms.validators.Email",
"flask.url_for",
"flask_babel.lazy_gettext",
"flask.Blueprint",
"wtforms.validators.DataRequired"
] | [((563, 594), 'flask.Blueprint', 'Blueprint', (['"""messages"""', '__name__'], {}), "('messages', __name__)\n", (572, 594), False, 'from flask import Blueprint\n'), ((615, 652), 'flask.Blueprint', 'Blueprint', (['"""messages_email"""', '__name__'], {}), "('messages_email', __name__)\n", (624, 652), False, 'from flask import Blueprint\n'), ((674, 712), 'flask.Blueprint', 'Blueprint', (['"""message_details"""', '__name__'], {}), "('message_details', __name__)\n", (683, 712), False, 'from flask import Blueprint\n'), ((2007, 2130), 'flask.render_template', 'render_template', (['"""messages.html"""'], {'title': '"""My Messages"""', 'unread': 'unread', 'messages': 'messages', 'sent_msgs': 'sent_msgs', 'form': 'form'}), "('messages.html', title='My Messages', unread=unread,\n messages=messages, sent_msgs=sent_msgs, form=form)\n", (2022, 2130), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((3104, 3227), 'flask.render_template', 'render_template', (['"""messages.html"""'], {'title': '"""My Messages"""', 'messages': 'messages', 'unread': 'unread', 'sent_msgs': 'sent_msgs', 'form': 'form'}), "('messages.html', title='My Messages', messages=messages,\n unread=unread, sent_msgs=sent_msgs, form=form)\n", (3119, 3227), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((3485, 3581), 'flask.render_template', 'render_template', (['"""message_detail.html"""'], {'unread': 'unread', 'title': '"""Your Message"""', 'message': 'message'}), "('message_detail.html', unread=unread, title='Your Message',\n message=message)\n", (3500, 3581), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((778, 799), 'flask_babel.lazy_gettext', '_l', (['"""Recipient Email"""'], {}), "('Recipient Email')\n", (780, 799), True, 'from flask_babel import _, lazy_gettext as _l\n'), ((865, 878), 'flask_babel.lazy_gettext', '_l', (['"""Subject"""'], {}), "('Subject')\n", (867, 878), True, 'from flask_babel 
import _, lazy_gettext as _l\n'), ((1002, 1012), 'flask_babel.lazy_gettext', '_l', (['"""Send"""'], {}), "('Send')\n", (1004, 1012), True, 'from flask_babel import _, lazy_gettext as _l\n'), ((1522, 1574), 'flask.flash', 'flash', (['"""That email is not associated with any user."""'], {}), "('That email is not associated with any user.')\n", (1527, 1574), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((2619, 2671), 'flask.flash', 'flash', (['"""That email is not associated with any user."""'], {}), "('That email is not associated with any user.')\n", (2624, 2671), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((813, 827), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (825, 827), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((829, 836), 'wtforms.validators.Email', 'Email', ([], {}), '()\n', (834, 836), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((892, 906), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (904, 906), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((960, 974), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (972, 974), False, 'from wtforms.validators import ValidationError, DataRequired, Email, EqualTo\n'), ((1132, 1183), 'flask_babel._', '_', (['"""Please check your email - recipient not found!"""'], {}), "('Please check your email - recipient not found!')\n", (1133, 1183), False, 'from flask_babel import _, lazy_gettext as _l\n'), ((1603, 1631), 'flask.url_for', 'url_for', (['"""messages.messages"""'], {}), "('messages.messages')\n", (1610, 1631), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((1707, 1751), 'flask.flash', 'flash', (['"""You cannot send yourself a message!"""'], {}), "('You cannot send yourself a message!')\n", (1712, 1751), False, 
'from flask import render_template, redirect, url_for, flash, request\n'), ((2408, 2445), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'recipient_email': email}"], {}), "({'recipient_email': email})\n", (2417, 2445), False, 'from werkzeug.datastructures import MultiDict\n'), ((2700, 2728), 'flask.url_for', 'url_for', (['"""messages.messages"""'], {}), "('messages.messages')\n", (2707, 2728), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((2804, 2848), 'flask.flash', 'flash', (['"""You cannot send yourself a message!"""'], {}), "('You cannot send yourself a message!')\n", (2809, 2848), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((1780, 1808), 'flask.url_for', 'url_for', (['"""messages.messages"""'], {}), "('messages.messages')\n", (1787, 1808), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((1901, 1937), 'flask.flash', 'flash', (['"""Your message has been sent!"""'], {}), "('Your message has been sent!')\n", (1906, 1937), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((2877, 2905), 'flask.url_for', 'url_for', (['"""messages.messages"""'], {}), "('messages.messages')\n", (2884, 2905), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((2998, 3034), 'flask.flash', 'flash', (['"""Your message has been sent!"""'], {}), "('Your message has been sent!')\n", (3003, 3034), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((1966, 1994), 'flask.url_for', 'url_for', (['"""messages.messages"""'], {}), "('messages.messages')\n", (1973, 1994), False, 'from flask import render_template, redirect, url_for, flash, request\n'), ((3063, 3091), 'flask.url_for', 'url_for', (['"""messages.messages"""'], {}), "('messages.messages')\n", (3070, 3091), False, 'from flask import render_template, redirect, url_for, flash, request\n')] |
"""Useful constants.
"""
from rasterio.crs import CRS
#: EPSG code for WGS84 geographic coordinates.
WGS84_SRID = 4326
#: WGS84 CRS.
WGS84_CRS = CRS.from_epsg(WGS84_SRID)
#: EPSG code for the Web Mercator ("web map") projection.
WEB_MERCATOR_SRID = 3857
#: Web Mercator CRS.
WEB_MERCATOR_CRS = CRS.from_epsg(WEB_MERCATOR_SRID)
# Best widely used, equal area projection according to
# http://icaci.org/documents/ICC_proceedings/ICC2001/icc2001/file/f24014.doc
# (found on https://en.wikipedia.org/wiki/Winkel_tripel_projection#Comparison_with_other_projections)
#: Eckert IV CRS.
EQUAL_AREA_CRS = CRS({'proj': 'eck4'})
DEFAULT_SRID = WGS84_SRID
#: Default CRS, set to :py:data:`~telluric.constants.WGS84_CRS`.
DEFAULT_CRS = WGS84_CRS
def _MERCATOR_RESOLUTION_MAPPING(zoom_level):
return (2 * 20037508.342789244) / (256 * pow(2, zoom_level))
#: Ground resolution (metres per pixel) for Web Mercator zoom levels 0-20.
#  A dict comprehension replaces the former ``dict(generator)`` form (C402 idiom).
MERCATOR_RESOLUTION_MAPPING = {zoom: _MERCATOR_RESOLUTION_MAPPING(zoom) for zoom in range(21)}
RASTER_TYPE = 'raster'
| [
"rasterio.crs.CRS",
"rasterio.crs.CRS.from_epsg"
] | [((100, 125), 'rasterio.crs.CRS.from_epsg', 'CRS.from_epsg', (['WGS84_SRID'], {}), '(WGS84_SRID)\n', (113, 125), False, 'from rasterio.crs import CRS\n'), ((192, 224), 'rasterio.crs.CRS.from_epsg', 'CRS.from_epsg', (['WEB_MERCATOR_SRID'], {}), '(WEB_MERCATOR_SRID)\n', (205, 224), False, 'from rasterio.crs import CRS\n'), ((495, 516), 'rasterio.crs.CRS', 'CRS', (["{'proj': 'eck4'}"], {}), "({'proj': 'eck4'})\n", (498, 516), False, 'from rasterio.crs import CRS\n')] |
import configparser
""" VARIABLES """
config_Path = "EngineDataFile\EngineConfig\GEngineSettings.ini"
class GEngineConfig:
    """Loads the engine settings INI file, creating it with defaults when missing."""

    def __init__(self):
        """Ensure the settings file exists; write a default config if it does not."""
        try:
            with open(config_Path):
                print("File exists")
        except IOError:
            print("Error opening " + str(config_Path) + ", creating new one")
            config = configparser.ConfigParser()
            # Defaults written to the DEFAULT section. The SPEED keys used to
            # carry stray trailing colons ('HORIZONTAL_SPEED:'/'VERTICAL_SPEED:'),
            # which made them unreachable through ReturnData(); fixed here.
            config['DEFAULT'] = {
                'GRAVITY': 'True',
                'GRAVITY_AMOUNT': '3',
                'SHOOT': 'True',
                'SHOOT_SPEED': '1',
                'BLOCK_SIZE': '32',
                'HORIZONTAL_SPEED': '4',
                'VERTICAL_SPEED': '8'
            }
            with open(config_Path, 'w') as configfile:
                config.write(configfile)
                print("File successfully written.")

    def ReturnData(self, data):
        """Return the value stored under *data* in the DEFAULT section.

        Stray '=' characters and surrounding whitespace are stripped from the
        value. Returns 0 when the key is absent or the file cannot be read.
        """
        config = configparser.ConfigParser()
        try:
            with open(config_Path): pass
            config.read(config_Path)
        except IOError:
            print("Error opening " + str(config_Path))
        try:
            returndata = (config['DEFAULT'][data]).replace('=', '').strip()
            return returndata
        except KeyError:
            print(str(data) + " wasn't in the configuration data")
            return 0
| [
"configparser.ConfigParser"
] | [((927, 954), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (952, 954), False, 'import configparser\n'), ((358, 385), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (383, 385), False, 'import configparser\n')] |
#!/usr/bin/env python3
from cProfile import Profile
from numpy import linspace
from super_material.gap_energy import BCSGapEnergy
def run():
    """Profile BCSGapEnergy.evaluate across its full temperature range."""
    gap = BCSGapEnergy(1.5e-3, 4000)
    # 500 evenly spaced samples from absolute zero up to T_c.
    sample_points = linspace(0, gap.critical_temperature(), 500)

    profiler = Profile()
    with profiler:
        for t in sample_points:
            gap.evaluate(t)
    profiler.print_stats()


if __name__ == "__main__":
    run()
| [
"cProfile.Profile",
"super_material.gap_energy.BCSGapEnergy"
] | [((167, 193), 'super_material.gap_energy.BCSGapEnergy', 'BCSGapEnergy', (['(0.0015)', '(4000)'], {}), '(0.0015, 4000)\n', (179, 193), False, 'from super_material.gap_energy import BCSGapEnergy\n'), ((279, 288), 'cProfile.Profile', 'Profile', ([], {}), '()\n', (286, 288), False, 'from cProfile import Profile\n')] |
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from appProcafe.models import Department, Unit, Section, Risk, Position, Location,\
Paysheet, Type, CourseRequest, CourseChangeRequest
from appProcafe.models import Telephone, Document, Takes, Course
from appProcafe.models import User, UserProfile, UserApplication, RemoveRequest
from django.utils.translation import ugettext_lazy as _
# Reference-data models exposed with the default ModelAdmin.
admin.site.register(Department)
admin.site.register(Unit)
admin.site.register(Section)
admin.site.register(Type)
admin.site.register(Risk)
admin.site.register(Paysheet)
admin.site.register(Position)
admin.site.register(Location)
class TelephoneAdmin(admin.ModelAdmin):
    """Admin list showing each telephone with its owning user."""
    list_display = ('user_ID', 'number')
admin.site.register(Telephone, TelephoneAdmin)
class TakesAdmin(admin.ModelAdmin):
    """Admin list for course enrollments: user, course, term, year, status."""
    list_display = ('user_ID', 'course_ID', 'term', 'year', 'status')
admin.site.register(Takes, TakesAdmin)
class CourseAdmin(admin.ModelAdmin):
    """Admin list for courses, searchable by name."""
    list_display = ('name', 'number_hours')
    search_fields = ['name']
admin.site.register(Course, CourseAdmin)
class UserProfileInLine(admin.StackedInline):
    """Stacked inline that embeds the UserProfile inside the User admin page."""
    model = UserProfile
    can_delete = False
    verbose_name_plural = 'Perfil'
class MyUserAdmin(UserAdmin):
    """User admin with the profile inlined; staff users see a reduced fieldset."""
    # Fieldsets shown to non-superuser staff: no permission editing.
    staff_fieldsets = (
        (None, {'fields': ('username', 'password')}),
        (_('Personal info'), {'fields': ('first_name', 'last_name', 'email')}),
        # No permissions
        (_('Permissions'), {'fields': ('is_active', 'is_staff')}),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
        (_('Grupos'), {'fields': ('groups',)}),
    )
    inlines = (UserProfileInLine,)
    list_display = ('username', 'email', 'get_hours')
    def get_hours(self, obj):
        """Completed course hours pulled from the user's related profile."""
        return UserProfile.objects.get(user_id=obj.id).finished_hours
    get_hours.short_description = "Horas completadas"
    def change_view(self, request, *args, **kwargs):
        # for non-superuser
        # NOTE(review): temporarily swapping ``self.fieldsets`` mutates shared
        # ModelAdmin state per request and is not thread-safe -- Django's
        # recommended approach is overriding ``get_fieldsets``; confirm before
        # relying on this under concurrent requests.
        if not request.user.is_superuser:
            try:
                self.fieldsets = self.staff_fieldsets
                response = UserAdmin.change_view(self, request, *args, **kwargs)
            finally:
                # Reset fieldsets to its original value
                self.fieldsets = UserAdmin.fieldsets
            return response
        else:
            return UserAdmin.change_view(self, request, *args, **kwargs)
# Replace the stock User admin with the customized one.
admin.site.unregister(User)
admin.site.register(User, MyUserAdmin)
class UserApplicationAdmin(admin.ModelAdmin):
    """Admin form for membership applications, grouped into applicant data and request status."""
    fieldsets = (
        ('Datos del Solicitante', {'fields': ('ID_number', 'USB_ID', 'first_name', 'last_name', 'birthdate', 'paysheet', 'type', 'sex', 'location', 'position', 'email')}),
        ('Solicitud', {'fields': ('request_date', 'status')}),
    )
admin.site.register(UserApplication, UserApplicationAdmin)
class RemoveRequestAdmin(admin.ModelAdmin):
    """Admin form for course-removal requests, grouped into requester data and request details."""
    fieldsets = (
        ('Datos del Solicitante', {'fields': ('ID_number', 'USB_ID', 'first_name', 'last_name', 'email')}),
        ('Solicitud', {'fields': ('course_ID', 'request_type', 'request_date', 'status')}),
    )
admin.site.register(RemoveRequest, RemoveRequestAdmin)
| [
"django.utils.translation.ugettext_lazy",
"django.contrib.admin.site.register",
"appProcafe.models.UserProfile.objects.get",
"django.contrib.admin.site.unregister",
"django.contrib.auth.admin.UserAdmin.change_view"
] | [((457, 488), 'django.contrib.admin.site.register', 'admin.site.register', (['Department'], {}), '(Department)\n', (476, 488), False, 'from django.contrib import admin\n'), ((490, 515), 'django.contrib.admin.site.register', 'admin.site.register', (['Unit'], {}), '(Unit)\n', (509, 515), False, 'from django.contrib import admin\n'), ((517, 545), 'django.contrib.admin.site.register', 'admin.site.register', (['Section'], {}), '(Section)\n', (536, 545), False, 'from django.contrib import admin\n'), ((547, 572), 'django.contrib.admin.site.register', 'admin.site.register', (['Type'], {}), '(Type)\n', (566, 572), False, 'from django.contrib import admin\n'), ((574, 599), 'django.contrib.admin.site.register', 'admin.site.register', (['Risk'], {}), '(Risk)\n', (593, 599), False, 'from django.contrib import admin\n'), ((601, 630), 'django.contrib.admin.site.register', 'admin.site.register', (['Paysheet'], {}), '(Paysheet)\n', (620, 630), False, 'from django.contrib import admin\n'), ((632, 661), 'django.contrib.admin.site.register', 'admin.site.register', (['Position'], {}), '(Position)\n', (651, 661), False, 'from django.contrib import admin\n'), ((663, 692), 'django.contrib.admin.site.register', 'admin.site.register', (['Location'], {}), '(Location)\n', (682, 692), False, 'from django.contrib import admin\n'), ((785, 831), 'django.contrib.admin.site.register', 'admin.site.register', (['Telephone', 'TelephoneAdmin'], {}), '(Telephone, TelephoneAdmin)\n', (804, 831), False, 'from django.contrib import admin\n'), ((945, 983), 'django.contrib.admin.site.register', 'admin.site.register', (['Takes', 'TakesAdmin'], {}), '(Takes, TakesAdmin)\n', (964, 983), False, 'from django.contrib import admin\n'), ((1102, 1142), 'django.contrib.admin.site.register', 'admin.site.register', (['Course', 'CourseAdmin'], {}), '(Course, CourseAdmin)\n', (1121, 1142), False, 'from django.contrib import admin\n'), ((2490, 2517), 'django.contrib.admin.site.unregister', 'admin.site.unregister', 
(['User'], {}), '(User)\n', (2511, 2517), False, 'from django.contrib import admin\n'), ((2519, 2557), 'django.contrib.admin.site.register', 'admin.site.register', (['User', 'MyUserAdmin'], {}), '(User, MyUserAdmin)\n', (2538, 2557), False, 'from django.contrib import admin\n'), ((2873, 2931), 'django.contrib.admin.site.register', 'admin.site.register', (['UserApplication', 'UserApplicationAdmin'], {}), '(UserApplication, UserApplicationAdmin)\n', (2892, 2931), False, 'from django.contrib import admin\n'), ((3210, 3264), 'django.contrib.admin.site.register', 'admin.site.register', (['RemoveRequest', 'RemoveRequestAdmin'], {}), '(RemoveRequest, RemoveRequestAdmin)\n', (3229, 3264), False, 'from django.contrib import admin\n'), ((1402, 1420), 'django.utils.translation.ugettext_lazy', '_', (['"""Personal info"""'], {}), "('Personal info')\n", (1403, 1420), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1509, 1525), 'django.utils.translation.ugettext_lazy', '_', (['"""Permissions"""'], {}), "('Permissions')\n", (1510, 1525), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1577, 1597), 'django.utils.translation.ugettext_lazy', '_', (['"""Important dates"""'], {}), "('Important dates')\n", (1578, 1597), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1653, 1664), 'django.utils.translation.ugettext_lazy', '_', (['"""Grupos"""'], {}), "('Grupos')\n", (1654, 1664), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1839, 1878), 'appProcafe.models.UserProfile.objects.get', 'UserProfile.objects.get', ([], {'user_id': 'obj.id'}), '(user_id=obj.id)\n', (1862, 1878), False, 'from appProcafe.models import User, UserProfile, UserApplication, RemoveRequest\n'), ((2433, 2486), 'django.contrib.auth.admin.UserAdmin.change_view', 'UserAdmin.change_view', (['self', 'request', '*args'], {}), '(self, request, *args, **kwargs)\n', (2454, 2486), False, 'from django.contrib.auth.admin import UserAdmin\n'), 
((2182, 2235), 'django.contrib.auth.admin.UserAdmin.change_view', 'UserAdmin.change_view', (['self', 'request', '*args'], {}), '(self, request, *args, **kwargs)\n', (2203, 2235), False, 'from django.contrib.auth.admin import UserAdmin\n')] |
#!/usr/bin/env python3
from flask import Flask, render_template, request
import os
import random
import socket

app = Flask(__name__)

# Gallery of images served from /static/images/.
images = [
    "las-01.jpg",
    "las-02.jpg",
    "las-03.jpg",
    "las-04.jpg",
    "las-05.jpg",
    "las-06.jpg"
]

@app.route('/')
def index():
    """Render the landing page with a random image and a host banner."""
    host_name = "{} to {}".format(socket.gethostname(), request.remote_addr)
    picked = random.choice(images)
    image_path = "/static/images/" + picked
    return render_template('index.html', image_path=image_path, host_name=host_name)

# Main
if __name__ == "__main__":
    port = int(os.environ.get("PORT", 80))
    try:
        app.run(host="0.0.0.0", port=port, debug=True)
    except Exception as ex:
        print(ex)
| [
"flask.render_template",
"random.choice",
"flask.Flask",
"os.environ.get",
"socket.gethostname"
] | [((105, 120), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (110, 120), False, 'from flask import Flask, render_template, request\n'), ((421, 494), 'flask.render_template', 'render_template', (['"""index.html"""'], {'image_path': 'image_path', 'host_name': 'host_name'}), "('index.html', image_path=image_path, host_name=host_name)\n", (436, 494), False, 'from flask import Flask, render_template, request\n'), ((306, 326), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (324, 326), False, 'import os, random, socket\n'), ((387, 408), 'random.choice', 'random.choice', (['images'], {}), '(images)\n', (400, 408), False, 'import os, random, socket\n'), ((545, 571), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(80)'], {}), "('PORT', 80)\n", (559, 571), False, 'import os, random, socket\n')] |
from os import listdir
from os.path import isfile, join
import numpy as np
import matplotlib.pyplot as plt
from magenta.models.nsynth.wavenet import fastgen
import sys
# Change path back to /src to load other modules
sys.path.insert(0, '/home/ubuntu/DeepBass/src')
from ingestion.IO_utils import Load, Save
from preprocess.SilenceRemoval import SR
import streamlit as st
import time
import math
###############################################################################
def LinearFade(length):
    """Return a (1, length, 1) envelope rising linearly from 0 to 1."""
    ramp = np.linspace(0, 1, length)
    return ramp.reshape(1, -1, 1)
###############################################################################
def HannFade(length):
    """Return a (1, length, 1) Hann-window fade-in envelope."""
    phase = math.pi * np.arange(length) / float(length)
    ramp = 0.5 * (1.0 - np.cos(phase))
    return ramp.reshape(1, -1, 1)
###############################################################################
def fade(encoding, fade_type, mode='in'):
    """Apply a named fade envelope along an encoding's time axis.

    fade_type is the name of a module-level function (e.g. 'LinearFade'
    or 'HannFade') mapping a length to a (1, length, 1) ramp. mode 'in'
    multiplies by the ramp; any other mode applies the complement
    (fade out).
    """
    steps = encoding.shape[1]
    builder = globals().copy().get(fade_type)
    if not builder:
        raise NotImplementedError("Fade %s not implemented" % fade_type)
    envelope = builder(steps)
    if mode != 'in':
        return (1.0 - envelope) * encoding
    return envelope * encoding
###############################################################################
def crossfade(encoding1, encoding2, fade_type):
    """Blend two encodings: fade the first out while fading the second in."""
    faded_out = fade(encoding1, fade_type, 'out')
    faded_in = fade(encoding2, fade_type, 'in')
    return faded_out + faded_in
###############################################################################
"""Demo of cross fading in the NSynth embedding space
User Parameters:
tlen (float): Amount of time for reconstruction
silence_len1 (float) : Skip this many seconds of the ending that is silent
silence_len2 (float) : Skip this many seconds of the beginning that is silent
AUDIO_DIR (str) : Directory of the audio files
output_dir (str) : Directory to save the reconstruction
model_dir (str) : Directory of the pretrained model (tf checkpoint)
Returns:
Streamlit notebook
Crossfaded audio in the form of a wav file
Notes:
sr must be 16 kHz per the model architecture
"""
# Directory where mp3 are stored.
AUDIO_DIR = '/home/ubuntu/test'
filenames = [f for f in listdir(AUDIO_DIR) if isfile(join(AUDIO_DIR, f))]
sr = 16000  # the pretrained NSynth/WaveNet checkpoint expects 16 kHz audio
# magenta also uses librosa for loading
FirstSong_fname = filenames[1]
SecondSong_fname = filenames[0]
FirstSong, _ = Load(AUDIO_DIR, FirstSong_fname , sr=sr)
SecondSong, _ = Load(AUDIO_DIR, SecondSong_fname, sr=sr)
# Remove any silence at the end of the first song
# and the beginning of the second song
t_snip = 30 # interrogation length in seconds
# SR returns an index within the t_snip-second window it inspected.
end_index = SR(FirstSong, 'end', t_snip=t_snip)
end_index = int(t_snip*sr - end_index) # change index reference frame
start_index = SR(SecondSong, 'begin', t_snip=t_snip)
FirstSong = FirstSong[:-end_index]
SecondSong = SecondSong[start_index:]
# Trim to t_len seconds
t_len = 5
sample_length = t_len*sr
FirstSong_end = FirstSong[-sample_length:]
SecondSong_begin = SecondSong[0:sample_length]
# Plot PCM of both snippets
fig, axs = plt.subplots(2, 1, figsize=(10, 5))
axs[0].plot(FirstSong_end)
axs[0].set_title('First Song')
axs[1].plot(SecondSong_begin)
axs[1].set_title('Second Song')
st.pyplot()
# Save original snippets
output_dir = '/home/ubuntu/DeepBass/src/notebooks/'
output_name1 = 'originalend_' + FirstSong_fname + '.wav'
Save(output_dir, output_name1, FirstSong_end, sr)
output_name2 = 'originalbegin_' + SecondSong_fname + '.wav'
Save(output_dir, output_name2, SecondSong_begin, sr)
model_dir = '/home/ubuntu/DeepBass/src/notebooks/wavenet-ckpt/model.ckpt-200000'
# Create encodings
start = time.time()
enc1 = fastgen.encode(FirstSong_end, model_dir, sample_length)
enc2 = fastgen.encode(SecondSong_begin, model_dir, sample_length)
end = time.time()
st.write('Encoding took ' + str((end-start)) + ' seconds')
# Create cross fading in the latent space
fade_type = 'LinearFade'
xfade_encoding = crossfade(enc1, enc2, fade_type)
fig, axs = plt.subplots(3, 1, figsize=(10, 7))
axs[0].plot(enc1[0])
axs[0].set_title('Encoding 1')
axs[1].plot(enc2[0])
axs[1].set_title('Encoding 2')
axs[2].plot(xfade_encoding[0])
axs[2].set_title('Crossfade')
st.pyplot()
start = time.time()
@st.cache
def synth():
    # Decode the crossfaded embedding back to audio; fastgen writes the
    # result to the save_paths wav file as a side effect.
    fastgen.synthesize(xfade_encoding, checkpoint_path = model_dir,
                       save_paths=['enc_' + fade_type + '_' + FirstSong_fname + \
                                   SecondSong_fname],
                       samples_per_save=sample_length)
    return None
synth()
end = time.time()
st.write('Decoding took ' + str((end-start)) + ' seconds')
# NOTE(review): synthesize() above writes to a relative path, while Load()
# here reads from output_dir -- this assumes the script's cwd is output_dir;
# verify before running elsewhere.
xfade_audio, _ = Load(output_dir, 'enc_' + fade_type + '_' + FirstSong_fname + \
                 SecondSong_fname, sr=sr)
fig, ax = plt.subplots(figsize=(10, 7))
ax.plot(xfade_audio)
ax.set_title('Crossfaded audio')
st.pyplot() | [
"magenta.models.nsynth.wavenet.fastgen.encode",
"sys.path.insert",
"streamlit.pyplot",
"os.listdir",
"os.path.join",
"preprocess.SilenceRemoval.SR",
"numpy.linspace",
"ingestion.IO_utils.Load",
"magenta.models.nsynth.wavenet.fastgen.synthesize",
"ingestion.IO_utils.Save",
"time.time",
"matplot... | [((217, 264), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""/home/ubuntu/DeepBass/src"""'], {}), "(0, '/home/ubuntu/DeepBass/src')\n", (232, 264), False, 'import sys\n'), ((2443, 2482), 'ingestion.IO_utils.Load', 'Load', (['AUDIO_DIR', 'FirstSong_fname'], {'sr': 'sr'}), '(AUDIO_DIR, FirstSong_fname, sr=sr)\n', (2447, 2482), False, 'from ingestion.IO_utils import Load, Save\n'), ((2500, 2540), 'ingestion.IO_utils.Load', 'Load', (['AUDIO_DIR', 'SecondSong_fname'], {'sr': 'sr'}), '(AUDIO_DIR, SecondSong_fname, sr=sr)\n', (2504, 2540), False, 'from ingestion.IO_utils import Load, Save\n'), ((2689, 2724), 'preprocess.SilenceRemoval.SR', 'SR', (['FirstSong', '"""end"""'], {'t_snip': 't_snip'}), "(FirstSong, 'end', t_snip=t_snip)\n", (2691, 2724), False, 'from preprocess.SilenceRemoval import SR\n'), ((2809, 2847), 'preprocess.SilenceRemoval.SR', 'SR', (['SecondSong', '"""begin"""'], {'t_snip': 't_snip'}), "(SecondSong, 'begin', t_snip=t_snip)\n", (2811, 2847), False, 'from preprocess.SilenceRemoval import SR\n'), ((3111, 3146), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {'figsize': '(10, 5)'}), '(2, 1, figsize=(10, 5))\n', (3123, 3146), True, 'import matplotlib.pyplot as plt\n'), ((3267, 3278), 'streamlit.pyplot', 'st.pyplot', ([], {}), '()\n', (3276, 3278), True, 'import streamlit as st\n'), ((3414, 3463), 'ingestion.IO_utils.Save', 'Save', (['output_dir', 'output_name1', 'FirstSong_end', 'sr'], {}), '(output_dir, output_name1, FirstSong_end, sr)\n', (3418, 3463), False, 'from ingestion.IO_utils import Load, Save\n'), ((3524, 3576), 'ingestion.IO_utils.Save', 'Save', (['output_dir', 'output_name2', 'SecondSong_begin', 'sr'], {}), '(output_dir, output_name2, SecondSong_begin, sr)\n', (3528, 3576), False, 'from ingestion.IO_utils import Load, Save\n'), ((3687, 3698), 'time.time', 'time.time', ([], {}), '()\n', (3696, 3698), False, 'import time\n'), ((3706, 3761), 'magenta.models.nsynth.wavenet.fastgen.encode', 'fastgen.encode', 
(['FirstSong_end', 'model_dir', 'sample_length'], {}), '(FirstSong_end, model_dir, sample_length)\n', (3720, 3761), False, 'from magenta.models.nsynth.wavenet import fastgen\n'), ((3769, 3827), 'magenta.models.nsynth.wavenet.fastgen.encode', 'fastgen.encode', (['SecondSong_begin', 'model_dir', 'sample_length'], {}), '(SecondSong_begin, model_dir, sample_length)\n', (3783, 3827), False, 'from magenta.models.nsynth.wavenet import fastgen\n'), ((3834, 3845), 'time.time', 'time.time', ([], {}), '()\n', (3843, 3845), False, 'import time\n'), ((4035, 4070), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {'figsize': '(10, 7)'}), '(3, 1, figsize=(10, 7))\n', (4047, 4070), True, 'import matplotlib.pyplot as plt\n'), ((4236, 4247), 'streamlit.pyplot', 'st.pyplot', ([], {}), '()\n', (4245, 4247), True, 'import streamlit as st\n'), ((4257, 4268), 'time.time', 'time.time', ([], {}), '()\n', (4266, 4268), False, 'import time\n'), ((4583, 4594), 'time.time', 'time.time', ([], {}), '()\n', (4592, 4594), False, 'import time\n'), ((4672, 4762), 'ingestion.IO_utils.Load', 'Load', (['output_dir', "('enc_' + fade_type + '_' + FirstSong_fname + SecondSong_fname)"], {'sr': 'sr'}), "(output_dir, 'enc_' + fade_type + '_' + FirstSong_fname +\n SecondSong_fname, sr=sr)\n", (4676, 4762), False, 'from ingestion.IO_utils import Load, Save\n'), ((4793, 4822), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(10, 7)'}), '(figsize=(10, 7))\n', (4805, 4822), True, 'import matplotlib.pyplot as plt\n'), ((4877, 4888), 'streamlit.pyplot', 'st.pyplot', ([], {}), '()\n', (4886, 4888), True, 'import streamlit as st\n'), ((4296, 4474), 'magenta.models.nsynth.wavenet.fastgen.synthesize', 'fastgen.synthesize', (['xfade_encoding'], {'checkpoint_path': 'model_dir', 'save_paths': "['enc_' + fade_type + '_' + FirstSong_fname + SecondSong_fname]", 'samples_per_save': 'sample_length'}), "(xfade_encoding, checkpoint_path=model_dir, save_paths=[\n 'enc_' + fade_type + '_' + 
FirstSong_fname + SecondSong_fname],\n samples_per_save=sample_length)\n", (4314, 4474), False, 'from magenta.models.nsynth.wavenet import fastgen\n'), ((2263, 2281), 'os.listdir', 'listdir', (['AUDIO_DIR'], {}), '(AUDIO_DIR)\n', (2270, 2281), False, 'from os import listdir\n'), ((514, 539), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'length'], {}), '(0, 1, length)\n', (525, 539), True, 'import numpy as np\n'), ((2292, 2310), 'os.path.join', 'join', (['AUDIO_DIR', 'f'], {}), '(AUDIO_DIR, f)\n', (2296, 2310), False, 'from os.path import isfile, join\n'), ((728, 745), 'numpy.arange', 'np.arange', (['length'], {}), '(length)\n', (737, 745), True, 'import numpy as np\n')] |
import os
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# Connection settings are read from the environment at import time; any
# missing variable becomes None and produces a malformed URL below.
USER = os.getenv("DB_USER")
PASS = os.getenv("DB_PASSWORD")
HOST = os.getenv("DB_HOST")
PORT = os.getenv("DB_PORT")
SCHEMA = os.getenv("DB_SCHEMA")
SQLALCHEMY_DATABASE_URL = f"postgresql+psycopg2://{USER}:{PASS}@{HOST}:{PORT}/{SCHEMA}"
# Module-level singletons: one engine and one session factory per process.
engine = create_engine(SQLALCHEMY_DATABASE_URL)
Session = sessionmaker(bind=engine, autoflush=False, autocommit=False)
Base = declarative_base()
def get_session():
    """Yield a database session and guarantee it is closed afterwards.

    Intended as a generator dependency (e.g. for FastAPI's Depends).
    """
    db = Session()
    try:
        yield db
    finally:
        db.close()
| [
"sqlalchemy.orm.sessionmaker",
"sqlalchemy.ext.declarative.declarative_base",
"sqlalchemy.create_engine",
"os.getenv"
] | [((152, 172), 'os.getenv', 'os.getenv', (['"""DB_USER"""'], {}), "('DB_USER')\n", (161, 172), False, 'import os\n'), ((180, 204), 'os.getenv', 'os.getenv', (['"""DB_PASSWORD"""'], {}), "('DB_PASSWORD')\n", (189, 204), False, 'import os\n'), ((212, 232), 'os.getenv', 'os.getenv', (['"""DB_HOST"""'], {}), "('DB_HOST')\n", (221, 232), False, 'import os\n'), ((240, 260), 'os.getenv', 'os.getenv', (['"""DB_PORT"""'], {}), "('DB_PORT')\n", (249, 260), False, 'import os\n'), ((270, 292), 'os.getenv', 'os.getenv', (['"""DB_SCHEMA"""'], {}), "('DB_SCHEMA')\n", (279, 292), False, 'import os\n'), ((391, 429), 'sqlalchemy.create_engine', 'create_engine', (['SQLALCHEMY_DATABASE_URL'], {}), '(SQLALCHEMY_DATABASE_URL)\n', (404, 429), False, 'from sqlalchemy import create_engine\n'), ((441, 501), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine', 'autoflush': '(False)', 'autocommit': '(False)'}), '(bind=engine, autoflush=False, autocommit=False)\n', (453, 501), False, 'from sqlalchemy.orm import sessionmaker\n'), ((510, 528), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (526, 528), False, 'from sqlalchemy.ext.declarative import declarative_base\n')] |
#!/usr/bin/env python
import sys
sys.path.append("..")
import json
import click
import datetime
import os
import shutil
from zipfile import ZipFile
from tempfile import mkdtemp
from scale_mesh import scale_mesh
from wearebeautiful.bundles import validate_manifest, MAX_SCREENSHOT_SIZE
from wearebeautiful import model_params as param
@click.command()
@click.option('--invert/--no-invert', default=False, help='Flip the normals on the STL file')
@click.argument('lresolution', type=float)
@click.argument('mresolution', type=float)
def main(invert, lresolution, mresolution):
    """Validate a model's source files and pack them into a release bundle.

    Reads manifest/surface/solid/screenshot from /src, down-scales the
    surface mesh to low and medium resolutions, and writes a
    <id>-<bodypart>-<pose>-bundle.zip into /dest. Exits with -1 on any
    validation or I/O failure.
    """
    manifest = os.path.join("/src", param.MANIFEST_FILE)
    surface = os.path.join("/src", param.SURFACE_FILE)
    solid = os.path.join("/src", param.PRINT_FILE)
    screenshot = os.path.join("/src", param.SCREENSHOT_FILE)
    try:
        # NOTE(review): manifest is already an absolute /src path, so this
        # inner join is redundant (os.path.join discards "/src" when the
        # second component is absolute) -- harmless but worth cleaning up.
        with open(os.path.join("/src", manifest), "rb") as m:
            j = m.read()
        jmanifest = json.loads(j)
    except json.decoder.JSONDecodeError as err:
        print("Cannot parse manifest file. ", err)
        print("This was the content that was read:")
        print("%s\n" % j)
        sys.exit(-1)
    except IOError as err:
        print("Cannot read manifest file. IO error.", err)
        sys.exit(-1)
    # validate_manifest returns an error string, or a falsy value on success.
    err = validate_manifest(jmanifest)
    if err:
        print(err)
        sys.exit(-1)
    id = jmanifest['id']  # shadows the id() builtin; rename if refactoring
    screenshot_size = os.path.getsize(screenshot)
    if screenshot_size > MAX_SCREENSHOT_SIZE:
        print("Maximum screenshot size is %d kbytes." % (MAX_SCREENSHOT_SIZE / 1024))
        sys.exit(-1)
    # NOTE(review): tmp_dir is only removed on the success path at the end;
    # every sys.exit below this point leaks the temporary directory.
    tmp_dir = mkdtemp()
    low_res = os.path.join(tmp_dir, "surface-low.stl")
    medium_res = os.path.join(tmp_dir, "surface-medium.stl")
    if not os.path.exists(solid):
        print("Cannot find solid.stl");
        sys.exit(-1)
    if not os.path.exists(surface):
        print("Cannot find surface.stl");
        sys.exit(-1)
    try:
        print("invert mesh", invert)
        # Generate the two reduced-resolution copies of the surface mesh.
        scale_mesh(invert, lresolution, surface, low_res)
        scale_mesh(invert, mresolution, surface, medium_res)
    except IOError as err:
        print("Cannot down-scale mesh files. Error: ", err)
        sys.exit(-1)
    try:
        shutil.copy(solid, tmp_dir)
        shutil.copy(surface, tmp_dir)
        shutil.copy(screenshot, tmp_dir)
    except IOError as err:
        print("Cannot copy files. Error: ", err)
        sys.exit(-1)
    dest = os.path.join("/dest", "%s-%s-%s-bundle.zip" % (id, jmanifest['bodypart'], jmanifest['pose']))
    with ZipFile(dest, 'w') as zip:
        zip.write(manifest, arcname="manifest.json")
        zip.write(low_res, arcname="surface-low.stl")
        zip.write(medium_res, arcname="surface-medium.stl")
        zip.write(solid, arcname="solid.stl")
        zip.write(surface, arcname="surface-orig.stl")
        zip.write(screenshot, arcname="screenshot.jpg")
    shutil.rmtree(tmp_dir)
if __name__ == "__main__":
    main()
    # main() is a click command and normally exits on its own; reaching
    # this line indicates something unexpected, so signal failure.
    sys.exit(-1)
| [
"os.path.exists",
"click.argument",
"wearebeautiful.bundles.validate_manifest",
"os.path.getsize",
"sys.exit",
"zipfile.ZipFile",
"json.loads",
"click.option",
"os.path.join",
"tempfile.mkdtemp",
"scale_mesh.scale_mesh",
"shutil.rmtree",
"shutil.copy",
"click.command",
"sys.path.append"
... | [((34, 55), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (49, 55), False, 'import sys\n'), ((338, 353), 'click.command', 'click.command', ([], {}), '()\n', (351, 353), False, 'import click\n'), ((355, 452), 'click.option', 'click.option', (['"""--invert/--no-invert"""'], {'default': '(False)', 'help': '"""Flip the normals on the STL file"""'}), "('--invert/--no-invert', default=False, help=\n 'Flip the normals on the STL file')\n", (367, 452), False, 'import click\n'), ((449, 490), 'click.argument', 'click.argument', (['"""lresolution"""'], {'type': 'float'}), "('lresolution', type=float)\n", (463, 490), False, 'import click\n'), ((492, 533), 'click.argument', 'click.argument', (['"""mresolution"""'], {'type': 'float'}), "('mresolution', type=float)\n", (506, 533), False, 'import click\n'), ((594, 635), 'os.path.join', 'os.path.join', (['"""/src"""', 'param.MANIFEST_FILE'], {}), "('/src', param.MANIFEST_FILE)\n", (606, 635), False, 'import os\n'), ((650, 690), 'os.path.join', 'os.path.join', (['"""/src"""', 'param.SURFACE_FILE'], {}), "('/src', param.SURFACE_FILE)\n", (662, 690), False, 'import os\n'), ((703, 741), 'os.path.join', 'os.path.join', (['"""/src"""', 'param.PRINT_FILE'], {}), "('/src', param.PRINT_FILE)\n", (715, 741), False, 'import os\n'), ((759, 802), 'os.path.join', 'os.path.join', (['"""/src"""', 'param.SCREENSHOT_FILE'], {}), "('/src', param.SCREENSHOT_FILE)\n", (771, 802), False, 'import os\n'), ((1256, 1284), 'wearebeautiful.bundles.validate_manifest', 'validate_manifest', (['jmanifest'], {}), '(jmanifest)\n', (1273, 1284), False, 'from wearebeautiful.bundles import validate_manifest, MAX_SCREENSHOT_SIZE\n'), ((1386, 1413), 'os.path.getsize', 'os.path.getsize', (['screenshot'], {}), '(screenshot)\n', (1401, 1413), False, 'import os\n'), ((1582, 1591), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (1589, 1591), False, 'from tempfile import mkdtemp\n'), ((1606, 1646), 'os.path.join', 'os.path.join', (['tmp_dir', 
'"""surface-low.stl"""'], {}), "(tmp_dir, 'surface-low.stl')\n", (1618, 1646), False, 'import os\n'), ((1664, 1707), 'os.path.join', 'os.path.join', (['tmp_dir', '"""surface-medium.stl"""'], {}), "(tmp_dir, 'surface-medium.stl')\n", (1676, 1707), False, 'import os\n'), ((2412, 2509), 'os.path.join', 'os.path.join', (['"""/dest"""', "('%s-%s-%s-bundle.zip' % (id, jmanifest['bodypart'], jmanifest['pose']))"], {}), "('/dest', '%s-%s-%s-bundle.zip' % (id, jmanifest['bodypart'],\n jmanifest['pose']))\n", (2424, 2509), False, 'import os\n'), ((2871, 2893), 'shutil.rmtree', 'shutil.rmtree', (['tmp_dir'], {}), '(tmp_dir)\n', (2884, 2893), False, 'import shutil\n'), ((2939, 2951), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2947, 2951), False, 'import sys\n'), ((1324, 1336), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1332, 1336), False, 'import sys\n'), ((1554, 1566), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1562, 1566), False, 'import sys\n'), ((1720, 1741), 'os.path.exists', 'os.path.exists', (['solid'], {}), '(solid)\n', (1734, 1741), False, 'import os\n'), ((1791, 1803), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1799, 1803), False, 'import sys\n'), ((1816, 1839), 'os.path.exists', 'os.path.exists', (['surface'], {}), '(surface)\n', (1830, 1839), False, 'import os\n'), ((1891, 1903), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1899, 1903), False, 'import sys\n'), ((1959, 2008), 'scale_mesh.scale_mesh', 'scale_mesh', (['invert', 'lresolution', 'surface', 'low_res'], {}), '(invert, lresolution, surface, low_res)\n', (1969, 2008), False, 'from scale_mesh import scale_mesh\n'), ((2017, 2069), 'scale_mesh.scale_mesh', 'scale_mesh', (['invert', 'mresolution', 'surface', 'medium_res'], {}), '(invert, mresolution, surface, medium_res)\n', (2027, 2069), False, 'from scale_mesh import scale_mesh\n'), ((2196, 2223), 'shutil.copy', 'shutil.copy', (['solid', 'tmp_dir'], {}), '(solid, tmp_dir)\n', (2207, 2223), False, 'import shutil\n'), 
((2232, 2261), 'shutil.copy', 'shutil.copy', (['surface', 'tmp_dir'], {}), '(surface, tmp_dir)\n', (2243, 2261), False, 'import shutil\n'), ((2270, 2302), 'shutil.copy', 'shutil.copy', (['screenshot', 'tmp_dir'], {}), '(screenshot, tmp_dir)\n', (2281, 2302), False, 'import shutil\n'), ((2515, 2533), 'zipfile.ZipFile', 'ZipFile', (['dest', '"""w"""'], {}), "(dest, 'w')\n", (2522, 2533), False, 'from zipfile import ZipFile\n'), ((924, 937), 'json.loads', 'json.loads', (['j'], {}), '(j)\n', (934, 937), False, 'import json\n'), ((1125, 1137), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1133, 1137), False, 'import sys\n'), ((1232, 1244), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1240, 1244), False, 'import sys\n'), ((2165, 2177), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2173, 2177), False, 'import sys\n'), ((2387, 2399), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2395, 2399), False, 'import sys\n'), ((831, 861), 'os.path.join', 'os.path.join', (['"""/src"""', 'manifest'], {}), "('/src', manifest)\n", (843, 861), False, 'import os\n')] |
"""
Adapted from https://github.com/hovinh/DeCNN
"""
import numpy as np
from keras import backend as K
class Backpropagation():
    def __init__(self, model, layer_name, input_data, layer_idx=None, masking=None):
        """
        @params:
            - model: a Keras Model.
            - layer_name: name of layer to be backpropagated, can be determined by
            model.layers[layer_idx].name.
            - input_data: an input to be inspected, must be in proper format
            to be able to be fed into model.
            - layer_idx: equivalent to layer_name.
            - masking: determine which units in the chosen layer to be backpropagated,
            a numpy array with the same shape with chosen layer.
        """
        self.model = model
        self.layer_name = layer_name
        self.layer = model.get_layer(layer_name)
        self.input_data = input_data
        if layer_idx is None:
            # Resolve the layer index from its name.
            for i, layer in enumerate(self.model.layers):
                if layer.name == self.layer_name:
                    self.layer_idx = i
                    break
        else:
            # Bug fix: previously an explicitly supplied layer_idx was
            # silently dropped, leaving self.layer_idx unset and causing
            # an AttributeError in subclasses that rely on it.
            self.layer_idx = layer_idx
        if masking is None:
            # Default mask selects every unit of the chosen layer (batch of 1).
            shape = [1] + list(self.layer.output_shape[1:])
            masking = np.ones(shape, 'float32')
        self.masking = masking

    def compute(self):
        """
        @returns:
            - output_data: obtained heatmap.
            - func: a reuseable function to compute backpropagation in the same setting.
        """
        # Scalar objective: mean activation of the masked units.
        loss = K.mean(self.layer.output * self.masking)
        gradients = K.gradients(loss, self.model.input)[0]
        func = K.function([self.model.input], [gradients])
        output_data = func([self.input_data])[0]
        output_data = self.filter_gradient(output_data)
        return output_data, func

    def filter_gradient(self, x):
        """
        The gradients to be visualized have non-negative values: take the
        absolute value, then the per-pixel maximum over the last axis.
        """
        x_abs = np.abs(x)
        x_max = np.amax(x_abs, axis=-1)
        return x_max
class SmoothGrad(Backpropagation):
    def __init__(self, model, layer_name, input_data, layer_idx=None, masking=None):
        """
        For parameters, please refer to Backpropagation()
        """
        super(SmoothGrad, self).__init__(model, layer_name, input_data, layer_idx, masking)
    def compute(self, n_samples=50, batch_size=4):
        """
        @params:
            - n_samples: number of noisy samples to average the gradients over.
            - batch_size: must be <= n_samples. If n_samples is too big there may
            not be enough memory to process everything at once, hence samples are
            processed iteratively batch-by-batch.
        @returns:
            - smooth_gradients: obtained heatmap.
        """
        # Reuse the gradient function built by the parent class.
        _, func = super().compute()
        shape = [n_samples] + list(self.model.input.shape[1:])
        new_gradients = np.zeros(shape)
        for start_idx in range(0, n_samples, batch_size):
            # Clamp the last batch so indices never exceed n_samples.
            if n_samples >= start_idx + batch_size:
                end_idx = start_idx + batch_size
            else:
                end_idx = n_samples
            shape = [end_idx - start_idx] + list(self.model.input.shape[1:])
            # NOTE(review): noise here is uniform in [0, 1), not zero-mean
            # Gaussian as in the SmoothGrad paper -- confirm this is intended.
            random_noise = np.random.random(shape)
            # random_noise = np.random.random(self.input_data.shape)
            new_images = random_noise + self.input_data
            gradients = func([new_images])[0]
            new_gradients[start_idx:end_idx, ...] = gradients
        # Average over the noisy samples, then keep the non-negative channel max.
        smooth_gradients = np.expand_dims(np.mean(new_gradients, axis=0), axis=0)
        smooth_gradients = self.filter_gradient(smooth_gradients)
        return smooth_gradients
class GuidedBackprop(Backpropagation):
    def __init__(self, model, layer_name, input_data, layer_idx=None, masking=None):
        """
        For parameters, please refer to Backpropagation()
        """
        super(GuidedBackprop, self).__init__(model, layer_name, input_data, layer_idx, masking)
    def compute(self):
        """
        Backpropagate layer-by-layer, gating the gradients with the forward
        activations at every step (guided backpropagation).
        @returns:
            - gradients_input: obtained heatmap.
        """
        # Cached activations of every layer up to the target, keyed by name.
        forward_values = [self.input_data] + self.feed_forward()
        forward_values_dict = {self.model.layers[i].name: forward_values[i] for i in range(self.layer_idx + 1)}
        gradients = self.masking
        for layer_idx in range(self.layer_idx - 1, -1, -1):
            layer_cur = self.model.layers[layer_idx + 1].output
            layer_prev = self.model.layers[layer_idx].output
            layer_prev_name = self.model.layers[layer_idx].name
            gradients_cur = gradients
            # "Backward ReLU": zero out negative incoming gradients.
            gate_b = (gradients_cur > 0.) * gradients_cur
            gradients = self.guided_backprop_adjacent(layer_cur,
                                                      layer_prev,
                                                      forward_values_dict[layer_prev_name],
                                                      gate_b)
            # Rescale only when the gradient is not constant, avoiding a
            # division by zero inside normalize_gradient.
            if gradients.min() != gradients.max():
                gradients = self.normalize_gradient(gradients)
        gradients_input = gradients
        gradients_input = self.filter_gradient(gradients_input)
        return gradients_input
    def guided_backprop_adjacent(self, layer_cur, layer_prev, values_prev, gate_b):
        """Compute one gated gradient step between two adjacent layers."""
        loss = K.mean(layer_cur * gate_b)
        gradients = K.gradients(loss, layer_prev)[0]
        # "Forward ReLU": only let gradient through where the forward
        # activation was positive.
        gate_f = K.cast(values_prev > 0., 'float32')
        guided_gradients = gradients * gate_f
        func = K.function([self.model.input], [guided_gradients])
        output_data = func([self.input_data])[0]
        return output_data
    def feed_forward(self):
        """Run the model forward and cache activations of layers 1..layer_idx."""
        forward_layers = [layer.output for layer in self.model.layers[1:self.layer_idx + 1]]
        func = K.function([self.model.input], forward_layers)
        self.forward_values = func([self.input_data])
        return self.forward_values
    def normalize_gradient(self, img):
        """
        Gradients computed tend to become pretty small, especially after many layers.
        So after each layer, we will multiply them with a constant to keep them in acceptable
        range (if applicable).
        """
        gap = img.max() - img.min()
        if abs(gap) > 1.:
            return img
        amplitude = 1. / gap
        img *= amplitude
        return img
class DeconvNet(GuidedBackprop):
    def __init__(self, model, layer_name, input_data, layer_idx=None, masking=None):
        """
        For parameters, please refer to Backpropagation()
        """
        super(DeconvNet, self).__init__(model, layer_name, input_data, layer_idx, masking)
    def compute(self):
        """
        Like GuidedBackprop.compute, but the forward-activation gate is fed
        an all-ones tensor, so only the backward gate on gradients applies.
        @returns:
            - gradients_input: obtained heatmap.
        """
        gradients = self.masking
        for layer_idx in range(self.layer_idx - 1, -1, -1):
            layer_prev = self.model.layers[layer_idx].output
            layer_cur = self.model.layers[layer_idx + 1].output
            # All-ones "activations" make the forward gate inside
            # guided_backprop_adjacent a no-op.
            forward_values_prev = np.ones(
                [self.input_data.shape[0]] + list(self.model.layers[layer_idx].output_shape[1:]))
            gradients_cur = gradients
            # "Backward ReLU": zero out negative incoming gradients.
            gate_b = (gradients_cur > 0.) * gradients_cur
            gradients = self.guided_backprop_adjacent(layer_cur,
                                                      layer_prev,
                                                      forward_values_prev,
                                                      gate_b)
            # Rescale only when the gradient is not constant (see
            # normalize_gradient).
            if gradients.min() != gradients.max():
                gradients = self.normalize_gradient(gradients)
        gradients_input = gradients
        gradients_input = self.filter_gradient(gradients_input)
        return gradients_input
| [
"numpy.abs",
"numpy.mean",
"keras.backend.cast",
"numpy.ones",
"numpy.random.random",
"keras.backend.mean",
"keras.backend.gradients",
"numpy.zeros",
"keras.backend.function",
"numpy.amax"
] | [((1485, 1525), 'keras.backend.mean', 'K.mean', (['(self.layer.output * self.masking)'], {}), '(self.layer.output * self.masking)\n', (1491, 1525), True, 'from keras import backend as K\n'), ((1600, 1643), 'keras.backend.function', 'K.function', (['[self.model.input]', '[gradients]'], {}), '([self.model.input], [gradients])\n', (1610, 1643), True, 'from keras import backend as K\n'), ((1920, 1929), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (1926, 1929), True, 'import numpy as np\n'), ((1946, 1969), 'numpy.amax', 'np.amax', (['x_abs'], {'axis': '(-1)'}), '(x_abs, axis=-1)\n', (1953, 1969), True, 'import numpy as np\n'), ((2874, 2889), 'numpy.zeros', 'np.zeros', (['shape'], {}), '(shape)\n', (2882, 2889), True, 'import numpy as np\n'), ((5257, 5283), 'keras.backend.mean', 'K.mean', (['(layer_cur * gate_b)'], {}), '(layer_cur * gate_b)\n', (5263, 5283), True, 'from keras import backend as K\n'), ((5354, 5390), 'keras.backend.cast', 'K.cast', (['(values_prev > 0.0)', '"""float32"""'], {}), "(values_prev > 0.0, 'float32')\n", (5360, 5390), True, 'from keras import backend as K\n'), ((5452, 5502), 'keras.backend.function', 'K.function', (['[self.model.input]', '[guided_gradients]'], {}), '([self.model.input], [guided_gradients])\n', (5462, 5502), True, 'from keras import backend as K\n'), ((5716, 5762), 'keras.backend.function', 'K.function', (['[self.model.input]', 'forward_layers'], {}), '([self.model.input], forward_layers)\n', (5726, 5762), True, 'from keras import backend as K\n'), ((1212, 1237), 'numpy.ones', 'np.ones', (['shape', '"""float32"""'], {}), "(shape, 'float32')\n", (1219, 1237), True, 'import numpy as np\n'), ((1546, 1581), 'keras.backend.gradients', 'K.gradients', (['loss', 'self.model.input'], {}), '(loss, self.model.input)\n', (1557, 1581), True, 'from keras import backend as K\n'), ((3210, 3233), 'numpy.random.random', 'np.random.random', (['shape'], {}), '(shape)\n', (3226, 3233), True, 'import numpy as np\n'), ((3510, 3540), 'numpy.mean', 
'np.mean', (['new_gradients'], {'axis': '(0)'}), '(new_gradients, axis=0)\n', (3517, 3540), True, 'import numpy as np\n'), ((5304, 5333), 'keras.backend.gradients', 'K.gradients', (['loss', 'layer_prev'], {}), '(loss, layer_prev)\n', (5315, 5333), True, 'from keras import backend as K\n')] |
import torch
import numpy as np
from rlbot.agents.base_agent import SimpleControllerState, BaseAgent
class OutputFormatter():
    """Formats raw model outputs into RLBot controller states."""

    def transform_action(self, action):
        """Convert a batched action tensor into a controller state."""
        raw = action[0].detach().cpu().numpy()
        # The first five entries stay analog; the final three are
        # thresholded at zero to become button booleans.
        analog = raw[:5]
        buttons = raw[5:] >= 0
        combined = np.concatenate((analog, buttons), axis=0)
        return BaseAgent.convert_output_to_v4(self, combined)

    def transform_output(self, model_output):
        """Split model output into a controller state and the detached
        action or state value."""
        action, value = model_output
        return self.transform_action(action), value.detach()

    @staticmethod
    def action_space():
        """Number of output actions the model produces."""
        return 8
class RecurrentOutputFormatter(OutputFormatter):
    """Output formatter for recurrent models, whose action arrives wrapped
    in one extra outer element."""

    def transform_action(self, action):
        # Unwrap the outer element (presumably the action of an
        # (action, state) pair -- confirm against the model) before
        # delegating to the base formatter.
        inner = action[0]
        return OutputFormatter.transform_action(self, inner)
"rlbot.agents.base_agent.BaseAgent.convert_output_to_v4",
"numpy.concatenate"
] | [((428, 481), 'numpy.concatenate', 'np.concatenate', (['(action[:5], action[5:] >= 0)'], {'axis': '(0)'}), '((action[:5], action[5:] >= 0), axis=0)\n', (442, 481), True, 'import numpy as np\n'), ((512, 556), 'rlbot.agents.base_agent.BaseAgent.convert_output_to_v4', 'BaseAgent.convert_output_to_v4', (['self', 'action'], {}), '(self, action)\n', (542, 556), False, 'from rlbot.agents.base_agent import SimpleControllerState, BaseAgent\n')] |
import discord
from discord.ext import commands
import re
from .errors import BadGameArgument
from dateutil.relativedelta import relativedelta
import datetime
import parsedatetime as pdt
import typing
import operator
# Explicit public API for `from ... import *`. Note that 'Time',
# 'FutureTime', 'PastTime' and 'ShortPastTime' are defined further down
# the module.
__all__ = (
    'CommandConverter',
    'dice_roll',
    'board_coords',
    'espeak_params',
    'ShortTime',
    'HumanTime',
    'Time',
    'FutureTime',
    'PastTime',
    'ShortPastTime'
)
class CommandConverter(commands.Command):
    """Argument converter that resolves a name to one of the bot's commands."""

    @classmethod
    async def convert(cls, ctx, argument: str):
        """Look up *argument* on the bot; raise CommandNotFound if absent."""
        command = ctx.bot.get_command(argument)
        if command is not None:
            return command
        raise commands.CommandNotFound(argument)
def dice_roll(argument: str):
    """Parse an ``NdS`` dice spec (e.g. ``2d6``) into ``(count, sides)``.

    Both parts are optional: a bare count defaults to 6 sides and a bare
    ``dS`` defaults to one die. Raises ValueError for anything that is not
    a valid spec or is out of range (1-200 dice, 2-100 sides).
    """
    # fullmatch (rather than match) rejects inputs with trailing garbage
    # such as "2x6" or "banana"; with match, the all-optional groups would
    # match the empty prefix and silently return the defaults.
    match = re.fullmatch(r'(?P<count>\d+)?(d(?P<sides>\d+))?', argument)
    if match is None or not match.group(0):
        raise ValueError
    count = int(match['count'] or 1)
    sides = int(match['sides'] or 6)
    # Explicit check instead of assert so validation survives `python -O`
    # (asserts are stripped); also yields ValueError consistently instead
    # of AssertionError.
    if not (1 <= count <= 200 and 2 <= sides <= 100):
        raise ValueError
    return count, sides
def board_coords(minx=1, maxx=5, miny=1, maxy=5):
    """Build a converter that parses board coordinates into 0-based (x, y).

    Accepts either a letter-number pair such as ``b3`` (row letter a-e,
    then the column digit) or two whitespace-separated integers ``"y x"``.
    Tuples are passed through unchanged (assumed pre-converted).

    The converter raises BadGameArgument when parsing fails or the
    coordinate falls outside the (minx..maxx, miny..maxy) board.
    """
    def real_converter(argument: typing.Union[str, tuple]):
        if isinstance(argument, tuple):
            return argument
        try:
            text = argument.lower()
            if text.startswith(tuple('abcde')):
                y = ord(text[0]) - 0x60  # 'a' -> 1, 'b' -> 2, ...
                x = int(text[1])
            else:
                y, x = map(int, text.split())
            # Explicit range check instead of assert so validation
            # survives `python -O` (asserts are stripped).
            if not (minx <= x <= maxx and miny <= y <= maxy):
                raise ValueError(argument)
            return x - 1, y - 1
        except (ValueError, AssertionError, IndexError) as e:
            raise BadGameArgument from e
    return real_converter
def espeak_params(**valid_keys):
    """Build a converter for "key=value" espeak parameter arguments.

    ``valid_keys`` maps each accepted key to a callable that coerces the raw
    string value (e.g. ``rate=int``). Non-string arguments are assumed to be
    (key, value) pairs already and are returned as-is.
    """
    def real_converter(argument: str) -> tuple[str, typing.Union[int, str]]:
        if not isinstance(argument, str):
            # Must already be an iterable of length 2.
            key, value = argument
            return key, value
        key, raw = argument.split('=')
        # Coerce through the registered callable; unknown keys raise KeyError.
        return key, valid_keys[key](raw)
    return real_converter
class ShortTime:
    """A point in time parsed from compact relative units, e.g. "2h30m".

    The parsed offset is combined with *now* via ``init_op`` — addition
    here, so the result lies in the future. Subclasses may substitute
    ``operator.sub`` to interpret the same syntax as a past offset.
    """

    # One optional group per unit; at least one must match (checked in
    # __init__ via `not match.group(0)`).
    # NOTE(review): the years group accepts a single digit only — presumably
    # intentional, but worth confirming.
    compiled = re.compile("""(?:(?P<years>[0-9])(?:years?|y))?             # e.g. 2y
                             (?:(?P<months>[0-9]{1,2})(?:months?|mo))?     # e.g. 2months
                             (?:(?P<weeks>[0-9]{1,4})(?:weeks?|w))?        # e.g. 10w
                             (?:(?P<days>[0-9]{1,5})(?:days?|d))?          # e.g. 14d
                             (?:(?P<hours>[0-9]{1,5})(?:hours?|h))?        # e.g. 12h
                             (?:(?P<minutes>[0-9]{1,5})(?:minutes?|m))?    # e.g. 10m
                             (?:(?P<seconds>[0-9]{1,5})(?:seconds?|s))?    # e.g. 15s
                          """, re.VERBOSE)

    # How the parsed delta is applied to `now` (add = future offset).
    init_op = staticmethod(operator.add)

    def __init__(
        self,
        argument: str,
        *,
        now: datetime.datetime = None
    ):
        match = self.compiled.fullmatch(argument)
        # group(0) is empty when every optional unit group failed to match.
        if match is None or not match.group(0):
            raise commands.BadArgument('invalid time provided')
        # Unmatched groups default to '0' so relativedelta gets every key.
        data = {k: int(v) for k, v in match.groupdict(default='0').items()}
        now = now or datetime.datetime.utcnow()
        self.dt = self.init_op(now, relativedelta(**data))

    @classmethod
    async def convert(cls, ctx, argument: str):
        # Anchor "now" to the invoking message's timestamp for determinism.
        return cls(argument, now=ctx.message.created_at)
class HumanTime:
    """A point in time parsed from natural language (e.g. "tomorrow",
    "3 days") using parsedatetime."""

    calendar = pdt.Calendar(version=pdt.VERSION_CONTEXT_STYLE)

    def __init__(self, argument: str, *, now: datetime.datetime = None):
        now = now or datetime.datetime.utcnow()
        dt, status = self.calendar.parseDT(argument, sourceTime=now)
        if not status.hasDateOrTime:
            raise commands.BadArgument('invalid time provided, try e.g. "tomorrow" or "3 days"')
        if not status.hasTime:
            # replace it with the current time
            dt = dt.replace(hour=now.hour, minute=now.minute, second=now.second, microsecond=now.microsecond)
        self.dt = dt
        # Whether the parsed moment lies before the reference time.
        self._past = dt < now

    @classmethod
    async def convert(cls, ctx, argument: str):
        # Anchor "now" to the invoking message's timestamp for determinism.
        return cls(argument, now=ctx.message.created_at)
class Time(HumanTime):
    """Time that first tries the compact ShortTime syntax and falls back to
    natural-language parsing via HumanTime."""

    def __init__(self, argument: str, *, now: datetime.datetime = None):
        try:
            short = ShortTime(argument, now=now)
        except Exception:
            # Not a compact offset — defer to the natural-language parser.
            super().__init__(argument, now=now)
            return
        self.dt = short.dt
        self._past = False
class FutureTime(Time):
    """A Time that additionally rejects moments earlier than *now*."""

    def __init__(self, argument: str, *, now: datetime.datetime = None):
        super().__init__(argument, now=now)
        if not self._past:
            return
        raise commands.BadArgument('this time is in the past')
class ShortPastTime(ShortTime):
    """ShortTime variant whose offset is subtracted from *now* (i.e. past)."""

    # Subtract instead of add, so "2h" means "two hours ago".
    init_op = staticmethod(operator.sub)
class PastTime(HumanTime):
    """A point in time that must not lie in the future.

    Tries the compact ShortPastTime syntax first (offset subtracted from
    *now*); otherwise falls back to natural-language parsing and rejects
    results later than *now*.
    """

    def __init__(self, argument: str, *, now: datetime.datetime = None):
        try:
            o = ShortPastTime(argument, now=now)
        except Exception as e:
            # Not a compact offset: parse as natural language. HumanTime
            # sets self._past = (dt < now), checked below.
            super().__init__(argument, now=now)
        else:
            # Compact offsets are subtracted from now, so always past.
            self.dt = o.dt
            self._past = True
        if not self._past:
            raise commands.BadArgument('That time is in the future') | [
"dateutil.relativedelta.relativedelta",
"re.compile",
"discord.ext.commands.CommandNotFound",
"datetime.datetime.utcnow",
"re.match",
"parsedatetime.Calendar",
"discord.ext.commands.BadArgument"
] | [((745, 802), 're.match', 're.match', (['"""(?P<count>\\\\d+)?(d(?P<sides>\\\\d+))?"""', 'argument'], {}), "('(?P<count>\\\\d+)?(d(?P<sides>\\\\d+))?', argument)\n", (753, 802), False, 'import re\n'), ((2159, 2801), 're.compile', 're.compile', (['"""(?:(?P<years>[0-9])(?:years?|y))? # e.g. 2y\n (?:(?P<months>[0-9]{1,2})(?:months?|mo))? # e.g. 2months\n (?:(?P<weeks>[0-9]{1,4})(?:weeks?|w))? # e.g. 10w\n (?:(?P<days>[0-9]{1,5})(?:days?|d))? # e.g. 14d\n (?:(?P<hours>[0-9]{1,5})(?:hours?|h))? # e.g. 12h\n (?:(?P<minutes>[0-9]{1,5})(?:minutes?|m))? # e.g. 10m\n (?:(?P<seconds>[0-9]{1,5})(?:seconds?|s))? # e.g. 15s\n """', 're.VERBOSE'], {}), '(\n """(?:(?P<years>[0-9])(?:years?|y))? # e.g. 2y\n (?:(?P<months>[0-9]{1,2})(?:months?|mo))? # e.g. 2months\n (?:(?P<weeks>[0-9]{1,4})(?:weeks?|w))? # e.g. 10w\n (?:(?P<days>[0-9]{1,5})(?:days?|d))? # e.g. 14d\n (?:(?P<hours>[0-9]{1,5})(?:hours?|h))? # e.g. 12h\n (?:(?P<minutes>[0-9]{1,5})(?:minutes?|m))? # e.g. 10m\n (?:(?P<seconds>[0-9]{1,5})(?:seconds?|s))? # e.g. 
15s\n """\n , re.VERBOSE)\n', (2169, 2801), False, 'import re\n'), ((3496, 3543), 'parsedatetime.Calendar', 'pdt.Calendar', ([], {'version': 'pdt.VERSION_CONTEXT_STYLE'}), '(version=pdt.VERSION_CONTEXT_STYLE)\n', (3508, 3543), True, 'import parsedatetime as pdt\n'), ((642, 676), 'discord.ext.commands.CommandNotFound', 'commands.CommandNotFound', (['argument'], {}), '(argument)\n', (666, 676), False, 'from discord.ext import commands\n'), ((3097, 3142), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""invalid time provided"""'], {}), "('invalid time provided')\n", (3117, 3142), False, 'from discord.ext import commands\n'), ((3244, 3270), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3268, 3270), False, 'import datetime\n'), ((3308, 3329), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {}), '(**data)\n', (3321, 3329), False, 'from dateutil.relativedelta import relativedelta\n'), ((3642, 3668), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3666, 3668), False, 'import datetime\n'), ((3796, 3874), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""invalid time provided, try e.g. "tomorrow" or "3 days\\""""'], {}), '(\'invalid time provided, try e.g. "tomorrow" or "3 days"\')\n', (3816, 3874), False, 'from discord.ext import commands\n'), ((4761, 4809), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""this time is in the past"""'], {}), "('this time is in the past')\n", (4781, 4809), False, 'from discord.ext import commands\n'), ((5263, 5313), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""That time is in the future"""'], {}), "('That time is in the future')\n", (5283, 5313), False, 'from discord.ext import commands\n')] |
# coding: utf-8
from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime
import time
from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide
# Resolve Fabric hosts through the project-local SSH config file.
env.use_ssh_config = True
env.ssh_config_path = "ssh.config"
# Host port assigned to each blue/green deployment slot; the container
# itself always listens on 4567 (see RUN_FORMAT below).
CONTAINER_PORT = {
    "blue": "4567",
    "green": "4568",
}
# docker run template: {host port}, {SERVER_COLOR env var}, {container name}.
RUN_FORMAT = "docker run -d -p {}:4567 -e SERVER_COLOR={} --name {} -t server"
def production():
    """Target the production host (the "default" entry in ssh.config)."""
    env.hosts = ["default"]
def build():
    """Build the "server" Docker image from /src on the remote host."""
    with cd("/src"):
        sudo("docker build -t server:latest .")
def deploy():
    """Blue/green deploy: detect slots, start the new container,
    switch nginx over, then stop the old container."""
    prepare()
    run()
    switch()
    stop()
def prepare():
    """Detect the active deployment color and the running container.

    Sets env.current / env.next based on which nginx switch marker file
    exists, and env.current_container_id when a server container is found.
    """
    result = sudo("ls /etc/nginx/switch/blue", warn_only=True)
    if result.succeeded:
        env.current = "blue"
        env.next = "green"
    else:
        env.current = "green"
        env.next = "blue"
    result = sudo("docker ps | grep server | awk '{print $1}'")
    if result.succeeded:
        env.current_container_id = result.stdout
def _curl(port):
    """Return the HTTP status code of a quiet GET to localhost:<port>."""
    endpoint = "http://localhost:{}".format(port)
    # Suppress Fabric's command echo/output so only the status code remains.
    with hide('running', 'stdout', 'stderr', 'warnings'):
        return fabrun(
            "curl -LI {} -o /dev/null -w '%{{http_code}}\n' -s".format(
                endpoint))
def run():
    """Start the next-color server container and wait for it to respond.

    Polls the health endpoint up to 5 times (0.5s apart); on failure the
    freshly started container is stopped and the deploy is aborted.
    """
    server_name = "{}_{}".format(
        env.next, datetime.now().strftime("%Y%m%d%H%M%S"))
    run_result = sudo(RUN_FORMAT.format(
        CONTAINER_PORT[env.next], env.next, server_name
    ))
    with settings(warn_only=True, quiet=True):
        for i in range(5):
            result = _curl(CONTAINER_PORT[env.next])
            if result.stdout == "200":
                break
            time.sleep(.5)
    if result.stdout != "200":
        # Roll back: stop the container we just started, then abort.
        stop(run_result.stdout)
        abort("request failed.")
def switch():
    """Point nginx at the new color by swapping the switch marker files."""
    sudo("mkdir -p /etc/nginx/switch")
    sudo("touch /etc/nginx/switch/{}".format(env.next))
    sudo("rm -f /etc/nginx/switch/{}".format(env.current))
def stop(container_id=None):
    """Stop a Docker container on the remote host.

    Args:
        container_id: explicit container to stop; when None, falls back to
            the id recorded by prepare(). No-op when neither is available.
    """
    if container_id is None:
        # prepare() only sets env.current_container_id when a running
        # container was found; Fabric's env raises AttributeError for
        # missing keys, so use getattr to keep fresh hosts working.
        container_id = getattr(env, "current_container_id", None)
    if container_id:
        sudo("docker stop {}".format(container_id))
def ps(run_only="true"):
    """List running Docker containers; pass run_only=false to include stopped ones."""
    if run_only in ("true", "True"):
        sudo("docker ps")
        return
    sudo("docker ps -a")
def images():
    """List Docker images on the remote host."""
    sudo("docker images")
def cleancontainer():
    """Remove all stopped containers; tolerate failure when none exist."""
    sudo("docker rm `docker ps -a -q`", warn_only=True)
def removeimage():
    """Remove dangling (untagged) Docker images; tolerate failure when none exist."""
    sudo("docker rmi $(docker images | awk '/^<none>/ { print $3 }')", warn_only=True)
| [
"fabric.api.cd",
"time.sleep",
"fabric.api.sudo",
"datetime.datetime.now",
"fabric.api.settings",
"fabric.api.hide",
"fabric.api.abort"
] | [((639, 688), 'fabric.api.sudo', 'sudo', (['"""ls /etc/nginx/switch/blue"""'], {'warn_only': '(True)'}), "('ls /etc/nginx/switch/blue', warn_only=True)\n", (643, 688), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((850, 900), 'fabric.api.sudo', 'sudo', (['"""docker ps | grep server | awk \'{print $1}\'"""'], {}), '("docker ps | grep server | awk \'{print $1}\'")\n', (854, 900), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((1767, 1801), 'fabric.api.sudo', 'sudo', (['"""mkdir -p /etc/nginx/switch"""'], {}), "('mkdir -p /etc/nginx/switch')\n", (1771, 1801), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((2248, 2269), 'fabric.api.sudo', 'sudo', (['"""docker images"""'], {}), "('docker images')\n", (2252, 2269), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((2298, 2349), 'fabric.api.sudo', 'sudo', (['"""docker rm `docker ps -a -q`"""'], {'warn_only': '(True)'}), "('docker rm `docker ps -a -q`', warn_only=True)\n", (2302, 2349), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((2375, 2461), 'fabric.api.sudo', 'sudo', (['"""docker rmi $(docker images | awk \'/^<none>/ { print $3 }\')"""'], {'warn_only': '(True)'}), '("docker rmi $(docker images | awk \'/^<none>/ { print $3 }\')",\n warn_only=True)\n', (2379, 2461), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((485, 495), 'fabric.api.cd', 'cd', (['"""/src"""'], {}), "('/src')\n", (487, 495), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((505, 544), 'fabric.api.sudo', 'sudo', (['"""docker build -t server:latest ."""'], {}), "('docker build -t server:latest .')\n", (509, 544), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((1054, 1101), 'fabric.api.hide', 'hide', (['"""running"""', 
'"""stdout"""', '"""stderr"""', '"""warnings"""'], {}), "('running', 'stdout', 'stderr', 'warnings')\n", (1058, 1101), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((1444, 1480), 'fabric.api.settings', 'settings', ([], {'warn_only': '(True)', 'quiet': '(True)'}), '(warn_only=True, quiet=True)\n', (1452, 1480), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((1722, 1746), 'fabric.api.abort', 'abort', (['"""request failed."""'], {}), "('request failed.')\n", (1727, 1746), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((2171, 2188), 'fabric.api.sudo', 'sudo', (['"""docker ps"""'], {}), "('docker ps')\n", (2175, 2188), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((2207, 2227), 'fabric.api.sudo', 'sudo', (['"""docker ps -a"""'], {}), "('docker ps -a')\n", (2211, 2227), False, 'from fabric.api import cd, sudo, run as fabrun, env, settings, abort, hide\n'), ((1635, 1650), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1645, 1650), False, 'import time\n'), ((1289, 1303), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1301, 1303), False, 'from datetime import datetime\n')] |
# Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.
# See LICENSE in the project root for license information.
import requests
import time
from testutils import prefix,api_v0
# Test window: now .. now+10h, floored to whole multiples of 1000 seconds so
# the event boundaries used in assertions stay integral.
start, end = int(time.time()), int(time.time() + 36000)
# Use floor division: the original `/ 1000 * 1000` was Python-2 integer
# division; under Python 3 it is true division and yields a non-integral
# float instead of flooring to a multiple of 1000.
start = start // 1000 * 1000
end = end // 1000 * 1000
def override(start_time, end_time, ev_ids, user):
    """POST an events/override request and return the (asserted-200) response."""
    payload = {
        'start': start_time,
        'end': end_time,
        'event_ids': ev_ids,
        'user': user,
    }
    resp = requests.post(api_v0('events/override'), json=payload)
    assert resp.status_code == 200
    return resp
@prefix('test_v0_override_split')
def test_api_v0_override_split(team, user, role, event):
    """Overriding the middle of an event must split it into three pieces."""
    team_name = team.create()
    owner = user.create()
    substitute = user.create()
    role_name = role.create()
    user.add_to_team(owner, team_name)
    user.add_to_team(substitute, team_name)
    event_id = event.create({
        'start': start,
        'end': end,
        'user': owner,
        'team': team_name,
        'role': role_name,
    })
    resp = override(start + 100, end - 100, [event_id], substitute)
    assert len(resp.json()) == 3
    # The owner keeps the leading and trailing fragments.
    resp = requests.get(api_v0('events?user=' + owner))
    owner_events = sorted(resp.json(), key=lambda ev: ev['start'])
    assert len(owner_events) == 2
    assert owner_events[0]['end'] == start + 100
    assert owner_events[1]['start'] == end - 100
    # The substitute owns the middle fragment.
    resp = requests.get(api_v0('events?user=' + substitute))
    sub_events = resp.json()
    assert sub_events[0]['start'] == start + 100
    assert sub_events[0]['end'] == end - 100
@prefix('test_v0_override_edit_start')
def test_api_v0_override_edit_start(team, user, role, event):
    """Overriding from the event's start must trim its beginning."""
    team_name = team.create()
    owner = user.create()
    substitute = user.create()
    role_name = role.create()
    user.add_to_team(owner, team_name)
    user.add_to_team(substitute, team_name)
    event_id = event.create({
        'start': start,
        'end': end,
        'user': owner,
        'team': team_name,
        'role': role_name,
    })
    resp = override(start, end - 100, [event_id], substitute)
    assert len(resp.json()) == 2
    # The owner keeps only the trailing fragment.
    resp = requests.get(api_v0('events?user=' + owner))
    owner_events = resp.json()
    assert len(owner_events) == 1
    assert owner_events[0]['end'] == end
    assert owner_events[0]['start'] == end - 100
    # The substitute owns everything before that.
    resp = requests.get(api_v0('events?user=' + substitute))
    sub_events = resp.json()
    assert sub_events[0]['start'] == start
    assert sub_events[0]['end'] == end - 100
@prefix('test_api_v0_override_edit_end')
def test_api_v0_override_edit_end(team, user, role, event):
    """Overriding up to the event's end must trim its tail."""
    team_name = team.create()
    owner = user.create()
    substitute = user.create()
    role_name = role.create()
    user.add_to_team(owner, team_name)
    user.add_to_team(substitute, team_name)
    event_id = event.create({
        'start': start,
        'end': end,
        'user': owner,
        'team': team_name,
        'role': role_name,
    })
    resp = override(start + 100, end, [event_id], substitute)
    assert len(resp.json()) == 2
    # The owner keeps only the leading fragment.
    resp = requests.get(api_v0('events?user=' + owner))
    owner_events = resp.json()
    assert len(owner_events) == 1
    assert owner_events[0]['end'] == start + 100
    assert owner_events[0]['start'] == start
    # The substitute owns everything after that.
    resp = requests.get(api_v0('events?user=' + substitute))
    sub_events = resp.json()
    assert sub_events[0]['start'] == start + 100
    assert sub_events[0]['end'] == end
@prefix('test_api_v0_override_delete')
def test_api_v0_override_delete(team, user, role, event):
    """An override fully covering an event must replace it entirely."""
    team_name = team.create()
    owner = user.create()
    substitute = user.create()
    role_name = role.create()
    user.add_to_team(owner, team_name)
    user.add_to_team(substitute, team_name)
    event_id = event.create({
        'start': start,
        'end': end,
        'user': owner,
        'team': team_name,
        'role': role_name,
    })
    resp = override(start - 10, end + 10, [event_id], substitute)
    assert len(resp.json()) == 1
    # The original owner is left with no events.
    resp = requests.get(api_v0('events?user=' + owner))
    assert len(resp.json()) == 0
    # The substitute takes over the original span.
    resp = requests.get(api_v0('events?user=' + substitute))
    sub_events = resp.json()
    assert sub_events[0]['start'] == start
    assert sub_events[0]['end'] == end
# Test combination of above cases: the override window edits the end of the
# first event, deletes the two fully-covered middle events, and edits the
# start of the last event.
@prefix('test_api_v0_override_multiple')
def test_api_v0_override_multiple(team, user, role, event):
    team_name = team.create()
    role_name = role.create()
    user_name = user.create()
    override_user = user.create()
    user.add_to_team(user_name, team_name)
    user.add_to_team(override_user, team_name)
    # Four back-to-back events spanning start-1000 .. end+1000.
    ev1 = event.create({'start': start-1000,
                        'end': start+1000,
                        'user': user_name,
                        'team': team_name,
                        'role': role_name})
    ev2 = event.create({'start': start+1000,
                        'end': start+2000,
                        'user': user_name,
                        'team': team_name,
                        'role': role_name})
    ev3 = event.create({'start': start+2000,
                        'end': end-1000,
                        'user': user_name,
                        'team': team_name,
                        'role': role_name})
    ev4 = event.create({'start': end-1000,
                        'end': end+1000,
                        'user': user_name,
                        'team': team_name,
                        'role': role_name})
    re = override(start, end, [ev1, ev2, ev3, ev4], override_user)
    # Two trimmed originals + one new override event are returned.
    assert len(re.json()) == 3
    # Original user keeps only the fragments outside the override window.
    re = requests.get(api_v0('events?user=' + user_name))
    events = sorted(re.json(), key=lambda x: x['start'])
    assert len(events) == 2
    assert events[0]['start'] == start - 1000
    assert events[0]['end'] == start
    assert events[1]['start'] == end
    assert events[1]['end'] == end + 1000
    # Override user owns the entire window.
    re = requests.get(api_v0('events?user=' + override_user))
    events = re.json()
    assert events[0]['start'] == start
    assert events[0]['end'] == end
| [
"time.time",
"testutils.prefix",
"testutils.api_v0"
] | [((766, 798), 'testutils.prefix', 'prefix', (['"""test_v0_override_split"""'], {}), "('test_v0_override_split')\n", (772, 798), False, 'from testutils import prefix, api_v0\n'), ((1863, 1900), 'testutils.prefix', 'prefix', (['"""test_v0_override_edit_start"""'], {}), "('test_v0_override_edit_start')\n", (1869, 1900), False, 'from testutils import prefix, api_v0\n'), ((2914, 2953), 'testutils.prefix', 'prefix', (['"""test_api_v0_override_edit_end"""'], {}), "('test_api_v0_override_edit_end')\n", (2920, 2953), False, 'from testutils import prefix, api_v0\n'), ((3964, 4001), 'testutils.prefix', 'prefix', (['"""test_api_v0_override_delete"""'], {}), "('test_api_v0_override_delete')\n", (3970, 4001), False, 'from testutils import prefix, api_v0\n'), ((4894, 4933), 'testutils.prefix', 'prefix', (['"""test_api_v0_override_multiple"""'], {}), "('test_api_v0_override_multiple')\n", (4900, 4933), False, 'from testutils import prefix, api_v0\n'), ((242, 253), 'time.time', 'time.time', ([], {}), '()\n', (251, 253), False, 'import time\n'), ((454, 479), 'testutils.api_v0', 'api_v0', (['"""events/override"""'], {}), "('events/override')\n", (460, 479), False, 'from testutils import prefix, api_v0\n'), ((1424, 1458), 'testutils.api_v0', 'api_v0', (["('events?user=' + user_name)"], {}), "('events?user=' + user_name)\n", (1430, 1458), False, 'from testutils import prefix, api_v0\n'), ((1654, 1692), 'testutils.api_v0', 'api_v0', (["('events?user=' + override_user)"], {}), "('events?user=' + override_user)\n", (1660, 1692), False, 'from testutils import prefix, api_v0\n'), ((2525, 2559), 'testutils.api_v0', 'api_v0', (["('events?user=' + user_name)"], {}), "('events?user=' + user_name)\n", (2531, 2559), False, 'from testutils import prefix, api_v0\n'), ((2713, 2751), 'testutils.api_v0', 'api_v0', (["('events?user=' + override_user)"], {}), "('events?user=' + override_user)\n", (2719, 2751), False, 'from testutils import prefix, api_v0\n'), ((3576, 3610), 'testutils.api_v0', 
'api_v0', (["('events?user=' + user_name)"], {}), "('events?user=' + user_name)\n", (3582, 3610), False, 'from testutils import prefix, api_v0\n'), ((3768, 3806), 'testutils.api_v0', 'api_v0', (["('events?user=' + override_user)"], {}), "('events?user=' + override_user)\n", (3774, 3806), False, 'from testutils import prefix, api_v0\n'), ((4610, 4644), 'testutils.api_v0', 'api_v0', (["('events?user=' + user_name)"], {}), "('events?user=' + user_name)\n", (4616, 4644), False, 'from testutils import prefix, api_v0\n'), ((4720, 4758), 'testutils.api_v0', 'api_v0', (["('events?user=' + override_user)"], {}), "('events?user=' + override_user)\n", (4726, 4758), False, 'from testutils import prefix, api_v0\n'), ((6197, 6231), 'testutils.api_v0', 'api_v0', (["('events?user=' + user_name)"], {}), "('events?user=' + user_name)\n", (6203, 6231), False, 'from testutils import prefix, api_v0\n'), ((6503, 6541), 'testutils.api_v0', 'api_v0', (["('events?user=' + override_user)"], {}), "('events?user=' + override_user)\n", (6509, 6541), False, 'from testutils import prefix, api_v0\n'), ((260, 271), 'time.time', 'time.time', ([], {}), '()\n', (269, 271), False, 'import time\n')] |
import json
from genie import uid, with_clip
class MinifyJsonCommand:
    """Launcher command that minifies the JSON currently on the clipboard."""

    def metadata(self, parameters):
        """Describe the command for the launcher: uid, trigger arg, labels."""
        return {
            "uid": uid(1),
            "arg": "minify-json",
            "title": "Minify JSON",
            "subtitle": "Minify JSON from Clipboard",
        }

    @with_clip
    def process(self, input_from_clipboard):
        """Return the clipboard JSON re-serialized without any whitespace."""
        try:
            parsed = json.loads(input_from_clipboard)
            return json.dumps(parsed, separators=(",", ":"))
        except Exception as e:
            # Surface parse problems as a result string rather than raising.
            return "Error processing: {} - {}".format(input_from_clipboard, e)
# Module-level singleton instance registered with the launcher.
minify_json_command = MinifyJsonCommand()
| [
"json.loads",
"json.dumps",
"genie.uid"
] | [((395, 427), 'json.loads', 'json.loads', (['input_from_clipboard'], {}), '(input_from_clipboard)\n', (405, 427), False, 'import json\n'), ((447, 491), 'json.dumps', 'json.dumps', (['json_temp'], {'separators': "(',', ':')"}), "(json_temp, separators=(',', ':'))\n", (457, 491), False, 'import json\n'), ((174, 180), 'genie.uid', 'uid', (['(1)'], {}), '(1)\n', (177, 180), False, 'from genie import uid, with_clip\n')] |
from rest_framework import serializers
from .models import (
Program,
ComponentProgram,
Course,
Preparation,
Activity,
Assessment,
Artifact,
Strategy,
Node,
NodeStrategy,
StrategyActivity,
ComponentWeek,
WeekCourse,
Component,
Week,
Discipline,
Outcome,
OutcomeNode,
OutcomeStrategy,
OutcomePreparation,
OutcomeActivity,
OutcomeAssessment,
OutcomeArtifact,
OutcomeWeek,
OutcomeCourse,
OutcomeProgram,
NodeCompletionStatus,
ComponentCompletionStatus,
User,
)
class UserSerializer(serializers.ModelSerializer):
    """Minimal User projection: email and username only."""

    class Meta:
        model = User
        fields = ["email", "username"]
class OutcomeSerializer(serializers.ModelSerializer):
    """Serializer for Outcome objects; the author is rendered as a username slug."""

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )

    class Meta:
        model = Outcome
        fields = [
            "id",
            "title",
            "description",
            "created_on",
            "last_modified",
            "hash",
            "author",
        ]

    def create(self, validated_data):
        """Create an Outcome, resolving the author from the raw payload
        (the author field itself is read-only)."""
        author = User.objects.get(username=self.initial_data["author"])
        return Outcome.objects.create(author=author, **validated_data)

    def update(self, instance, validated_data):
        """Apply title/description changes; other fields are not writable here."""
        for field in ("title", "description"):
            setattr(
                instance,
                field,
                validated_data.get(field, getattr(instance, field)),
            )
        instance.save()
        return instance
class OutcomeNodeSerializer(serializers.ModelSerializer):
    """Through-model serializer linking an Outcome to a Node with a rank."""

    outcome = OutcomeSerializer(allow_null=True)

    class Meta:
        model = OutcomeNode
        fields = ["node", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and propagate nested outcome changes."""
        # Default to the current rank; the previous fallback of
        # `instance.title` was a copy/paste slip (this link model exposes
        # node/outcome/added_on/rank, not title).
        instance.rank = validated_data.get("rank", instance.rank)
        outcome_data = self.initial_data.pop("outcome")
        outcome_serializer = OutcomeSerializer(
            Outcome.objects.get(id=outcome_data["id"]), outcome_data
        )
        outcome_serializer.is_valid()
        outcome_serializer.save()
        instance.save()
        return instance
class ParentNodeSerializer(serializers.ModelSerializer):
    """Lightweight Node view used to embed a node's copy-source parent."""

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )

    class Meta:
        model = Node
        fields = [
            "id",
            "title",
            "description",
            "last_modified",
            "hash",
            "author",
            "work_classification",
            "activity_classification",
            "classification",
        ]
class ParentStrategySerializer(serializers.ModelSerializer):
    """Lightweight Strategy view used to embed a strategy's copy-source parent."""

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )

    class Meta:
        model = Strategy
        fields = [
            "id",
            "title",
            "description",
            "last_modified",
            "hash",
            "author",
        ]
class NodeSerializer(serializers.ModelSerializer):
    """Full Node representation, including its ranked outcome links and the
    parent node it was copied from (if any)."""

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    outcomenode_set = serializers.SerializerMethodField()
    parent_node = ParentNodeSerializer(allow_null=True)

    class Meta:
        model = Node
        fields = [
            "id",
            "title",
            "description",
            "created_on",
            "last_modified",
            "hash",
            "author",
            "work_classification",
            "activity_classification",
            "classification",
            "outcomenode_set",
            "is_original",
            "parent_node",
        ]

    def get_outcomenode_set(self, instance):
        # Outcome links are serialized in rank order.
        links = instance.outcomenode_set.all().order_by("rank")
        return OutcomeNodeSerializer(links, many=True).data

    def create(self, validated_data):
        # The author arrives as a username in the raw payload rather than
        # in validated data (the field is declared read-only).
        return Node.objects.create(
            author=User.objects.get(username=self.initial_data["author"]),
            **validated_data
        )

    def update(self, instance, validated_data):
        """Apply scalar field changes, then cascade into each outcome link."""
        instance.title = validated_data.get("title", instance.title)
        instance.description = validated_data.get(
            "description", instance.description
        )
        instance.classification = validated_data.get(
            "classification", instance.classification
        )
        instance.work_classification = validated_data.get(
            "work_classification", instance.work_classification
        )
        instance.activity_classification = validated_data.get(
            "activity_classification", instance.activity_classification
        )
        # Nested link updates use the raw payload; note is_valid() results
        # are not checked, so invalid nested data is silently skipped by DRF.
        for outcomenode_data in self.initial_data.pop("outcomenode_set"):
            outcomenode_serializer = OutcomeNodeSerializer(
                OutcomeNode.objects.get(id=outcomenode_data["id"]),
                data=outcomenode_data,
            )
            outcomenode_serializer.is_valid()
            outcomenode_serializer.save()
        instance.save()
        return instance
class NodeStrategySerializer(serializers.ModelSerializer):
    """Through-model serializer linking a Node into a Strategy with a rank."""

    node = NodeSerializer()

    class Meta:
        model = NodeStrategy
        fields = ["strategy", "node", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and propagate nested node changes."""
        # Use .get with the current rank as the default — consistent with
        # the other link serializers — so a payload without "rank" no
        # longer raises KeyError.
        instance.rank = validated_data.get("rank", instance.rank)
        node_data = self.initial_data.pop("node")
        node_serializer = NodeSerializer(
            Node.objects.get(id=node_data["id"]), node_data
        )
        node_serializer.is_valid()
        node_serializer.save()
        instance.save()
        return instance
class OutcomeStrategySerializer(serializers.ModelSerializer):
    """Through-model serializer linking an Outcome to a Strategy with a rank."""

    outcome = OutcomeSerializer()

    class Meta:
        model = OutcomeStrategy
        fields = ["strategy", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and propagate nested outcome changes."""
        # Default to the current rank; the previous fallback of
        # `instance.title` was a copy/paste slip (this link model exposes
        # strategy/outcome/added_on/rank, not title).
        instance.rank = validated_data.get("rank", instance.rank)
        outcome_data = self.initial_data.pop("outcome")
        outcome_serializer = OutcomeSerializer(
            Outcome.objects.get(id=outcome_data["id"]), outcome_data
        )
        outcome_serializer.is_valid()
        outcome_serializer.save()
        instance.save()
        return instance
class StrategySerializer(serializers.ModelSerializer):
    """Full Strategy representation with ranked node and outcome links,
    the copy-source parent, and the number of derived children."""

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    nodestrategy_set = serializers.SerializerMethodField()
    outcomestrategy_set = serializers.SerializerMethodField()
    parent_strategy = ParentStrategySerializer(allow_null=True)
    num_children = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = Strategy
        fields = [
            "id",
            "title",
            "description",
            "created_on",
            "last_modified",
            "hash",
            "default",
            "author",
            "nodestrategy_set",
            "outcomestrategy_set",
            "is_original",
            "parent_strategy",
            "num_children",
        ]

    def get_num_children(self, instance):
        # Count of strategies copied from this one.
        return instance.strategy_set.count()

    def get_nodestrategy_set(self, instance):
        # Node links are serialized in rank order.
        links = instance.nodestrategy_set.all().order_by("rank")
        return NodeStrategySerializer(links, many=True).data

    def get_outcomestrategy_set(self, instance):
        # Outcome links are serialized in rank order.
        links = instance.outcomestrategy_set.all().order_by("rank")
        return OutcomeStrategySerializer(links, many=True).data

    def create(self, validated_data):
        # The author arrives as a username in the raw payload (read-only field).
        return Strategy.objects.create(
            author=User.objects.get(username=self.initial_data["author"]),
            **validated_data
        )

    def update(self, instance, validated_data):
        """Apply scalar changes, then cascade into node and outcome links."""
        instance.title = validated_data.get("title", instance.title)
        instance.description = validated_data.get(
            "description", instance.description
        )
        for nodestrategy_data in self.initial_data.pop("nodestrategy_set"):
            nodestrategy_serializer = NodeStrategySerializer(
                NodeStrategy.objects.get(id=nodestrategy_data["id"]),
                data=nodestrategy_data,
            )
            nodestrategy_serializer.is_valid()
            nodestrategy_serializer.save()
        for outcomestrategy_data in self.initial_data.pop(
            "outcomestrategy_set"
        ):
            outcomestrategy_serializer = OutcomeStrategySerializer(
                OutcomeStrategy.objects.get(id=outcomestrategy_data["id"]),
                data=outcomestrategy_data,
            )
            outcomestrategy_serializer.is_valid()
            outcomestrategy_serializer.save()
        instance.save()
        return instance
class StrategyActivitySerializer(serializers.ModelSerializer):
    """Through-model serializer linking a Strategy into an Activity with a rank."""

    strategy = StrategySerializer()

    class Meta:
        model = StrategyActivity
        fields = ["activity", "strategy", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank, then cascade into the nested strategy."""
        instance.rank = validated_data.get("rank", instance.rank)
        nested = self.initial_data.pop("strategy")
        nested_serializer = StrategySerializer(
            Strategy.objects.get(id=nested["id"]), nested
        )
        nested_serializer.is_valid()
        nested_serializer.save()
        instance.save()
        return instance
class OutcomeActivitySerializer(serializers.ModelSerializer):
    """Through-model serializer linking an Outcome to an Activity with a rank."""

    outcome = OutcomeSerializer()

    class Meta:
        model = OutcomeActivity
        fields = ["activity", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank, then cascade into the nested outcome."""
        instance.rank = validated_data.get("rank", instance.rank)
        nested = self.initial_data.pop("outcome")
        nested_serializer = OutcomeSerializer(
            Outcome.objects.get(id=nested["id"]), nested
        )
        nested_serializer.is_valid()
        nested_serializer.save()
        instance.save()
        return instance
class ActivitySerializer(serializers.ModelSerializer):
    """Full Activity representation with ranked strategy and outcome links.

    create() can also materialize an entire default strategy/node tree when
    the raw payload carries a strategyactivity_set.
    """

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    strategyactivity_set = serializers.SerializerMethodField()
    outcomeactivity_set = serializers.SerializerMethodField()

    class Meta:
        model = Activity
        fields = [
            "id",
            "title",
            "description",
            "author",
            "created_on",
            "last_modified",
            "hash",
            "strategyactivity_set",
            "outcomeactivity_set",
            "is_original",
            "parent_activity",
        ]

    def get_strategyactivity_set(self, instance):
        # Strategy links are serialized in rank order.
        links = instance.strategyactivity_set.all().order_by("rank")
        return StrategyActivitySerializer(links, many=True).data

    def get_outcomeactivity_set(self, instance):
        # Outcome links are serialized in rank order.
        links = instance.outcomeactivity_set.all().order_by("rank")
        return OutcomeActivitySerializer(links, many=True).data

    def create(self, validated_data):
        # Author is optional: missing/unknown usernames yield author=None.
        if User.objects.filter(username=self.initial_data["author"]).exists():
            author = User.objects.get(username=self.initial_data["author"])
        else:
            author = None
        activity = Activity.objects.create(author=author, **validated_data)
        """
        do not update the following code, this will only be used for default strategy creation
        """
        if "strategyactivity_set" in self.initial_data.keys():
            # Only the newly-created strategies remain "default".
            Strategy.objects.filter(default=True).update(default=False)
            for strategyactivity_data in self.initial_data.pop(
                "strategyactivity_set"
            ):
                strategy_data = strategyactivity_data.pop("strategy")
                # pop() calls below strip keys that must not be forwarded to
                # objects.create(); the bound names themselves are unused
                # except for the nested nodestrategy_set.
                null_author = strategy_data.pop("author")
                nodestrategy_set = strategy_data.pop("nodestrategy_set")
                outcomestategy_set = strategy_data.pop("outcomestrategy_set")
                strategy = Strategy.objects.create(
                    author=author, **strategy_data
                )
                link = StrategyActivity.objects.create(
                    strategy=strategy,
                    activity=activity,
                    rank=strategyactivity_data["rank"],
                )
                for nodestrategy_data in nodestrategy_set:
                    node_data = nodestrategy_data.pop("node")
                    null_author = node_data.pop("author")
                    outcomenode_set = node_data.pop("outcomenode_set")
                    node = Node.objects.create(author=author, **node_data)
                    link = NodeStrategy.objects.create(
                        node=node,
                        strategy=strategy,
                        rank=nodestrategy_data["rank"],
                    )
        return activity

    def update(self, instance, validated_data):
        """Apply scalar changes, then cascade into strategy and outcome links."""
        instance.title = validated_data.get("title", instance.title)
        instance.description = validated_data.get(
            "description", instance.description
        )
        for strategyactivity_data in self.initial_data.pop(
            "strategyactivity_set"
        ):
            strategyactivity_serializer = StrategyActivitySerializer(
                StrategyActivity.objects.get(id=strategyactivity_data["id"]),
                data=strategyactivity_data,
            )
            strategyactivity_serializer.is_valid()
            strategyactivity_serializer.save()
        for outcomeactivity_data in self.initial_data.pop(
            "outcomeactivity_set"
        ):
            outcomeactivity_serializer = OutcomeActivitySerializer(
                OutcomeActivity.objects.get(id=outcomeactivity_data["id"]),
                data=outcomeactivity_data,
            )
            outcomeactivity_serializer.is_valid()
            outcomeactivity_serializer.save()
        instance.save()
        return instance
class OutcomePreparationSerializer(serializers.ModelSerializer):
    """Through-model serializer linking an Outcome to a Preparation with a rank."""

    outcome = OutcomeSerializer()

    class Meta:
        # Fixed: this serializer handles OutcomePreparation links (it lists
        # a "preparation" field below); it previously pointed at
        # OutcomeActivity, which has no such field.
        model = OutcomePreparation
        fields = ["preparation", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and propagate nested outcome changes."""
        # Default to the current rank; the previous fallback of
        # `instance.title` was a copy/paste slip.
        instance.rank = validated_data.get("rank", instance.rank)
        outcome_data = self.initial_data.pop("outcome")
        outcome_serializer = OutcomeSerializer(
            Outcome.objects.get(id=outcome_data["id"]), outcome_data
        )
        outcome_serializer.is_valid()
        outcome_serializer.save()
        instance.save()
        return instance
class PreparationSerializer(serializers.ModelSerializer):
    """Full serializer for a Preparation, including its ordered outcome links.

    ``author`` is read-only and rendered as a username; on create the author
    is resolved from the raw ``self.initial_data`` payload instead.
    """

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    # Computed field: outcome links ordered by rank (see method below).
    outcomepreparation_set = serializers.SerializerMethodField()

    class Meta:
        model = Preparation
        fields = [
            "id",
            "title",
            "description",
            "author",
            "created_on",
            "last_modified",
            "hash",
            "outcomepreparation_set",
            "is_original",
            "parent_preparation",
        ]

    def get_outcomepreparation_set(self, instance):
        """Return this preparation's outcome links sorted by rank."""
        links = instance.outcomepreparation_set.all().order_by("rank")
        return OutcomePreparationSerializer(links, many=True).data

    def create(self, validated_data):
        # The author field is read-only, so the username arrives in the raw
        # payload and is resolved to a User here.
        return Preparation.objects.create(
            author=User.objects.get(username=self.initial_data["author"]),
            **validated_data
        )

    def update(self, instance, validated_data):
        """Update scalar fields, then cascade into each outcome-link payload."""
        instance.title = validated_data.get("title", instance.title)
        instance.description = validated_data.get(
            "description", instance.description
        )
        for outcomepreparation_data in self.initial_data.pop(
            "outcomepreparation_set"
        ):
            outcomepreparation_serializer = OutcomePreparationSerializer(
                OutcomePreparation.objects.get(
                    id=outcomepreparation_data["id"]
                ),
                data=outcomepreparation_data,
            )
            # NOTE(review): is_valid() result ignored; DRF raises on save()
            # if validation failed — confirm intended.
            outcomepreparation_serializer.is_valid()
            outcomepreparation_serializer.save()
        instance.save()
        return instance
class OutcomeAssessmentSerializer(serializers.ModelSerializer):
    """Serializer for the link row joining an Outcome to an Assessment.

    Fix: ``update`` previously defaulted ``rank`` to ``instance.title``
    (copy-paste slip); it now falls back to the current ``instance.rank``.
    """

    outcome = OutcomeSerializer()

    class Meta:
        model = OutcomeAssessment
        fields = ["assessment", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and cascade the nested outcome payload."""
        instance.rank = validated_data.get("rank", instance.rank)
        outcome_data = self.initial_data.pop("outcome")
        outcome_serializer = OutcomeSerializer(
            Outcome.objects.get(id=outcome_data["id"]), outcome_data
        )
        outcome_serializer.is_valid()
        outcome_serializer.save()
        instance.save()
        return instance
class AssessmentSerializer(serializers.ModelSerializer):
    """Full serializer for an Assessment, including its ordered outcome links.

    ``author`` is read-only and rendered as a username; on create the author
    is resolved from the raw ``self.initial_data`` payload instead.
    """

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    # Computed field: outcome links ordered by rank (see method below).
    outcomeassessment_set = serializers.SerializerMethodField()

    class Meta:
        model = Assessment
        fields = [
            "id",
            "title",
            "description",
            "author",
            "created_on",
            "last_modified",
            "hash",
            "outcomeassessment_set",
            "is_original",
            "parent_assessment",
        ]

    def get_outcomeassessment_set(self, instance):
        """Return this assessment's outcome links sorted by rank."""
        links = instance.outcomeassessment_set.all().order_by("rank")
        return OutcomeAssessmentSerializer(links, many=True).data

    def create(self, validated_data):
        # Author is read-only on the field; resolve the username from the
        # raw payload.
        return Assessment.objects.create(
            author=User.objects.get(username=self.initial_data["author"]),
            **validated_data
        )

    def update(self, instance, validated_data):
        """Update scalar fields, then cascade into each outcome-link payload."""
        instance.title = validated_data.get("title", instance.title)
        instance.description = validated_data.get(
            "description", instance.description
        )
        for outcomeassessment_data in self.initial_data.pop(
            "outcomeassessment_set"
        ):
            outcomeassessment_serializer = OutcomeAssessmentSerializer(
                OutcomeAssessment.objects.get(id=outcomeassessment_data["id"]),
                data=outcomeassessment_data,
            )
            # NOTE(review): is_valid() result ignored — see class note above.
            outcomeassessment_serializer.is_valid()
            outcomeassessment_serializer.save()
        instance.save()
        return instance
class OutcomeArtifactSerializer(serializers.ModelSerializer):
    """Serializer for the link row joining an Outcome to an Artifact.

    Fix: ``update`` previously defaulted ``rank`` to ``instance.title``
    (copy-paste slip); it now falls back to the current ``instance.rank``.
    """

    outcome = OutcomeSerializer()

    class Meta:
        model = OutcomeArtifact
        fields = ["artifact", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and cascade the nested outcome payload."""
        instance.rank = validated_data.get("rank", instance.rank)
        outcome_data = self.initial_data.pop("outcome")
        outcome_serializer = OutcomeSerializer(
            Outcome.objects.get(id=outcome_data["id"]), outcome_data
        )
        outcome_serializer.is_valid()
        outcome_serializer.save()
        instance.save()
        return instance
class ArtifactSerializer(serializers.ModelSerializer):
    """Full serializer for an Artifact, including its ordered outcome links.

    ``author`` is read-only and rendered as a username; on create the author
    is resolved from the raw ``self.initial_data`` payload instead.
    """

    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    # Computed field: outcome links ordered by rank (see method below).
    outcomeartifact_set = serializers.SerializerMethodField()

    class Meta:
        model = Artifact
        fields = [
            "id",
            "title",
            "description",
            "author",
            "created_on",
            "last_modified",
            "hash",
            "outcomeartifact_set",
            "is_original",
            "parent_artifact",
        ]

    def get_outcomeartifact_set(self, instance):
        """Return this artifact's outcome links sorted by rank."""
        links = instance.outcomeartifact_set.all().order_by("rank")
        return OutcomeArtifactSerializer(links, many=True).data

    def create(self, validated_data):
        # Author is read-only on the field; resolve the username from the
        # raw payload.
        return Artifact.objects.create(
            author=User.objects.get(username=self.initial_data["author"]),
            **validated_data
        )

    def update(self, instance, validated_data):
        """Update scalar fields, then cascade into each outcome-link payload."""
        instance.title = validated_data.get("title", instance.title)
        instance.description = validated_data.get(
            "description", instance.description
        )
        for outcomeartifact_data in self.initial_data.pop(
            "outcomeartifact_set"
        ):
            outcomeartifact_serializer = OutcomeArtifactSerializer(
                OutcomeArtifact.objects.get(id=outcomeartifact_data["id"]),
                data=outcomeartifact_data,
            )
            # NOTE(review): is_valid() result ignored — see class note above.
            outcomeartifact_serializer.is_valid()
            outcomeartifact_serializer.save()
        instance.save()
        return instance
class WeekLevelComponentSerializer(serializers.ModelSerializer):
    """Polymorphic serializer for a week-level Component.

    A Component generically wraps one of Activity / Preparation /
    Assessment / Artifact.  The previous revision repeated the same
    four-way ``type(...)`` dispatch in four separate methods; it is now
    consolidated into the single ``_dispatch`` helper.  Behavior is
    unchanged, including the historical fallback: any type other than the
    first three is treated as an Artifact.
    """

    content_object = serializers.SerializerMethodField()
    content_type = serializers.SerializerMethodField()
    content_type_in_text = serializers.SerializerMethodField()

    class Meta:
        model = Component
        fields = [
            "content_object",
            "content_type",
            "content_type_in_text",
            "id",
        ]

    def _dispatch(self, instance):
        """Return ``(model, serializer_class, type_code, type_label)`` for
        the wrapped object.  Exact-type comparison is kept (not
        ``isinstance``) to preserve the original matching semantics."""
        obj_type = type(instance.content_object)
        if obj_type == Activity:
            return Activity, ActivitySerializer, 0, "activity"
        if obj_type == Preparation:
            return Preparation, PreparationSerializer, 1, "preparation"
        if obj_type == Assessment:
            return Assessment, AssessmentSerializer, 2, "assessment"
        # Historical fallback: anything else is treated as an Artifact.
        return Artifact, ArtifactSerializer, 3, "artifact"

    def get_content_object(self, instance):
        """Serialize the wrapped object with its type-specific serializer."""
        serializer_class = self._dispatch(instance)[1]
        return serializer_class(instance.content_object).data

    def get_content_type(self, instance):
        """Return the numeric type code (0-3) of the wrapped object."""
        return self._dispatch(instance)[2]

    def get_content_type_in_text(self, instance):
        """Return the lowercase type label of the wrapped object."""
        return self._dispatch(instance)[3]

    def update(self, instance, validated_data):
        """Cascade the nested ``content_object`` payload into the wrapped row."""
        content_object_data = self.initial_data.pop("content_object")
        model, serializer_class, _, _ = self._dispatch(instance)
        content_object_serializer = serializer_class(
            model.objects.get(id=content_object_data["id"]),
            data=content_object_data,
        )
        # NOTE(review): is_valid() result ignored; DRF raises on save()
        # if validation failed — confirm intended.
        content_object_serializer.is_valid()
        content_object_serializer.save()
        instance.save()
        return instance
class ComponentWeekSerializer(serializers.ModelSerializer):
    """Link serializer tying a Component into a Week at a given rank."""

    component = WeekLevelComponentSerializer()

    class Meta:
        model = ComponentWeek
        fields = ["week", "component", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Apply a new rank, then cascade the nested component payload."""
        instance.rank = validated_data.get("rank", instance.rank)
        nested_payload = self.initial_data.pop("component")
        nested_serializer = WeekLevelComponentSerializer(
            Component.objects.get(id=nested_payload["id"]), nested_payload
        )
        nested_serializer.is_valid()
        nested_serializer.save()
        instance.save()
        return instance
class OutcomeWeekSerializer(serializers.ModelSerializer):
    """Serializer for the link row joining an Outcome to a Week.

    Fix: ``update`` previously defaulted ``rank`` to ``instance.title``
    (copy-paste slip); it now falls back to the current ``instance.rank``.
    """

    outcome = OutcomeSerializer()

    class Meta:
        model = OutcomeWeek
        fields = ["week", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and cascade the nested outcome payload."""
        instance.rank = validated_data.get("rank", instance.rank)
        outcome_data = self.initial_data.pop("outcome")
        outcome_serializer = OutcomeSerializer(
            Outcome.objects.get(id=outcome_data["id"]), outcome_data
        )
        outcome_serializer.is_valid()
        outcome_serializer.save()
        instance.save()
        return instance
class WeekSerializer(serializers.ModelSerializer):
    """Full serializer for a Week, including its ranked component and
    outcome links.

    ``author`` is read-only and rendered as a username; on create the
    author is resolved from the raw ``self.initial_data`` payload instead.
    """

    # Computed fields: links ordered by rank (see methods below).
    componentweek_set = serializers.SerializerMethodField()
    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    outcomeweek_set = serializers.SerializerMethodField()

    class Meta:
        model = Week
        fields = [
            "id",
            "title",
            "hash",
            "created_on",
            "last_modified",
            "author",
            "componentweek_set",
            "outcomeweek_set",
        ]

    def get_componentweek_set(self, instance):
        """Return this week's component links sorted by rank."""
        links = instance.componentweek_set.all().order_by("rank")
        return ComponentWeekSerializer(links, many=True).data

    def get_outcomeweek_set(self, instance):
        """Return this week's outcome links sorted by rank."""
        links = instance.outcomeweek_set.all().order_by("rank")
        return OutcomeWeekSerializer(links, many=True).data

    def create(self, validated_data):
        # Author is read-only on the field; resolve the username from the
        # raw payload.
        return Week.objects.create(
            author=User.objects.get(username=self.initial_data["author"]),
            **validated_data
        )

    def update(self, instance, validated_data):
        """Update the title, then cascade into component and outcome links."""
        instance.title = validated_data.get("title", instance.title)
        for componentweek_data in self.initial_data.pop("componentweek_set"):
            componentweek_serializer = ComponentWeekSerializer(
                ComponentWeek.objects.get(id=componentweek_data["id"]),
                data=componentweek_data,
            )
            # NOTE(review): is_valid() result ignored; DRF raises on save()
            # if validation failed — confirm intended.
            componentweek_serializer.is_valid()
            componentweek_serializer.save()
        for outcomeweek_data in self.initial_data.pop("outcomeweek_set"):
            outcomeweek_serializer = OutcomeWeekSerializer(
                OutcomeWeek.objects.get(id=outcomeweek_data["id"]),
                data=outcomeweek_data,
            )
            outcomeweek_serializer.is_valid()
            outcomeweek_serializer.save()
        instance.save()
        return instance
class DisciplineSerializer(serializers.ModelSerializer):
    """Flat serializer exposing a Discipline's id and title."""

    class Meta:
        model = Discipline
        fields = ["id", "title"]
class WeekCourseSerializer(serializers.ModelSerializer):
    """Link serializer placing a Week inside a Course at a given rank."""

    week = WeekSerializer()

    class Meta:
        model = WeekCourse
        fields = ["course", "week", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Apply a new rank, then cascade the nested week payload."""
        instance.rank = validated_data.get("rank", instance.rank)
        nested_payload = self.initial_data.pop("week")
        nested_serializer = WeekSerializer(
            Week.objects.get(id=nested_payload["id"]), nested_payload
        )
        nested_serializer.is_valid()
        nested_serializer.save()
        instance.save()
        return instance
class OutcomeCourseSerializer(serializers.ModelSerializer):
    """Serializer for the link row joining an Outcome to a Course.

    Fix: ``update`` previously defaulted ``rank`` to ``instance.title``
    (copy-paste slip); it now falls back to the current ``instance.rank``.
    """

    outcome = OutcomeSerializer()

    class Meta:
        model = OutcomeCourse
        fields = ["course", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and cascade the nested outcome payload."""
        instance.rank = validated_data.get("rank", instance.rank)
        outcome_data = self.initial_data.pop("outcome")
        outcome_serializer = OutcomeSerializer(
            Outcome.objects.get(id=outcome_data["id"]), outcome_data
        )
        outcome_serializer.is_valid()
        outcome_serializer.save()
        instance.save()
        return instance
class CourseSerializer(serializers.ModelSerializer):
    """Full serializer for a Course, including ranked week and outcome links
    and the nested read-only discipline.

    ``author`` is read-only and rendered as a username; on create the
    author is resolved from the raw ``self.initial_data`` payload instead.
    """

    # Computed fields: links ordered by rank (see methods below).
    weekcourse_set = serializers.SerializerMethodField()
    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    discipline = DisciplineSerializer(read_only=True)
    outcomecourse_set = serializers.SerializerMethodField()

    class Meta:
        model = Course
        fields = [
            "id",
            "title",
            "description",
            "author",
            "created_on",
            "last_modified",
            "hash",
            "weekcourse_set",
            "outcomecourse_set",
            "discipline",
            "is_original",
            "parent_course",
        ]

    def get_weekcourse_set(self, instance):
        """Return this course's week links sorted by rank."""
        links = instance.weekcourse_set.all().order_by("rank")
        return WeekCourseSerializer(links, many=True).data

    def get_outcomecourse_set(self, instance):
        """Return this course's outcome links sorted by rank."""
        links = instance.outcomecourse_set.all().order_by("rank")
        return OutcomeCourseSerializer(links, many=True).data

    def create(self, validated_data):
        # Author is read-only on the field; resolve the username from the
        # raw payload.
        return Course.objects.create(
            author=User.objects.get(username=self.initial_data["author"]),
            **validated_data
        )

    def update(self, instance, validated_data):
        """Update scalar fields, then cascade into week and outcome links."""
        instance.title = validated_data.get("title", instance.title)
        instance.description = validated_data.get(
            "description", instance.description
        )
        for weekcourse_data in self.initial_data.pop("weekcourse_set"):
            weekcourse_serializer = WeekCourseSerializer(
                WeekCourse.objects.get(id=weekcourse_data["id"]),
                data=weekcourse_data,
            )
            # NOTE(review): is_valid() result ignored; DRF raises on save()
            # if validation failed — confirm intended.
            weekcourse_serializer.is_valid()
            weekcourse_serializer.save()
        for outcomecourse_data in self.initial_data.pop("outcomecourse_set"):
            outcomecourse_serializer = OutcomeCourseSerializer(
                OutcomeCourse.objects.get(id=outcomecourse_data["id"]),
                data=outcomecourse_data,
            )
            outcomecourse_serializer.is_valid()
            outcomecourse_serializer.save()
        instance.save()
        return instance
class ProgramLevelComponentSerializer(serializers.ModelSerializer):
    """Polymorphic serializer for a program-level Component.

    At program level a Component wraps either a Course or, failing an
    exact type match, an Assessment (the historical fallback).
    """

    content_object = serializers.SerializerMethodField()
    content_type = serializers.SerializerMethodField()
    content_type_in_text = serializers.SerializerMethodField()

    class Meta:
        model = Component
        fields = [
            "content_object",
            "content_type",
            "content_type_in_text",
            "id",
        ]

    def get_content_object(self, instance):
        """Serialize the wrapped object with its type-specific serializer."""
        wraps_course = type(instance.content_object) == Course
        serializer_class = CourseSerializer if wraps_course else AssessmentSerializer
        return serializer_class(instance.content_object).data

    def get_content_type(self, instance):
        """Numeric type code: 0 for Course, 1 otherwise (Assessment)."""
        return 0 if type(instance.content_object) == Course else 1

    def get_content_type_in_text(self, instance):
        """Lowercase type label of the wrapped object."""
        return "course" if type(instance.content_object) == Course else "assessment"

    def update(self, instance, validated_data):
        """Cascade the nested ``content_object`` payload into the wrapped row."""
        content_object_data = self.initial_data.pop("content_object")
        if type(instance.content_object) == Course:
            model, serializer_class = Course, CourseSerializer
        else:
            model, serializer_class = Assessment, AssessmentSerializer
        content_object_serializer = serializer_class(
            model.objects.get(id=content_object_data["id"]),
            data=content_object_data,
        )
        content_object_serializer.is_valid()
        content_object_serializer.save()
        instance.save()
        return instance
class ComponentProgramSerializer(serializers.ModelSerializer):
    """Link serializer tying a Component into a Program at a given rank."""

    component = ProgramLevelComponentSerializer()

    class Meta:
        model = ComponentProgram
        fields = ["program", "component", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Apply a new rank, then cascade the nested component payload."""
        instance.rank = validated_data.get("rank", instance.rank)
        nested_payload = self.initial_data.pop("component")
        nested_serializer = ProgramLevelComponentSerializer(
            Component.objects.get(id=nested_payload["id"]), nested_payload
        )
        nested_serializer.is_valid()
        nested_serializer.save()
        instance.save()
        return instance
class OutcomeProgramSerializer(serializers.ModelSerializer):
    """Serializer for the link row joining an Outcome to a Program.

    Fix: ``update`` previously defaulted ``rank`` to ``instance.title``
    (copy-paste slip); it now falls back to the current ``instance.rank``.

    NOTE(review): the field list names ``"course"`` although the model is
    ``OutcomeProgram`` (sibling ``ComponentProgramSerializer`` uses
    ``"program"``); left unchanged here — confirm against the model's
    actual field name before renaming.
    """

    outcome = OutcomeSerializer()

    class Meta:
        model = OutcomeProgram
        fields = ["course", "outcome", "added_on", "rank", "id"]

    def update(self, instance, validated_data):
        """Update the link's rank and cascade the nested outcome payload."""
        instance.rank = validated_data.get("rank", instance.rank)
        outcome_data = self.initial_data.pop("outcome")
        outcome_serializer = OutcomeSerializer(
            Outcome.objects.get(id=outcome_data["id"]), outcome_data
        )
        outcome_serializer.is_valid()
        outcome_serializer.save()
        instance.save()
        return instance
class ProgramSerializer(serializers.ModelSerializer):
    """Full serializer for a Program, including its ranked component and
    outcome links.

    ``author`` is read-only and rendered as a username; on create the
    author is resolved from the raw ``self.initial_data`` payload instead.
    """

    # Computed fields: links ordered by rank (see methods below).
    componentprogram_set = serializers.SerializerMethodField()
    author = serializers.SlugRelatedField(
        read_only=True, slug_field="username"
    )
    outcomeprogram_set = serializers.SerializerMethodField()

    class Meta:
        model = Program
        fields = [
            "id",
            "title",
            "description",
            "author",
            "created_on",
            "last_modified",
            "hash",
            "componentprogram_set",
            "outcomeprogram_set",
        ]

    def get_componentprogram_set(self, instance):
        """Return this program's component links sorted by rank."""
        links = instance.componentprogram_set.all().order_by("rank")
        return ComponentProgramSerializer(links, many=True).data

    def get_outcomeprogram_set(self, instance):
        """Return this program's outcome links sorted by rank."""
        links = instance.outcomeprogram_set.all().order_by("rank")
        return OutcomeProgramSerializer(links, many=True).data

    def create(self, validated_data):
        # Author is read-only on the field; resolve the username from the
        # raw payload.
        return Program.objects.create(
            author=User.objects.get(username=self.initial_data["author"]),
            **validated_data
        )

    def update(self, instance, validated_data):
        """Update scalar fields, then cascade into component and outcome links."""
        instance.title = validated_data.get("title", instance.title)
        instance.description = validated_data.get(
            "description", instance.description
        )
        for componentprogram_data in self.initial_data.pop(
            "componentprogram_set"
        ):
            componentprogram_serializer = ComponentProgramSerializer(
                ComponentProgram.objects.get(id=componentprogram_data["id"]),
                data=componentprogram_data,
            )
            # NOTE(review): is_valid() result ignored; DRF raises on save()
            # if validation failed — confirm intended.
            componentprogram_serializer.is_valid()
            componentprogram_serializer.save()
        for outcomeprogram_data in self.initial_data.pop("outcomeprogram_set"):
            outcomeprogram_serializer = OutcomeProgramSerializer(
                OutcomeProgram.objects.get(id=outcomeprogram_data["id"]),
                data=outcomeprogram_data,
            )
            outcomeprogram_serializer.is_valid()
            outcomeprogram_serializer.save()
        instance.save()
        return instance
# Registry mapping each model's lowercase name to the serializer that
# renders it; used for generic, name-based serializer lookups.
serializer_lookups = {
    "node": NodeSerializer,
    "strategy": StrategySerializer,
    "activity": ActivitySerializer,
    "assessment": AssessmentSerializer,
    "preparation": PreparationSerializer,
    "artifact": ArtifactSerializer,
    "week": WeekSerializer,
    "course": CourseSerializer,
    "program": ProgramSerializer,
}
| [
"rest_framework.serializers.SlugRelatedField",
"rest_framework.serializers.SerializerMethodField"
] | [((773, 840), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (801, 840), False, 'from rest_framework import serializers\n'), ((2265, 2332), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (2293, 2332), False, 'from rest_framework import serializers\n'), ((2732, 2799), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (2760, 2799), False, 'from rest_framework import serializers\n'), ((3089, 3156), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (3117, 3156), False, 'from rest_framework import serializers\n'), ((3194, 3229), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (3227, 3229), False, 'from rest_framework import serializers\n'), ((6356, 6423), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (6384, 6423), False, 'from rest_framework import serializers\n'), ((6462, 6497), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (6495, 6497), False, 'from rest_framework import serializers\n'), ((6525, 6560), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (6558, 6560), False, 'from rest_framework import serializers\n'), ((6646, 6695), 'rest_framework.serializers.SerializerMethodField', 
'serializers.SerializerMethodField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (6679, 6695), False, 'from rest_framework import serializers\n'), ((10121, 10188), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (10149, 10188), False, 'from rest_framework import serializers\n'), ((10231, 10266), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (10264, 10266), False, 'from rest_framework import serializers\n'), ((10294, 10329), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (10327, 10329), False, 'from rest_framework import serializers\n'), ((14711, 14778), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (14739, 14778), False, 'from rest_framework import serializers\n'), ((14823, 14858), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (14856, 14858), False, 'from rest_framework import serializers\n'), ((17036, 17103), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (17064, 17103), False, 'from rest_framework import serializers\n'), ((17147, 17182), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (17180, 17182), False, 'from rest_framework import serializers\n'), ((19298, 19365), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (19326, 19365), False, 'from rest_framework import 
serializers\n'), ((19407, 19442), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (19440, 19442), False, 'from rest_framework import serializers\n'), ((20910, 20945), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (20943, 20945), False, 'from rest_framework import serializers\n'), ((20966, 21001), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (20999, 21001), False, 'from rest_framework import serializers\n'), ((21030, 21065), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (21063, 21065), False, 'from rest_framework import serializers\n'), ((24981, 25016), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (25014, 25016), False, 'from rest_framework import serializers\n'), ((25031, 25098), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (25059, 25098), False, 'from rest_framework import serializers\n'), ((25136, 25171), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (25169, 25171), False, 'from rest_framework import serializers\n'), ((28269, 28304), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (28302, 28304), False, 'from rest_framework import serializers\n'), ((28319, 28386), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (28347, 28386), False, 'from rest_framework import serializers\n'), ((28481, 28516), 'rest_framework.serializers.SerializerMethodField', 
'serializers.SerializerMethodField', ([], {}), '()\n', (28514, 28516), False, 'from rest_framework import serializers\n'), ((30490, 30525), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (30523, 30525), False, 'from rest_framework import serializers\n'), ((30546, 30581), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (30579, 30581), False, 'from rest_framework import serializers\n'), ((30610, 30645), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (30643, 30645), False, 'from rest_framework import serializers\n'), ((33488, 33523), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (33521, 33523), False, 'from rest_framework import serializers\n'), ((33538, 33605), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""username"""'}), "(read_only=True, slug_field='username')\n", (33566, 33605), False, 'from rest_framework import serializers\n'), ((33646, 33681), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (33679, 33681), False, 'from rest_framework import serializers\n')] |
import sys
import math
def euclides(m, n):
    """Extended Euclidean algorithm with step-by-step tracing.

    Prints every division step ``r0 = r1 * q + res``, then
    ``MCD(m,n)=gcd`` together with Bezout coefficients x, y such that
    ``x*m + y*n == gcd(m, n)``, and finally the iteration count.

    Assumes positive integers m and n (the sign bookkeeping via
    ``(-1) ** i`` presumes the classic positive-input recurrence).

    Fix vs. the previous revision: quotients and sign factors were
    computed with floats (``int(r0 / r1)`` and ``math.pow(-1, i)``),
    which silently loses precision for large integers; all arithmetic is
    now exact integer arithmetic (``//`` and ``(-1) ** i``).  Output is
    unchanged for inputs that were previously within float precision.
    """
    x0, x1 = 1, 0
    y0, y1 = 0, 1
    r0, r1 = m, n
    r, i = n, 2
    c = 0
    while r != 0:
        q = r0 // r1  # exact integer quotient (was int(r0 / r1))
        x = x1 * q + x0
        y = y1 * q + y0
        res = r0 % r1
        print("{} = {} * {} + {}".format(int(r0), int(r1), int(q), int(res)))
        # Recompute the remainder from the Bezout identity; the (-1)**i
        # factors carry the alternating signs of the coefficients.
        r = ((-1) ** i) * x * m + ((-1) ** (i + 1)) * y * n
        i = i + 1
        x0, x1 = x1, x
        y0, y1 = y1, y
        r0, r1 = r1, r
        c = c + 1
    # Final signed Bezout coefficients and gcd.
    x = (-1) ** i * x0
    y = (-1) ** (i - 1) * y0
    r = int(x * m + y * n)
    print("MCD({},{})={}".format(m, n, r))
    if y < 0:
        y2 = int(-1 * y)
        print("{}*{} - {}*{} = {}".format(int(x), int(m), int(y2), int(n), int(r)))
    else:
        print("{}*{} + {}*{} = {}".format(int(x), int(m), int(y), int(n), int(r)))
    print("Número de iteraciones = {}".format(c))
if __name__ == '__main__':
    # CLI entry point: python <script> <m> <n>
    euclides(int(sys.argv[1]), int(sys.argv[2]))
| [
"math.pow"
] | [((498, 513), 'math.pow', 'math.pow', (['(-1)', 'i'], {}), '(-1, i)\n', (506, 513), False, 'import math\n'), ((527, 546), 'math.pow', 'math.pow', (['(-1)', '(i - 1)'], {}), '(-1, i - 1)\n', (535, 546), False, 'import math\n'), ((328, 343), 'math.pow', 'math.pow', (['(-1)', 'i'], {}), '(-1, i)\n', (336, 343), False, 'import math\n'), ((356, 375), 'math.pow', 'math.pow', (['(-1)', '(i + 1)'], {}), '(-1, i + 1)\n', (364, 375), False, 'import math\n')] |
from fractions import Fraction
from functools import partial

from hypothesis import strategies

# Symmetric bound applied to every generated coordinate value.
MAX_NUMBER = 10 ** 10
MIN_NUMBER = -MAX_NUMBER

# One bounded-strategy factory per supported coordinate type; each factory
# is called as factory(min_value, max_value).
coordinates_strategies_factories = {
    float: partial(
        strategies.floats, allow_nan=False, allow_infinity=False
    ),
    Fraction: partial(strategies.fractions, max_denominator=MAX_NUMBER),
    int: strategies.integers,
}

# Strategy that first picks a coordinate type, then draws bounded values
# of that type.
coordinates_strategies = strategies.sampled_from([
    make_strategy(MIN_NUMBER, MAX_NUMBER)
    for make_strategy in coordinates_strategies_factories.values()
])
| [
"functools.partial"
] | [((192, 257), 'functools.partial', 'partial', (['strategies.floats'], {'allow_nan': '(False)', 'allow_infinity': '(False)'}), '(strategies.floats, allow_nan=False, allow_infinity=False)\n', (199, 257), False, 'from functools import partial\n'), ((311, 368), 'functools.partial', 'partial', (['strategies.fractions'], {'max_denominator': 'MAX_NUMBER'}), '(strategies.fractions, max_denominator=MAX_NUMBER)\n', (318, 368), False, 'from functools import partial\n')] |
import numpy as np
from sklearn import svm

from data_loader import data_loader

# N appears unused below — TODO confirm and remove if dead.
N = 100
# Number of binary label columns per sample.
NUM_CLASS = 4

# Glob pattern for the audio files consumed by data_loader.
data_dir = "C:\\Users\\wsy\\Documents\\Audio\\*.m4a"
data_X, data_Y = data_loader(data_dir)
print(len(data_X))

clf_list = []
# Train one one-vs-rest linear SVM per label column, per data item.
for idx in range(NUM_CLASS):
    for i, X in enumerate(data_X):
        if X == []:
            # NOTE(review): this only skips plain empty lists; if
            # data_loader yields numpy arrays the comparison is
            # elementwise — confirm the intended emptiness check.
            continue
        Y = data_Y[i][:, idx]
        clf = svm.LinearSVC()
        clf.fit(X, Y)
        # validation
        predicted = clf.predict(X[0:1, :])
        print("Predict:", predicted, "\t Ground Truth:", Y[0])
        clf_list.append(clf)

# Spot-check one held-in sample against one classifier per class.
# NOTE(review): clf_list accumulates len(data_X) classifiers per class,
# so clf_list[idx] only indexes as intended when len(data_X) == 1 —
# confirm.
X = data_X[0]
test_X = X[3:4, :]
test_Y = data_Y[0][3:4, :]
test_predict = np.zeros(NUM_CLASS)
for idx in range(NUM_CLASS):
    clf = clf_list[idx]
    test_predict[idx] = clf.predict(test_X)
print("Predict:", test_predict, "\n Ground Truth:", test_Y)
| [
"data_loader.data_loader",
"numpy.zeros",
"sklearn.svm.LinearSVC"
] | [((181, 202), 'data_loader.data_loader', 'data_loader', (['data_dir'], {}), '(data_dir)\n', (192, 202), False, 'from data_loader import data_loader\n'), ((681, 700), 'numpy.zeros', 'np.zeros', (['NUM_CLASS'], {}), '(NUM_CLASS)\n', (689, 700), True, 'import numpy as np\n'), ((397, 412), 'sklearn.svm.LinearSVC', 'svm.LinearSVC', ([], {}), '()\n', (410, 412), False, 'from sklearn import svm\n')] |
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.contenttypes.models import ContentType
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from .forms import CommentForm
from .models import Comment
@login_required
def comment_delete(request, pk):
    """Confirm (GET) or perform (POST) deletion of a comment.

    Only the comment's owner may delete it; any other user gets a 403.
    On success, redirects back to the commented object's page.
    """
    try:
        comment = Comment.objects.get(pk=pk)
    except (Comment.DoesNotExist, ValueError):
        # Narrowed from a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit and hid unrelated errors.
        # ValueError covers a malformed (non-integer) pk.
        raise Http404
    if comment.user != request.user:
        response = HttpResponse("You don't have permission to do this")
        response.status_code = 403
        return response
    if request.method == 'POST':
        messages.success(request, "Comment successfully deleted")
        comment.delete()
        return HttpResponseRedirect(comment.content_object.get_absolute_url())
    return render(request, "delete.html", {
        "comment": comment,
    })
def comment_thread(request, pk):
    """Render a comment thread and handle replies posted to it.

    GET shows the thread anchored at the top-level parent comment.
    POST (from an authenticated user) creates a reply — optionally nested
    under ``parent_id`` — then redirects back to the owning object's page.
    """
    try:
        comment = Comment.objects.get(pk=pk)
    except (Comment.DoesNotExist, ValueError):
        # Narrowed from a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit and hid unrelated errors.
        raise Http404
    initial_data = {
        "content_type": comment.content_type,
        "object_id": comment.object_id,
    }
    # Always anchor the rendered thread at the top-level parent.
    if not comment.is_parent:
        comment = comment.parent
    form = CommentForm(request.POST or None, initial=initial_data)
    # NOTE(review): ``is_authenticated()`` is called as a method — on
    # Django >= 1.10 it is a property; confirm the project's version.
    if form.is_valid() and request.user.is_authenticated():
        c_type = form.cleaned_data.get("content_type")
        content_type = ContentType.objects.get(model=c_type)
        obj_id = form.cleaned_data.get("object_id")
        content = form.cleaned_data.get("content")
        parent_object = None
        try:
            parent_id = int(request.POST.get("parent_id"))
        except (TypeError, ValueError):
            # parent_id missing or non-numeric -> create a top-level comment.
            parent_id = None
        if parent_id:
            parent_queryset = Comment.objects.filter(id=parent_id)
            if parent_queryset.exists() and parent_queryset.count() == 1:
                parent_object = parent_queryset.first()
        new_comment, created = Comment.objects.get_or_create(
            user=request.user,
            content_type=content_type,
            object_id=obj_id,
            content=content,
            parent=parent_object,
        )
        # Reload the same page to clean up the input field.
        return HttpResponseRedirect(
            new_comment.content_object.get_absolute_url()
        )
    return render(request, "thread.html", {
        "comment": comment,
        "form": form,
    })
| [
"django.shortcuts.render",
"django.http.HttpResponse",
"django.contrib.contenttypes.models.ContentType.objects.get",
"django.contrib.messages.success"
] | [((842, 894), 'django.shortcuts.render', 'render', (['request', '"""delete.html"""', "{'comment': comment}"], {}), "(request, 'delete.html', {'comment': comment})\n", (848, 894), False, 'from django.shortcuts import render\n'), ((2344, 2410), 'django.shortcuts.render', 'render', (['request', '"""thread.html"""', "{'comment': comment, 'form': form}"], {}), "(request, 'thread.html', {'comment': comment, 'form': form})\n", (2350, 2410), False, 'from django.shortcuts import render\n'), ((513, 565), 'django.http.HttpResponse', 'HttpResponse', (['"""You don\'t have permission to do this"""'], {}), '("You don\'t have permission to do this")\n', (525, 565), False, 'from django.http import Http404, HttpResponse, HttpResponseRedirect\n'), ((667, 724), 'django.contrib.messages.success', 'messages.success', (['request', '"""Comment successfully deleted"""'], {}), "(request, 'Comment successfully deleted')\n", (683, 724), False, 'from django.contrib import messages\n'), ((1418, 1455), 'django.contrib.contenttypes.models.ContentType.objects.get', 'ContentType.objects.get', ([], {'model': 'c_type'}), '(model=c_type)\n', (1441, 1455), False, 'from django.contrib.contenttypes.models import ContentType\n')] |
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
from openbook_notifications.models.notification import Notification
from openbook_posts.models import PostReaction
class PostReactionNotification(models.Model):
    """Wrapper row linking a PostReaction to the generic Notification system."""

    # Reverse generic link so Notification.content_object can point here.
    notification = GenericRelation(Notification, related_name='post_reaction_notifications')
    post_reaction = models.ForeignKey(PostReaction, on_delete=models.CASCADE)

    @classmethod
    def create_post_reaction_notification(cls, post_reaction_id, owner_id):
        """Create the wrapper row plus the owner's Notification for it."""
        post_reaction_notification = cls.objects.create(post_reaction_id=post_reaction_id)
        Notification.create_notification(type=Notification.POST_REACTION,
                                         content_object=post_reaction_notification,
                                         owner_id=owner_id)
        return post_reaction_notification

    @classmethod
    def delete_post_reaction_notification(cls, post_reaction_id, owner_id):
        """Delete the notification a specific owner received for the reaction."""
        cls.objects.filter(post_reaction_id=post_reaction_id,
                           notification__owner_id=owner_id).delete()

    @classmethod
    def delete_post_reaction_notifications(cls, post_reaction_id):
        """Delete every notification generated by the reaction."""
        cls.objects.filter(post_reaction_id=post_reaction_id).delete()
| [
"django.db.models.ForeignKey",
"openbook_notifications.models.notification.Notification.create_notification",
"django.contrib.contenttypes.fields.GenericRelation"
] | [((274, 347), 'django.contrib.contenttypes.fields.GenericRelation', 'GenericRelation', (['Notification'], {'related_name': '"""post_reaction_notifications"""'}), "(Notification, related_name='post_reaction_notifications')\n", (289, 347), False, 'from django.contrib.contenttypes.fields import GenericRelation\n'), ((368, 425), 'django.db.models.ForeignKey', 'models.ForeignKey', (['PostReaction'], {'on_delete': 'models.CASCADE'}), '(PostReaction, on_delete=models.CASCADE)\n', (385, 425), False, 'from django.db import models\n'), ((619, 750), 'openbook_notifications.models.notification.Notification.create_notification', 'Notification.create_notification', ([], {'type': 'Notification.POST_REACTION', 'content_object': 'post_reaction_notification', 'owner_id': 'owner_id'}), '(type=Notification.POST_REACTION,\n content_object=post_reaction_notification, owner_id=owner_id)\n', (651, 750), False, 'from openbook_notifications.models.notification import Notification\n')] |
import xarray as xr

from .np_deterministic import _pearson_r, _pearson_r_p_value, _rmse

# Expose all three public metrics; pearson_r_p_value was previously missing,
# which silently hid it from `from <module> import *`.
__all__ = ['pearson_r', 'pearson_r_p_value', 'rmse']
def pearson_r(a, b, dim):
    """
    Pearson's correlation coefficient.

    Computed lazily over labeled data by delegating to the NumPy kernel
    ``_pearson_r`` via :func:`xarray.apply_ufunc`.

    Parameters
    ----------
    a : Dataset, DataArray, GroupBy, Variable, numpy/dask arrays or scalars
        Mix of labeled and/or unlabeled arrays to which to apply the function.
    b : Dataset, DataArray, GroupBy, Variable, numpy/dask arrays or scalars
        Mix of labeled and/or unlabeled arrays to which to apply the function.
    dim : str
        The dimension to apply the correlation along.

    Returns
    -------
    Single value or tuple of Dataset, DataArray, Variable, dask.array.Array or
    numpy.ndarray, the first type on that list to appear on an input.
        Pearson's correlation coefficient.

    See Also
    --------
    scipy.stats.pearsonr
    xarray.apply_ufunc
    """
    # input_core_dims moves `dim` to the last axis, so the kernel reduces axis=-1.
    return xr.apply_ufunc(_pearson_r, a, b,
                          input_core_dims=[[dim], [dim]],
                          kwargs={'axis': -1})
def pearson_r_p_value(a, b, dim):
    """
    2-tailed p-value associated with Pearson's correlation coefficient.

    Computed lazily over labeled data by delegating to the NumPy kernel
    ``_pearson_r_p_value`` via :func:`xarray.apply_ufunc`.

    Parameters
    ----------
    a : Dataset, DataArray, GroupBy, Variable, numpy/dask arrays or scalars
        Mix of labeled and/or unlabeled arrays to which to apply the function.
    b : Dataset, DataArray, GroupBy, Variable, numpy/dask arrays or scalars
        Mix of labeled and/or unlabeled arrays to which to apply the function.
    dim : str
        The dimension to apply the correlation along.

    Returns
    -------
    Single value or tuple of Dataset, DataArray, Variable, dask.array.Array or
    numpy.ndarray, the first type on that list to appear on an input.
        2-tailed p-value.

    See Also
    --------
    scipy.stats.pearsonr
    xarray.apply_ufunc
    """
    # input_core_dims moves `dim` to the last axis, so the kernel reduces axis=-1.
    return xr.apply_ufunc(_pearson_r_p_value, a, b,
                          input_core_dims=[[dim], [dim]],
                          kwargs={'axis': -1})
def rmse(a, b, dim):
    """
    Root Mean Squared Error.

    Computed lazily over labeled data by delegating to the NumPy kernel
    ``_rmse`` via :func:`xarray.apply_ufunc`.

    Parameters
    ----------
    a : Dataset, DataArray, GroupBy, Variable, numpy/dask arrays or scalars
        Mix of labeled and/or unlabeled arrays to which to apply the function.
    b : Dataset, DataArray, GroupBy, Variable, numpy/dask arrays or scalars
        Mix of labeled and/or unlabeled arrays to which to apply the function.
    dim : str
        The dimension to apply the error calculation along.

    Returns
    -------
    Single value or tuple of Dataset, DataArray, Variable, dask.array.Array or
    numpy.ndarray, the first type on that list to appear on an input.
        Root Mean Squared Error.

    See Also
    --------
    sklearn.metrics.mean_squared_error
    xarray.apply_ufunc
    """
    # input_core_dims moves `dim` to the last axis, so the kernel reduces axis=-1.
    return xr.apply_ufunc(_rmse, a, b,
                          input_core_dims=[[dim], [dim]],
                          kwargs={'axis': -1})
| [
"xarray.apply_ufunc"
] | [((920, 1010), 'xarray.apply_ufunc', 'xr.apply_ufunc', (['_pearson_r', 'a', 'b'], {'input_core_dims': '[[dim], [dim]]', 'kwargs': "{'axis': -1}"}), "(_pearson_r, a, b, input_core_dims=[[dim], [dim]], kwargs={\n 'axis': -1})\n", (934, 1010), True, 'import xarray as xr\n'), ((1887, 1984), 'xarray.apply_ufunc', 'xr.apply_ufunc', (['_pearson_r_p_value', 'a', 'b'], {'input_core_dims': '[[dim], [dim]]', 'kwargs': "{'axis': -1}"}), "(_pearson_r_p_value, a, b, input_core_dims=[[dim], [dim]],\n kwargs={'axis': -1})\n", (1901, 1984), True, 'import xarray as xr\n'), ((2826, 2911), 'xarray.apply_ufunc', 'xr.apply_ufunc', (['_rmse', 'a', 'b'], {'input_core_dims': '[[dim], [dim]]', 'kwargs': "{'axis': -1}"}), "(_rmse, a, b, input_core_dims=[[dim], [dim]], kwargs={'axis': -1}\n )\n", (2840, 2911), True, 'import xarray as xr\n')] |
import pandas as pd
from IPython.core.display import display, HTML
def table(data,
          title="Descriptive Stats",
          sub_title="",
          table_width=630,
          indexcol_width=150,
          return_html=False):
    '''Displays a publication quality data table
    with any number of columns and aspects. Works
    best for 6-7 or less columns datasets.

    data | DataFrame | a dataframe with numeric data columns
    title | str | the main title of the table
    sub_title | str | shown below the title
    table_width | int | the width of the table in pixels
    indexcol_width | int | the width of the index column in pixels.
    return_html | bool | when True, also return the generated HTML string.

    Width of the data columns is computed based on `table_width` and
    `indexcol_width` values.

    Returns the HTML string when `return_html` is True, otherwise None.
    '''
    # CSS/HTML template; _PLACEHOLDER_ tokens are substituted below.
    html = '''
    <style type=\"text/css\">
    .tg {
    border-collapse:collapse;
    border-spacing:0;
    border:none;}
    .tg td {
    font-family: Arial, sans-serif;
    font-size:14px;
    padding:10px 5px;
    border-style:solid;
    border-width:0px;
    overflow:hidden;
    word-break:normal;
    }
    .tg th {
    font-family:Arial, sans-serif,
    sans-serif;
    font-size:14px;
    font-weight:normal;
    padding:10px 5px;
    border-style:solid;
    border-width:0px;
    overflow:hidden;
    word-break:normal;
    }
    .tg .tg-index {
    font-family:Verdana, Geneva, sans-serif !important;
    text-align: left;
    padding-left: 10px;
    vertical-align:top
    }
    .tg .tg-anay {
    font-family:Verdana, Geneva, sans-serif !important;
    text-align:right;
    vertical-align:top
    }
    .tg .tg-jua3 {
    font-weight:bold;
    font-family:Verdana, Geneva, sans-serif !important;
    text-align:right;vertical-align:top
    }
    hr {
    height: 1px;
    background-color: #333;
    padding: 0px;
    margin: 0px;
    }
    .hr2 {
    height: 2px !important;
    background-color: #333;
    }
    table {
    table-layout: fixed;
    width: 500px;
    border-style:hidden;
    border-collapse: collapse;\"
    margin-top: 0px;
    padding-top: 0px;
    }
    .title {
    font-family: Arial, sans-serif;
    font-style: italic;
    font-size: 22px;
    font-weight: bold;
    padding-bottom: 0px;
    }
    .sub_title {
    font-size: 18px;
    font-family: Arial, sans-serif;
    margin-top: 8px !important;
    padding-bottom: 12px;
    }
    </style>
    <table class=\"tg\">
    <colgroup>
    <col style=\"width: _INDEXCOL_WIDTH_px\">
    <col style=\"width: _COL_WIDTH_px\">
    <col style=\"width: _COL_WIDTH_px\">
    <col style=\"width: _COL_WIDTH_px\">
    <col style=\"width: _COL_WIDTH_px\">
    </colgroup>
    <p class='title'> _TABLE_TITLE_ </p>
    <p class='sub_title'> _TABLE_SUBTITLE_ </p>
    <hr align=\"left\", width=\"_TABLE_WIDTH_\">
    <tr>
    <th class=\"tg-index\"></th>
    '''
    # Header row: one <th> per DataFrame column.
    for col in data.columns:
        html += '<th class=\"tg-anay\">' + str(col) + '</th>'
    # Body: one row per index value; cell values are thousands-formatted.
    for index in data.index:
        html += '</tr><tr><td class=\"tg-index\">' + str(index) + '</td>'
        for col in data.columns:
            html += '<td class=\"tg-jua3\">' + f'{data.loc[index][col]:,}' + '</td>'
    html += '''
    </tr>
    </table>
    <hr align=\"left\", width=\"_TABLE_WIDTH_\">
    '''
    # Data columns evenly split whatever width the index column leaves over.
    col_width = int((table_width - indexcol_width) / len(data.columns))
    html = html.replace('_TABLE_TITLE_', title)
    html = html.replace('_TABLE_SUBTITLE_', sub_title)
    html = html.replace('_TABLE_WIDTH_', str(table_width))
    html = html.replace('_COL_WIDTH_', str(col_width))
    html = html.replace('_INDEXCOL_WIDTH_', str(indexcol_width))
    display(HTML(html))
    if return_html:
        # BUG FIX: previously `return print(html)`, which printed a second
        # copy to stdout and returned None. Return the HTML string instead.
        return html
"IPython.core.display.HTML"
] | [((4932, 4942), 'IPython.core.display.HTML', 'HTML', (['html'], {}), '(html)\n', (4936, 4942), False, 'from IPython.core.display import display, HTML\n')] |
#!/usr/bin/python
"""class Strategy"""
import models
from models.base_model import BaseModel, Base
from sqlalchemy import Column, String, Float, ForeignKey
from sqlalchemy.orm import relationship
class Strategy(BaseModel, Base):
    """Representation of a trading Strategy persisted in the `strategies` table.

    A strategy has a required name, up to ten optional (name, value)
    parameter pairs, and a one-to-many relationship to Backtest rows.
    """
    __tablename__ = 'strategies'
    # Human-readable strategy name; required.
    name = Column(String(128), nullable=False)
    # Backtests are owned by their strategy: deleting a strategy deletes
    # its backtests (and orphans are removed as well).
    backtests = relationship("Backtest",
                             backref="strategies",
                             cascade="all, delete, delete-orphan")
    # Parameter slot 0 is mandatory; slots 1-9 are optional and default to NULL.
    param_0_name = Column(String(60), nullable=False)
    param_0_value = Column(Float, nullable=False)
    param_1_name = Column(String(60), default=None)
    param_1_value = Column(Float, default=None)
    param_2_name = Column(String(60), default=None)
    param_2_value = Column(Float, default=None)
    param_3_name = Column(String(60), default=None)
    param_3_value = Column(Float, default=None)
    param_4_name = Column(String(60), default=None)
    param_4_value = Column(Float, default=None)
    param_5_name = Column(String(60), default=None)
    param_5_value = Column(Float, default=None)
    param_6_name = Column(String(60), default=None)
    param_6_value = Column(Float, default=None)
    param_7_name = Column(String(60), default=None)
    param_7_value = Column(Float, default=None)
    param_8_name = Column(String(60), default=None)
    param_8_value = Column(Float, default=None)
    param_9_name = Column(String(60), default=None)
    param_9_value = Column(Float, default=None)

    def __init__(self, *args, **kwargs):
        """Initializes strategy; all persistence logic lives in BaseModel."""
        super().__init__(*args, **kwargs)
| [
"sqlalchemy.orm.relationship",
"sqlalchemy.String",
"sqlalchemy.Column"
] | [((367, 456), 'sqlalchemy.orm.relationship', 'relationship', (['"""Backtest"""'], {'backref': '"""strategies"""', 'cascade': '"""all, delete, delete-orphan"""'}), "('Backtest', backref='strategies', cascade=\n 'all, delete, delete-orphan')\n", (379, 456), False, 'from sqlalchemy.orm import relationship\n'), ((584, 613), 'sqlalchemy.Column', 'Column', (['Float'], {'nullable': '(False)'}), '(Float, nullable=False)\n', (590, 613), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((686, 713), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (692, 713), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((786, 813), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (792, 813), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((886, 913), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (892, 913), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((986, 1013), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (992, 1013), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1086, 1113), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (1092, 1113), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1186, 1213), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (1192, 1213), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1286, 1313), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (1292, 1313), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1386, 1413), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (1392, 1413), False, 'from sqlalchemy import Column, String, 
Float, ForeignKey\n'), ((1486, 1513), 'sqlalchemy.Column', 'Column', (['Float'], {'default': 'None'}), '(Float, default=None)\n', (1492, 1513), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((322, 333), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (328, 333), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((536, 546), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (542, 546), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((640, 650), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (646, 650), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((740, 750), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (746, 750), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((840, 850), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (846, 850), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((940, 950), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (946, 950), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1040, 1050), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (1046, 1050), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1140, 1150), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (1146, 1150), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1240, 1250), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (1246, 1250), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1340, 1350), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (1346, 1350), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n'), ((1440, 1450), 'sqlalchemy.String', 'String', (['(60)'], {}), '(60)\n', (1446, 1450), False, 'from sqlalchemy import Column, String, Float, ForeignKey\n')] |
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from django.db import transaction
from django.db.models.deletion import ProtectedError
from django.http import HttpResponse
from rest_framework import generics
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from guardian.shortcuts import get_objects_for_user
from django.utils import timezone
from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes
from bosscore.lookup import LookUpKey
from bosscore.permissions import BossPermissionManager
from bosscore.privileges import check_role
from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, \
CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, \
ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer
from bosscore.models import Collection, Experiment, Channel, CoordinateFrame
from bosscore.constants import ADMIN_GRP
from bossutils.configuration import BossConfig
from bossutils.logger import bossLogger
boss_config = BossConfig()
try:
    # Derive the cuboid bucket name from the system FQDN, keeping everything
    # after the first dot: e.g. 'api.example.boss' -> 'cuboids.example.boss'.
    DEFAULT_CUBOID_BUCKET_NAME = 'cuboids.' + boss_config['system']['fqdn'].split('.', 1)[1]
except Exception as ex:
    # Fall back to an empty bucket name so module import still succeeds;
    # the failure is logged for operators.
    DEFAULT_CUBOID_BUCKET_NAME = ''
    bossLogger().error(f'Failed getting system.fqdn from boss.config: {ex}')
class CollectionDetail(APIView):
    """
    View to access a collection object.

    Supports GET (read), POST (create), PUT (update) and DELETE
    (mark-for-deletion) of a single Collection identified by name.
    """
    def get(self, request, collection):
        """
        Get a single instance of a collection.

        Args:
            request: DRF Request object
            collection: Collection name specifying the collection you want

        Returns:
            Collection data, or a Boss error response
        """
        try:
            collection_obj = Collection.objects.get(name=collection)

            # Check for permissions: public collections are readable by anyone.
            if collection_obj is None:
                return BossResourceNotFoundError(collection)
            if collection_obj.public or request.user.has_perm("read", collection_obj):
                # Resources queued for deletion are no longer readable.
                if collection_obj.to_be_deleted is not None:
                    return BossHTTPError("Invalid Request. This Resource has been marked for deletion",
                                         ErrorCodes.RESOURCE_MARKED_FOR_DELETION)
                serializer = CollectionSerializer(collection_obj)
                data = serializer.data
                # Attach only the experiments this user is allowed to see.
                data['experiments'] = serializer.get_experiments_permissions(collection_obj,request.user)
                return Response(data, status=200)
            else:
                return BossPermissionError('read', collection)
        except Collection.DoesNotExist:
            return BossResourceNotFoundError(collection)

    @transaction.atomic
    @check_role("resource-manager")
    def post(self, request, collection):
        """Create a new collection.

        View to create a new collection and an associated bosskey for that collection.

        Args:
            request: DRF Request object
            collection : Collection name

        Returns:
            Collection data (201), or a Boss error response
        """
        col_data = request.data.copy()
        col_data['name'] = collection

        # Save the object
        serializer = CollectionSerializer(data=col_data)
        if serializer.is_valid():
            serializer.save(creator=self.request.user)
            collection_obj = Collection.objects.get(name=col_data['name'])

            # Assign permissions to the users primary group and admin group
            BossPermissionManager.add_permissions_primary_group(self.request.user, collection_obj)
            BossPermissionManager.add_permissions_admin_group(collection_obj)

            # Register the pk-based lookup key for this collection's boss key.
            lookup_key = str(collection_obj.pk)
            boss_key = collection_obj.name
            LookUpKey.add_lookup(lookup_key, boss_key, collection_obj.name)

            return Response(serializer.data, status=status.HTTP_201_CREATED)
        else:
            return BossHTTPError("{}".format(serializer.errors), ErrorCodes.INVALID_POST_ARGUMENT)

    @transaction.atomic
    def put(self, request, collection):
        """
        Update a collection using django rest framework.

        Args:
            request: DRF Request object
            collection: Collection name

        Returns:
            Updated collection data, or a Boss error response
        """
        try:
            # Check if the object exists
            collection_obj = Collection.objects.get(name=collection)

            # Check for permissions
            if request.user.has_perm("update", collection_obj):
                serializer = CollectionSerializer(collection_obj, data=request.data, partial=True)
                if serializer.is_valid():
                    serializer.save()

                    # update the lookup key if you update the name
                    if 'name' in request.data and request.data['name'] != collection:
                        lookup_key = str(collection_obj.pk)
                        boss_key = request.data['name']
                        LookUpKey.update_lookup_collection(lookup_key, boss_key, request.data['name'])

                    return Response(serializer.data)
                else:
                    return BossHTTPError("{}".format(serializer.errors), ErrorCodes.INVALID_POST_ARGUMENT)
            else:
                return BossPermissionError('update', collection)
        except Collection.DoesNotExist:
            return BossResourceNotFoundError(collection)
        except BossError as err:
            return err.to_http()

    @transaction.atomic
    @check_role("resource-manager")
    def delete(self, request, collection):
        """
        Delete a collection (soft delete: marks it for later removal).

        Args:
            request: DRF Request object
            collection: Name of collection to delete

        Returns:
            Http status 204 on success, or a Boss error response
        """
        try:
            collection_obj = Collection.objects.get(name=collection)
            if request.user.has_perm("delete", collection_obj):
                # Are there experiments that reference it
                serializer = CollectionSerializer(collection_obj)
                if len(serializer.get_experiments(collection_obj)) > 0:
                    # This collection has experiments that reference it and cannot be deleted
                    return BossHTTPError("Collection {} has experiments that reference it and cannot be deleted."
                                         "Please delete the experiments first.".format(collection),
                                         ErrorCodes.INTEGRITY_ERROR)

                # Soft delete: timestamp the row so a background task removes it.
                collection_obj.to_be_deleted = timezone.now()
                collection_obj.save()

                return HttpResponse(status=204)
            else:
                return BossPermissionError('delete', collection)

        except Collection.DoesNotExist:
            return BossResourceNotFoundError(collection)
        except ProtectedError:
            return BossHTTPError("Cannot delete {}. It has experiments that reference it.".format(collection),
                                 ErrorCodes.INTEGRITY_ERROR)
class CoordinateFrameDetail(APIView):
    """
    View to access a coordinate frame.

    Supports GET (read), POST (create), PUT (update) and DELETE
    (mark-for-deletion) of a single CoordinateFrame identified by name.
    """
    def get(self, request, coordframe):
        """
        GET requests for a single instance of a coordinateframe.

        Args:
            request: DRF Request object
            coordframe: Coordinate frame name specifying the coordinate frame you want

        Returns:
            CoordinateFrame data, or a Boss error response
        """
        try:
            coordframe_obj = CoordinateFrame.objects.get(name=coordframe)

            # Resources queued for deletion are no longer readable.
            if coordframe_obj.to_be_deleted is not None:
                return BossHTTPError("Invalid Request. This Resource has been marked for deletion",
                                     ErrorCodes.RESOURCE_MARKED_FOR_DELETION)
            serializer = CoordinateFrameSerializer(coordframe_obj)
            return Response(serializer.data)
        except CoordinateFrame.DoesNotExist:
            return BossResourceNotFoundError(coordframe)

    @transaction.atomic
    @check_role("resource-manager")
    def post(self, request, coordframe):
        """Create a new coordinate frame.

        View to create a new coordinate frame.

        Args:
            request: DRF Request object
            coordframe : Coordinate frame name

        Returns:
            CoordinateFrame data (201), or a Boss error response
        """
        coordframe_data = request.data.copy()
        coordframe_data['name'] = coordframe

        serializer = CoordinateFrameSerializer(data=coordframe_data)
        if serializer.is_valid():
            serializer.save(creator=self.request.user)
            coordframe_obj = CoordinateFrame.objects.get(name=coordframe_data['name'])

            # Assign permissions to the users primary group and admin group
            BossPermissionManager.add_permissions_primary_group(self.request.user, coordframe_obj)
            BossPermissionManager.add_permissions_admin_group(coordframe_obj)

            return Response(serializer.data, status=status.HTTP_201_CREATED)
        else:
            return BossHTTPError("{}".format(serializer.errors), ErrorCodes.INVALID_POST_ARGUMENT)

    @transaction.atomic
    def put(self, request, coordframe):
        """
        Update a coordinate frame using django rest framework.

        Args:
            request: DRF Request object
            coordframe: Coordinate frame name

        Returns:
            Updated CoordinateFrame data, or a Boss error response
        """
        try:
            # Check if the object exists
            coordframe_obj = CoordinateFrame.objects.get(name=coordframe)
            if request.user.has_perm("update", coordframe_obj):
                serializer = CoordinateFrameUpdateSerializer(coordframe_obj, data=request.data, partial=True)
                if serializer.is_valid():
                    serializer.save()

                    # return the object back to the user
                    # (re-fetch in case the name changed during the update)
                    coordframe = serializer.data['name']
                    coordframe_obj = CoordinateFrame.objects.get(name=coordframe)
                    serializer = CoordinateFrameSerializer(coordframe_obj)
                    return Response(serializer.data)
                else:
                    return BossHTTPError("{}".format(serializer.errors), ErrorCodes.INVALID_POST_ARGUMENT)
            else:
                return BossPermissionError('update', coordframe)
        except CoordinateFrame.DoesNotExist:
            return BossResourceNotFoundError(coordframe)

    @transaction.atomic
    @check_role("resource-manager")
    def delete(self, request, coordframe):
        """
        Delete a coordinate frame (soft delete: marks it for later removal).

        Args:
            request: DRF Request object
            coordframe: Name of coordinateframe to delete

        Returns:
            Http status 204 on success, or a Boss error response
        """
        try:
            coordframe_obj = CoordinateFrame.objects.get(name=coordframe)
            if request.user.has_perm("delete", coordframe_obj):
                # Are there experiments that reference it
                serializer = CoordinateFrameDeleteSerializer(coordframe_obj)
                if len(serializer.get_valid_exps(coordframe_obj)) > 0:
                    # This collection has experiments that reference it and cannot be deleted
                    return BossHTTPError(" Coordinate frame {} has experiments that reference it and cannot be deleted."
                                         "Please delete the experiments first.".format(coordframe),
                                         ErrorCodes.INTEGRITY_ERROR)

                # Soft delete: timestamp the row so a background task removes it.
                coordframe_obj.to_be_deleted = timezone.now()
                coordframe_obj.save()
                return HttpResponse(status=204)
            else:
                return BossPermissionError('delete', coordframe)
        except CoordinateFrame.DoesNotExist:
            return BossResourceNotFoundError(coordframe)
        except ProtectedError:
            return BossHTTPError("Cannot delete {}. It has experiments that reference it.".format(coordframe),
                                 ErrorCodes.INTEGRITY_ERROR)
class ExperimentDetail(APIView):
    """
    View to access an experiment.

    Supports GET (read), POST (create), PUT (update) and DELETE
    (mark-for-deletion) of a single Experiment, addressed by
    collection name + experiment name.
    """
    def get(self, request, collection, experiment):
        """
        GET requests for a single instance of a experiment.

        Args:
            request: DRF Request object
            collection: Collection name specifying the collection you want
            experiment: Experiment name specifying the experiment instance

        Returns :
            Experiment data, or a Boss error response
        """
        try:
            collection_obj = Collection.objects.get(name=collection)
            experiment_obj = Experiment.objects.get(name=experiment, collection=collection_obj)

            # Check for permissions: public experiments are readable by anyone.
            if experiment_obj is None:
                return BossResourceNotFoundError(experiment)
            if experiment_obj.public or request.user.has_perm("read", experiment_obj):
                # Resources queued for deletion are no longer readable.
                if experiment_obj.to_be_deleted is not None:
                    return BossHTTPError("Invalid Request. This Resource has been marked for deletion",
                                         ErrorCodes.RESOURCE_MARKED_FOR_DELETION)
                serializer = ExperimentReadSerializer(experiment_obj)
                data = serializer.data
                # NOTE(review): leftover debug code — `logging.Logger('boss')`
                # creates a detached logger (normally `logging.getLogger` is
                # used); candidate for removal.
                import logging
                logging.Logger('boss').debug("request.user: " + str(type(request.user)))
                # Attach only the channels this user is allowed to see.
                data['channels'] = serializer.get_channels_permissions(collection_obj,experiment_obj,request.user)
                return Response(data)
            else:
                return BossPermissionError('read', experiment)
        except Collection.DoesNotExist:
            return BossResourceNotFoundError(collection)
        except Experiment.DoesNotExist:
            return BossResourceNotFoundError(experiment)

    @transaction.atomic
    @check_role("resource-manager")
    def post(self, request, collection, experiment):
        """Create a new experiment.

        View to create a new experiment and an associated bosskey for that experiment.

        Args:
            request: DRF Request object
            collection : Collection name
            experiment : Experiment name

        Returns:
            Experiment data (201), or a Boss error response
        """
        experiment_data = request.data.copy()
        experiment_data['name'] = experiment
        try:
            # Get the collection information
            collection_obj = Collection.objects.get(name=collection)

            # Creating an experiment requires "add" on the parent collection.
            if request.user.has_perm("add", collection_obj):
                experiment_data['collection'] = collection_obj.pk

                # Update the coordinate frame: resolve the frame name to its pk.
                if 'coord_frame' not in experiment_data:
                    return BossHTTPError("This request requires a valid coordinate frame",
                                         ErrorCodes.INVALID_POST_ARGUMENT)

                coord_frame_obj = CoordinateFrame.objects.get(name=experiment_data['coord_frame'])
                experiment_data['coord_frame'] = coord_frame_obj.pk

                serializer = ExperimentSerializer(data=experiment_data)
                if serializer.is_valid():
                    serializer.save(creator=self.request.user)
                    experiment_obj = Experiment.objects.get(name=experiment_data['name'], collection=collection_obj)

                    # Assign permissions to the users primary group and admin group
                    BossPermissionManager.add_permissions_primary_group(self.request.user, experiment_obj)
                    BossPermissionManager.add_permissions_admin_group(experiment_obj)

                    # Register the lookup key: '<col_pk>&<exp_pk>' -> '<col>&<exp>'.
                    lookup_key = str(collection_obj.pk) + '&' + str(experiment_obj.pk)
                    boss_key = collection_obj.name + '&' + experiment_obj.name
                    LookUpKey.add_lookup(lookup_key, boss_key, collection_obj.name, experiment_obj.name)

                    serializer = ExperimentReadSerializer(experiment_obj)
                    return Response(serializer.data, status=status.HTTP_201_CREATED)
                else:
                    return BossHTTPError("{}".format(serializer.errors), ErrorCodes.INVALID_POST_ARGUMENT)
            else:
                return BossPermissionError('add', collection)
        except Collection.DoesNotExist:
            return BossResourceNotFoundError(collection)
        except CoordinateFrame.DoesNotExist:
            return BossResourceNotFoundError(experiment_data['coord_frame'])
        except ValueError:
            return BossHTTPError("Value Error.Collection id {} in post data needs to be an integer"
                                 .format(experiment_data['collection']), ErrorCodes.TYPE_ERROR)

    @transaction.atomic
    def put(self, request, collection, experiment):
        """
        Update a experiment using django rest framework.

        Args:
            request: DRF Request object
            collection: Collection name
            experiment : Experiment name for the new experiment

        Returns:
            Updated experiment data, or a Boss error response
        """
        try:
            # Check if the object exists
            collection_obj = Collection.objects.get(name=collection)
            experiment_obj = Experiment.objects.get(name=experiment, collection=collection_obj)
            if request.user.has_perm("update", experiment_obj):
                serializer = ExperimentUpdateSerializer(experiment_obj, data=request.data, partial=True)
                if serializer.is_valid():
                    serializer.save()

                    # update the lookup key if you update the name
                    if 'name' in request.data and request.data['name'] != experiment:
                        lookup_key = str(collection_obj.pk) + '&' + str(experiment_obj.pk)
                        boss_key = collection_obj.name + '&' + request.data['name']
                        LookUpKey.update_lookup_experiment(lookup_key, boss_key, collection_obj.name, request.data['name'])

                    # return the object back to the user
                    # (re-fetch in case the name changed during the update)
                    experiment = serializer.data['name']
                    experiment_obj = Experiment.objects.get(name=experiment, collection=collection_obj)
                    serializer = ExperimentReadSerializer(experiment_obj)
                    return Response(serializer.data)
                else:
                    return BossHTTPError("{}".format(serializer.errors), ErrorCodes.INVALID_POST_ARGUMENT)
            else:
                return BossPermissionError('update', experiment)
        except Collection.DoesNotExist:
            return BossResourceNotFoundError(collection)
        except Experiment.DoesNotExist:
            return BossResourceNotFoundError(experiment)
        except BossError as err:
            return err.to_http()

    @transaction.atomic
    @check_role("resource-manager")
    def delete(self, request, collection, experiment):
        """
        Delete a experiment (soft delete: marks it for later removal).

        Args:
            request: DRF Request object
            collection: Name of collection
            experiment: Experiment name to delete

        Returns:
            Http status 204 on success, or a Boss error response
        """
        try:
            collection_obj = Collection.objects.get(name=collection)
            experiment_obj = Experiment.objects.get(name=experiment, collection=collection_obj)
            if request.user.has_perm("delete", experiment_obj):
                # Are there channels that reference it
                serializer = ExperimentReadSerializer(experiment_obj)
                if len(serializer.get_channels(experiment_obj)) > 0:
                    # This experiment has channels that reference it and cannot be deleted
                    return BossHTTPError(" Experiment {} has channels that reference it and cannot be deleted."
                                         "Please delete the channels first.".format(experiment),
                                         ErrorCodes.INTEGRITY_ERROR)

                # Soft delete: timestamp the row so a background task removes it.
                experiment_obj.to_be_deleted = timezone.now()
                experiment_obj.save()
                return HttpResponse(status=204)
            else:
                return BossPermissionError('delete', experiment)
        except Collection.DoesNotExist:
            return BossResourceNotFoundError(collection)
        except Experiment.DoesNotExist:
            return BossResourceNotFoundError(experiment)
        except ProtectedError:
            return BossHTTPError("Cannot delete {}. It has channels that reference it."
                                 .format(experiment), ErrorCodes.INTEGRITY_ERROR)
class ChannelDetail(APIView):
"""
View to access a channel
"""
    @staticmethod
    def validate_source_related_channels(experiment, source_channels, related_channels):
        """
        Validate that the list of source and related channels are channels that exist.

        Args:
            experiment: Experiment the channels must belong to
            source_channels: List of source channel names
            related_channels: List of related channel names

        Returns:
            Tuple of (source Channel objects, related Channel objects)

        Raises:
            BossError: if a name appears in both lists, or names a channel
                that does not exist in the experiment
        """
        # A channel cannot be both a source and a related channel.
        common = set(source_channels) & set(related_channels)
        if len(common) > 0:
            raise BossError("Related channels have to be different from source channels",
                            ErrorCodes.INVALID_POST_ARGUMENT)

        source_channel_obj = []
        related_channel_obj = []
        try:
            # Resolve each name to its Channel row within this experiment.
            for name in source_channels:
                source_channel_obj.append(Channel.objects.get(name=name, experiment=experiment))

            for name in related_channels:
                related_channel_obj.append(Channel.objects.get(name=name, experiment=experiment))
            return (source_channel_obj,related_channel_obj)
        except Channel.DoesNotExist:
            # `name` holds the first missing channel encountered above.
            raise BossError("Invalid channel names {} in the list of source/related channels channels ".format(name),
                            ErrorCodes.INVALID_POST_ARGUMENT)
    @staticmethod
    def add_source_related_channels(channel, experiment, source_channels, related_channels):
        """
        Add a list of source and related channels to a newly created channel.

        Args:
            channel: Channel to attach the links to
            experiment: Experiment containing the channel (unused here;
                kept for interface symmetry with the validate/update helpers)
            source_channels: List of source Channel objects
            related_channels: List of related Channel objects

        Returns:
            Channel: the updated channel

        Raises:
            BossError: if linking fails; the channel is deleted first so a
                half-created channel is not left behind
        """
        try:
            for source_channel in source_channels:
                channel.add_source(source_channel)

            for related_channel in related_channels:
                channel.related.add(related_channel.pk)
            channel.save()
            return channel
        except Exception as err:
            # Roll back the freshly created channel on any linking failure.
            channel.delete()
            raise BossError("Exception adding source/related channels.{}".format(err), ErrorCodes.INVALID_POST_ARGUMENT)
    @staticmethod
    def update_source_related_channels(channel, experiment, source_channels, related_channels):
        """
        Update a list of source and related channels.

        Args:
            related_channels: New list of related Channel objects
            source_channels: New list of source Channel objects
            experiment: Experiment for the current channel (unused here;
                kept for interface symmetry with the other helpers)
            channel: Current channel

        Returns:
            Channel: the updated channel

        Raises:
            BossError: if updating the links fails
        """
        try:
            # update ist of sources
            # Get all the source
            cur_sources = channel.sources.all()
            # Get the list of sources to remove
            rm_sources = [ch for ch in cur_sources if ch not in source_channels]
            for source in rm_sources:
                channel.remove_source(source)
            # add new sources
            add_sources = [ch for ch in source_channels if ch not in cur_sources]
            for source_channel in add_sources:
                channel.add_source(source_channel)

            # Diff the related set the same way: remove stale, add new.
            cur_related = channel.related.all()
            rm_related = [ch for ch in cur_related if ch not in related_channels]
            for related in rm_related:
                channel.related.remove(related)
            add_related = [ch for ch in related_channels if ch not in cur_related]
            for related_channel in add_related:
                channel.related.add(related_channel.pk)

            channel.save()
            return channel
        except Exception as err:
            # NOTE(review): deleting the channel on a failed *update* looks
            # copy-pasted from add_source_related_channels — it destroys a
            # pre-existing channel. Confirm this is intended.
            channel.delete()
            raise BossError("Exception adding source/related channels.{}".format(err), ErrorCodes.INVALID_POST_ARGUMENT)
    def get(self, request, collection, experiment, channel):
        """
        Retrieve information about a channel.

        Args:
            request: DRF Request object
            collection: Collection name
            experiment: Experiment name
            channel: Channel name

        Returns :
            Channel data, or a Boss error response
        """
        try:
            collection_obj = Collection.objects.get(name=collection)
            experiment_obj = Experiment.objects.get(name=experiment, collection=collection_obj)
            channel_obj = Channel.objects.get(name=channel, experiment=experiment_obj)

            # Check for permissions: public channels are readable by anyone.
            if channel_obj is None:
                return BossResourceNotFoundError(channel)
            if channel_obj.public or request.user.has_perm("read", channel_obj):
                # Resources queued for deletion are no longer readable.
                if channel_obj.to_be_deleted is not None:
                    return BossHTTPError("Invalid Request. This Resource has been marked for deletion",
                                         ErrorCodes.RESOURCE_MARKED_FOR_DELETION)
                serializer = ChannelReadSerializer(channel_obj)
                return Response(serializer.data)
            else:
                return BossPermissionError('read', channel)

        except Collection.DoesNotExist:
            return BossResourceNotFoundError(collection)
        except Experiment.DoesNotExist:
            return BossResourceNotFoundError(experiment)
        except Channel.DoesNotExist:
            return BossResourceNotFoundError(channel)
        except ValueError:
            return BossHTTPError("Value Error in post data", ErrorCodes.TYPE_ERROR)
@transaction.atomic
@check_role("resource-manager")
def post(self, request, collection, experiment, channel):
    """
    Post a new Channel

    Args:
        request: DRF Request object
        collection: Collection name
        experiment: Experiment name
        channel: Channel name

    Returns :
        Channel
    """
    # Copy the payload so mutations below do not alter the original request data.
    channel_data = request.data.copy()
    channel_data['name'] = channel
    try:
        # Only admins may set an explicit bucket or cloudvolume path on a channel.
        is_admin = BossPermissionManager.is_in_group(request.user, ADMIN_GRP)
        if 'bucket' in channel_data and channel_data['bucket'] and not is_admin:
            return BossHTTPError('Only admins can set bucket name', ErrorCodes.MISSING_PERMISSION)
        if 'cv_path' in channel_data and channel_data['cv_path'] and not is_admin:
            return BossHTTPError('Only admins can set cv_path', ErrorCodes.MISSING_PERMISSION)

        # Get the collection and experiment
        collection_obj = Collection.objects.get(name=collection)
        experiment_obj = Experiment.objects.get(name=experiment, collection=collection_obj)

        # Check for add permissions
        if request.user.has_perm("add", experiment_obj):
            channel_data['experiment'] = experiment_obj.pk
            use_cloudvol = channel_data.get('storage_type', None) == Channel.StorageType.CLOUD_VOLUME
            cv_path = channel_data.get('cv_path', None)
            # Default the cloudvolume path to /<collection>/<experiment>/<channel>
            # when the caller did not supply one.
            if use_cloudvol and (cv_path is None or cv_path == ''):
                channel_data['cv_path'] = f'/{collection}/{experiment}/{channel}'
            if use_cloudvol:
                # DX NOTE: For now we assume that cloudvolume channels are downsampled. This means
                # that the num_hierarchy_levels in the experiment should be limited to the available
                # mip levels in the cloudvolume layer.
                channel_data['downsample_status'] = 'DOWNSAMPLED'

            # The source and related channels are names and need to be removed from the dict before serialization
            source_channels = channel_data.pop('sources', [])
            related_channels = channel_data.pop('related', [])

            # TODO: Removed source channel requirement for annotation channels. Future update should allow source channel from
            # different collections.
            # Source channels have to be included for new annotation channels
            # if 'type' in channel_data and channel_data['type'] == 'annotation' and len(source_channels) == 0:
            #     return BossHTTPError("Annotation channels require the source channel to be set. "
            #                          "Specify a valid source channel in the post", ErrorCodes.INVALID_POST_ARGUMENT)

            # Validate the source and related channels if they are included
            channels = self.validate_source_related_channels(experiment_obj, source_channels, related_channels)
            source_channels_objs = channels[0]
            related_channels_objs = channels[1]

            # Validate and create the channel
            serializer = ChannelSerializer(data=channel_data)
            if serializer.is_valid():
                serializer.save(creator=self.request.user)
                # Re-fetch the saved row so the relation updates below operate on
                # persisted state.
                channel_obj = Channel.objects.get(name=channel_data['name'], experiment=experiment_obj)

                # Save source and related channels if they are valid
                channel_obj = self.add_source_related_channels(channel_obj, experiment_obj, source_channels_objs,
                                                               related_channels_objs)

                # Assign permissions to the users primary group and admin group
                BossPermissionManager.add_permissions_primary_group(self.request.user, channel_obj)
                BossPermissionManager.add_permissions_admin_group(channel_obj)

                # Add Lookup key ('&'-joined pk triple maps to the boss key name triple)
                lookup_key = str(collection_obj.pk) + '&' + str(experiment_obj.pk) + '&' + str(channel_obj.pk)
                boss_key = collection_obj.name + '&' + experiment_obj.name + '&' + channel_obj.name
                LookUpKey.add_lookup(lookup_key, boss_key, collection_obj.name, experiment_obj.name,
                                     channel_obj.name)

                serializer = ChannelReadSerializer(channel_obj)
                return Response(serializer.data, status=status.HTTP_201_CREATED)
            else:
                return BossHTTPError("{}".format(serializer.errors), ErrorCodes.INVALID_POST_ARGUMENT)
        else:
            return BossPermissionError('add', experiment)
    except Collection.DoesNotExist:
        return BossResourceNotFoundError(collection)
    except Experiment.DoesNotExist:
        return BossResourceNotFoundError(experiment)
    except Channel.DoesNotExist:
        return BossResourceNotFoundError(channel)
    except BossError as err:
        return err.to_http()
    except ValueError:
        return BossHTTPError("Value Error in post data", ErrorCodes.TYPE_ERROR)
@transaction.atomic
def put(self, request, collection, experiment, channel):
    """
    Update new Channel

    Args:
        request: DRF Request object
        collection: Collection name
        experiment: Experiment name
        channel: Channel name

    Returns :
        Channel
    """
    # A rename request carries the new name in the body; otherwise keep the URL name.
    if 'name' in request.data:
        channel_name = request.data['name']
    else:
        channel_name = channel
    try:
        # Check if the object exists
        collection_obj = Collection.objects.get(name=collection)
        experiment_obj = Experiment.objects.get(name=experiment, collection=collection_obj)
        channel_obj = Channel.objects.get(name=channel, experiment=experiment_obj)
        if request.user.has_perm("update", channel_obj):
            # Deep copy so the pops below do not mutate the request payload.
            data = copy.deepcopy(request.data)
            # Only admins may change storage_type, bucket, or cv_path after creation.
            is_admin = BossPermissionManager.is_in_group(request.user, ADMIN_GRP)
            if 'storage_type' in data and not is_admin:
                return BossHTTPError('Only admins can change storage_type after creation',
                                     ErrorCodes.MISSING_PERMISSION)
            if 'bucket' in data and data['bucket'] and not is_admin:
                return BossHTTPError('Only admins can set bucket name', ErrorCodes.MISSING_PERMISSION)
            if 'cv_path' in data and data['cv_path'] and not is_admin:
                return BossHTTPError('Only admins can set cv_path', ErrorCodes.MISSING_PERMISSION)

            # The source and related channels are names and need to be removed from the dict before serialization
            source_channels = data.pop('sources', [])
            related_channels = data.pop('related', [])

            # Validate the source and related channels if they are included
            channels = self.validate_source_related_channels(experiment_obj, source_channels, related_channels)
            source_channels_objs = channels[0]
            related_channels_objs = channels[1]

            serializer = ChannelUpdateSerializer(channel_obj, data=data, partial=True)
            if serializer.is_valid():
                serializer.save()
                # Re-fetch under the (possibly renamed) name so the relation
                # updates below operate on persisted state.
                channel_obj = Channel.objects.get(name=channel_name, experiment=experiment_obj)

                # Save source and related channels if they are valid
                channel_obj = self.update_source_related_channels(channel_obj, experiment_obj, source_channels_objs,
                                                                  related_channels_objs)

                # update the lookup key if you update the name
                if 'name' in data and data['name'] != channel:
                    lookup_key = str(collection_obj.pk) + '&' + str(experiment_obj.pk) + '&' \
                                 + str(channel_obj.pk)
                    boss_key = collection_obj.name + '&' + experiment_obj.name + '&' + data['name']
                    LookUpKey.update_lookup(lookup_key, boss_key, collection_obj.name, experiment_obj.name,
                                            data['name'])

                # return the object back to the user
                channel = serializer.data['name']
                channel_obj = Channel.objects.get(name=channel, experiment=experiment_obj)
                serializer = ChannelReadSerializer(channel_obj)
                return Response(serializer.data)
            else:
                return BossHTTPError("{}".format(serializer.errors), ErrorCodes.INVALID_POST_ARGUMENT)
        else:
            return BossPermissionError('update', channel)
    except Collection.DoesNotExist:
        return BossResourceNotFoundError(collection)
    except Experiment.DoesNotExist:
        return BossResourceNotFoundError(experiment)
    except Channel.DoesNotExist:
        return BossResourceNotFoundError(channel)
@transaction.atomic
@check_role("resource-manager")
def delete(self, request, collection, experiment, channel):
    """
    Delete a Channel

    Args:
        request: DRF Request object
        collection: Collection name
        experiment: Experiment name
        channel: Channel name

    Returns :
        Http status
    """
    try:
        coll_obj = Collection.objects.get(name=collection)
        exp_obj = Experiment.objects.get(name=experiment, collection=coll_obj)
        chan_obj = Channel.objects.get(name=channel, experiment=exp_obj)

        # Guard: caller must hold delete permission on this channel.
        if not request.user.has_perm("delete", chan_obj):
            return BossPermissionError('delete', channel)

        # Guard: a channel cannot be deleted while other channels list it as
        # their source.
        derived = chan_obj.get_derived()
        if len(derived) > 0:
            return BossHTTPError("Channel {} is the source channel of other channels and cannot be deleted"
                                 .format(channel), ErrorCodes.INTEGRITY_ERROR)

        # Soft delete: stamp the row for asynchronous cleanup rather than
        # removing it immediately.
        chan_obj.to_be_deleted = timezone.now()
        chan_obj.save()
        return HttpResponse(status=204)
    except Collection.DoesNotExist:
        return BossResourceNotFoundError(collection)
    except Experiment.DoesNotExist:
        return BossResourceNotFoundError(experiment)
    except Channel.DoesNotExist:
        return BossResourceNotFoundError(channel)
    except ProtectedError:
        return BossHTTPError("Cannot delete {}. It has channels that reference it.".format(channel),
                             ErrorCodes.INTEGRITY_ERROR)
class CollectionList(generics.ListAPIView):
    """
    List all collections or create a new collection
    """
    # Base queryset: the publicly visible collections.
    queryset = Collection.objects.filter(public=True)
    serializer_class = CollectionSerializer

    def list(self, request, *args, **kwargs):
        """
        Display only objects that a user has access to

        Args:
            request: DRF request
            *args:
            **kwargs:

        Returns: Collections that user has view permissions on
        """
        # Collections the user can read, minus anything marked for deletion,
        # merged with the public set from the base queryset.
        readable = get_objects_for_user(request.user, 'read', klass=Collection)
        visible = readable.exclude(to_be_deleted__isnull=False).union(self.get_queryset())
        names = [coll.name for coll in visible]
        return Response({"collections": names})
class ExperimentList(generics.ListAPIView):
    """
    List all experiments
    """
    queryset = Experiment.objects.all()
    serializer_class = ExperimentSerializer

    def list(self, request, collection, *args, **kwargs):
        """
        return experiments for the collection that the user has permissions for

        Args:
            request: DRF request
            collection : Collection name
            *args:
            **kwargs:

        Returns: Experiments that user has view permissions on and are not marked for deletion
        """
        collection_obj = Collection.objects.get(name=collection)
        # Experiments in this collection that are not marked for deletion.
        experiments = self.get_queryset().filter(collection=collection_obj).exclude(to_be_deleted__isnull=False)
        data = {
            "experiments": [
                exp.name for exp in experiments
                # Public experiments are visible to everyone; otherwise require
                # read permission. (Truthiness test instead of `== True`.)
                if exp.public or request.user.has_perm('read', exp)
            ]
        }
        return Response(data)
class ChannelList(generics.ListAPIView):
    """
    List all channels
    """
    queryset = Channel.objects.all()
    serializer_class = ChannelSerializer

    def list(self, request, collection, experiment, *args, **kwargs):
        """
        Display only objects that a user has access to

        Args:
            request: DRF request
            collection: Collection Name
            experiment: Experiment Name
            *args:
            **kwargs:

        Returns: Channel that user has view permissions on
        """
        collection_obj = Collection.objects.get(name=collection)
        experiment_obj = Experiment.objects.get(name=experiment, collection=collection_obj)
        # Channels in this experiment that are not marked for deletion.
        channels = self.get_queryset().filter(experiment=experiment_obj).exclude(to_be_deleted__isnull=False)
        data = {
            "channels": [
                channel.name for channel in channels
                # Public channels are visible to everyone; otherwise require
                # read permission. (Truthiness test instead of `== True`.)
                if channel.public or request.user.has_perm('read', channel)
            ]
        }
        return Response(data)
class CoordinateFrameList(generics.ListCreateAPIView):
    """
    List all coordinate frames
    """
    queryset = CoordinateFrame.objects.all()
    serializer_class = CoordinateFrameSerializer

    def list(self, request, *args, **kwargs):
        """
        Display only objects that a user has access to

        Args:
            request: DRF request
            *args:
            **kwargs:

        Returns: Coordinate frames that user has view permissions on
        """
        # Note: the line below returns all coordinate frames that the user has read permissions on
        # coords = get_objects_for_user(request.user, 'read', klass=CoordinateFrame).exclude(to_be_deleted__isnull=False)

        # `?owner=true` restricts the listing to frames created by the caller.
        # A single .get() with a default replaces the prior redundant
        # membership test + .get() pair; behavior is unchanged.
        owner_flag = request.query_params.get('owner', "False")
        if owner_flag.capitalize() == "True":
            coords = CoordinateFrame.objects.filter(creator=request.user).exclude(to_be_deleted__isnull=False)
        else:
            coords = CoordinateFrame.objects.all().exclude(to_be_deleted__isnull=False)
        data = {"coords": [coord.name for coord in coords]}
        return Response(data)
| [
"bosscore.serializers.ChannelSerializer",
"bossutils.logger.bossLogger",
"bosscore.models.Experiment.objects.all",
"bosscore.models.CoordinateFrame.objects.filter",
"bosscore.models.Collection.objects.filter",
"bosscore.serializers.ChannelUpdateSerializer",
"copy.deepcopy",
"bosscore.lookup.LookUpKey.... | [((1771, 1783), 'bossutils.configuration.BossConfig', 'BossConfig', ([], {}), '()\n', (1781, 1783), False, 'from bossutils.configuration import BossConfig\n'), ((3413, 3443), 'bosscore.privileges.check_role', 'check_role', (['"""resource-manager"""'], {}), "('resource-manager')\n", (3423, 3443), False, 'from bosscore.privileges import check_role\n'), ((6207, 6237), 'bosscore.privileges.check_role', 'check_role', (['"""resource-manager"""'], {}), "('resource-manager')\n", (6217, 6237), False, 'from bosscore.privileges import check_role\n'), ((8711, 8741), 'bosscore.privileges.check_role', 'check_role', (['"""resource-manager"""'], {}), "('resource-manager')\n", (8721, 8741), False, 'from bosscore.privileges import check_role\n'), ((11161, 11191), 'bosscore.privileges.check_role', 'check_role', (['"""resource-manager"""'], {}), "('resource-manager')\n", (11171, 11191), False, 'from bosscore.privileges import check_role\n'), ((14527, 14557), 'bosscore.privileges.check_role', 'check_role', (['"""resource-manager"""'], {}), "('resource-manager')\n", (14537, 14557), False, 'from bosscore.privileges import check_role\n'), ((19489, 19519), 'bosscore.privileges.check_role', 'check_role', (['"""resource-manager"""'], {}), "('resource-manager')\n", (19499, 19519), False, 'from bosscore.privileges import check_role\n'), ((26731, 26761), 'bosscore.privileges.check_role', 'check_role', (['"""resource-manager"""'], {}), "('resource-manager')\n", (26741, 26761), False, 'from bosscore.privileges import check_role\n'), ((36114, 36144), 'bosscore.privileges.check_role', 'check_role', (['"""resource-manager"""'], {}), "('resource-manager')\n", (36124, 36144), False, 'from bosscore.privileges import check_role\n'), ((38070, 38108), 'bosscore.models.Collection.objects.filter', 'Collection.objects.filter', ([], {'public': '(True)'}), '(public=True)\n', (38095, 38108), False, 'from bosscore.models import Collection, Experiment, Channel, 
CoordinateFrame\n'), ((38822, 38846), 'bosscore.models.Experiment.objects.all', 'Experiment.objects.all', ([], {}), '()\n', (38844, 38846), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((39818, 39839), 'bosscore.models.Channel.objects.all', 'Channel.objects.all', ([], {}), '()\n', (39837, 39839), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((40917, 40946), 'bosscore.models.CoordinateFrame.objects.all', 'CoordinateFrame.objects.all', ([], {}), '()\n', (40944, 40946), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((3880, 3915), 'bosscore.serializers.CollectionSerializer', 'CollectionSerializer', ([], {'data': 'col_data'}), '(data=col_data)\n', (3900, 3915), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((9143, 9190), 'bosscore.serializers.CoordinateFrameSerializer', 'CoordinateFrameSerializer', ([], {'data': 'coordframe_data'}), '(data=coordframe_data)\n', (9168, 9190), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((38704, 38718), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (38712, 38718), False, 'from rest_framework.response import Response\n'), ((39305, 39344), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (39327, 39344), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((39707, 
39721), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (39715, 39721), False, 'from rest_framework.response import Response\n'), ((40286, 40325), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (40308, 40325), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((40351, 40417), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (40373, 40417), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((40783, 40797), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (40791, 40797), False, 'from rest_framework.response import Response\n'), ((41998, 42012), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (42006, 42012), False, 'from rest_framework.response import Response\n'), ((2432, 2471), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (2454, 2471), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((4034, 4079), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': "col_data['name']"}), "(name=col_data['name'])\n", (4056, 4079), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((4169, 4259), 'bosscore.permissions.BossPermissionManager.add_permissions_primary_group', 'BossPermissionManager.add_permissions_primary_group', (['self.request.user', 'collection_obj'], {}), '(self.request.user,\n collection_obj)\n', (4220, 4259), False, 'from bosscore.permissions import BossPermissionManager\n'), ((4268, 4333), 'bosscore.permissions.BossPermissionManager.add_permissions_admin_group', 'BossPermissionManager.add_permissions_admin_group', 
(['collection_obj'], {}), '(collection_obj)\n', (4317, 4333), False, 'from bosscore.permissions import BossPermissionManager\n'), ((4438, 4501), 'bosscore.lookup.LookUpKey.add_lookup', 'LookUpKey.add_lookup', (['lookup_key', 'boss_key', 'collection_obj.name'], {}), '(lookup_key, boss_key, collection_obj.name)\n', (4458, 4501), False, 'from bosscore.lookup import LookUpKey\n'), ((4522, 4579), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (4530, 4579), False, 'from rest_framework.response import Response\n'), ((5055, 5094), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (5077, 5094), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((6524, 6563), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (6546, 6563), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((8187, 8231), 'bosscore.models.CoordinateFrame.objects.get', 'CoordinateFrame.objects.get', ([], {'name': 'coordframe'}), '(name=coordframe)\n', (8214, 8231), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((8492, 8533), 'bosscore.serializers.CoordinateFrameSerializer', 'CoordinateFrameSerializer', (['coordframe_obj'], {}), '(coordframe_obj)\n', (8517, 8533), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((8553, 8578), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (8561, 8578), False, 'from rest_framework.response import 
Response\n'), ((9309, 9366), 'bosscore.models.CoordinateFrame.objects.get', 'CoordinateFrame.objects.get', ([], {'name': "coordframe_data['name']"}), "(name=coordframe_data['name'])\n", (9336, 9366), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((9456, 9546), 'bosscore.permissions.BossPermissionManager.add_permissions_primary_group', 'BossPermissionManager.add_permissions_primary_group', (['self.request.user', 'coordframe_obj'], {}), '(self.request.user,\n coordframe_obj)\n', (9507, 9546), False, 'from bosscore.permissions import BossPermissionManager\n'), ((9555, 9620), 'bosscore.permissions.BossPermissionManager.add_permissions_admin_group', 'BossPermissionManager.add_permissions_admin_group', (['coordframe_obj'], {}), '(coordframe_obj)\n', (9604, 9620), False, 'from bosscore.permissions import BossPermissionManager\n'), ((9641, 9698), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (9649, 9698), False, 'from rest_framework.response import Response\n'), ((10192, 10236), 'bosscore.models.CoordinateFrame.objects.get', 'CoordinateFrame.objects.get', ([], {'name': 'coordframe'}), '(name=coordframe)\n', (10219, 10236), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((11489, 11533), 'bosscore.models.CoordinateFrame.objects.get', 'CoordinateFrame.objects.get', ([], {'name': 'coordframe'}), '(name=coordframe)\n', (11516, 11533), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((13234, 13273), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (13256, 13273), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((13303, 13369), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 
'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (13325, 13369), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((15100, 15139), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (15122, 15139), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((17802, 17841), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (17824, 17841), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((17871, 17937), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (17893, 17937), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((19858, 19897), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (19880, 19897), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((19927, 19993), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (19949, 19993), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((21768, 21877), 'bosscore.error.BossError', 'BossError', (['"""Related channels have to be different from source channels"""', 'ErrorCodes.INVALID_POST_ARGUMENT'], {}), "('Related channels have to be different from source channels',\n ErrorCodes.INVALID_POST_ARGUMENT)\n", (21777, 21877), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((25434, 25473), 
'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (25456, 25473), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((25503, 25569), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (25525, 25569), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((25596, 25656), 'bosscore.models.Channel.objects.get', 'Channel.objects.get', ([], {'name': 'channel', 'experiment': 'experiment_obj'}), '(name=channel, experiment=experiment_obj)\n', (25615, 25656), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((27202, 27260), 'bosscore.permissions.BossPermissionManager.is_in_group', 'BossPermissionManager.is_in_group', (['request.user', 'ADMIN_GRP'], {}), '(request.user, ADMIN_GRP)\n', (27235, 27260), False, 'from bosscore.permissions import BossPermissionManager\n'), ((27714, 27753), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (27736, 27753), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((27783, 27849), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (27805, 27849), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((32583, 32622), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (32605, 32622), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((32652, 32718), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 
'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (32674, 32718), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((32745, 32805), 'bosscore.models.Channel.objects.get', 'Channel.objects.get', ([], {'name': 'channel', 'experiment': 'experiment_obj'}), '(name=channel, experiment=experiment_obj)\n', (32764, 32805), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((36511, 36550), 'bosscore.models.Collection.objects.get', 'Collection.objects.get', ([], {'name': 'collection'}), '(name=collection)\n', (36533, 36550), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((36580, 36646), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (36602, 36646), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((36673, 36733), 'bosscore.models.Channel.objects.get', 'Channel.objects.get', ([], {'name': 'channel', 'experiment': 'experiment_obj'}), '(name=channel, experiment=experiment_obj)\n', (36692, 36733), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((1946, 1958), 'bossutils.logger.bossLogger', 'bossLogger', ([], {}), '()\n', (1956, 1958), False, 'from bossutils.logger import bossLogger\n'), ((2571, 2608), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (2596, 2608), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((2973, 3009), 'bosscore.serializers.CollectionSerializer', 'CollectionSerializer', (['collection_obj'], {}), '(collection_obj)\n', (2993, 3009), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, 
CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((3178, 3204), 'rest_framework.response.Response', 'Response', (['data'], {'status': '(200)'}), '(data, status=200)\n', (3186, 3204), False, 'from rest_framework.response import Response\n'), ((3246, 3285), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""read"""', 'collection'], {}), "('read', collection)\n", (3265, 3285), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((3345, 3382), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (3370, 3382), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((5225, 5294), 'bosscore.serializers.CollectionSerializer', 'CollectionSerializer', (['collection_obj'], {'data': 'request.data', 'partial': '(True)'}), '(collection_obj, data=request.data, partial=True)\n', (5245, 5294), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((5972, 6013), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""update"""', 'collection'], {}), "('update', collection)\n", (5991, 6013), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((6073, 6110), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (6098, 6110), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, 
ErrorCodes\n'), ((6717, 6753), 'bosscore.serializers.CollectionSerializer', 'CollectionSerializer', (['collection_obj'], {}), '(collection_obj)\n', (6737, 6753), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((7251, 7265), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (7263, 7265), False, 'from django.utils import timezone\n'), ((7328, 7352), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(204)'}), '(status=204)\n', (7340, 7352), False, 'from django.http import HttpResponse\n'), ((7394, 7435), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""delete"""', 'collection'], {}), "('delete', collection)\n", (7413, 7435), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((7495, 7532), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (7520, 7532), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((8312, 8433), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Invalid Request. This Resource has been marked for deletion"""', 'ErrorCodes.RESOURCE_MARKED_FOR_DELETION'], {}), "('Invalid Request. 
This Resource has been marked for deletion',\n ErrorCodes.RESOURCE_MARKED_FOR_DELETION)\n", (8325, 8433), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((8643, 8680), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['coordframe'], {}), '(coordframe)\n', (8668, 8680), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((10331, 10416), 'bosscore.serializers.CoordinateFrameUpdateSerializer', 'CoordinateFrameUpdateSerializer', (['coordframe_obj'], {'data': 'request.data', 'partial': '(True)'}), '(coordframe_obj, data=request.data, partial=True\n )\n', (10362, 10416), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((10987, 11028), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""update"""', 'coordframe'], {}), "('update', coordframe)\n", (11006, 11028), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((11093, 11130), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['coordframe'], {}), '(coordframe)\n', (11118, 11130), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((11686, 11733), 'bosscore.serializers.CoordinateFrameDeleteSerializer', 'CoordinateFrameDeleteSerializer', (['coordframe_obj'], {}), '(coordframe_obj)\n', (11717, 11733), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, 
ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((12237, 12251), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (12249, 12251), False, 'from django.utils import timezone\n'), ((12313, 12337), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(204)'}), '(status=204)\n', (12325, 12337), False, 'from django.http import HttpResponse\n'), ((12379, 12420), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""delete"""', 'coordframe'], {}), "('delete', coordframe)\n", (12398, 12420), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((12485, 12522), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['coordframe'], {}), '(coordframe)\n', (12510, 12522), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((13468, 13505), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['experiment'], {}), '(experiment)\n', (13493, 13505), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((13869, 13909), 'bosscore.serializers.ExperimentReadSerializer', 'ExperimentReadSerializer', (['experiment_obj'], {}), '(experiment_obj)\n', (13893, 13909), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((14207, 14221), 'rest_framework.response.Response', 'Response', (['data'], {}), '(data)\n', (14215, 14221), False, 'from rest_framework.response import Response\n'), ((14263, 14302), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""read"""', 'experiment'], {}), "('read', experiment)\n", 
(14282, 14302), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((14362, 14399), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (14387, 14399), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((14459, 14496), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['experiment'], {}), '(experiment)\n', (14484, 14496), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((15573, 15637), 'bosscore.models.CoordinateFrame.objects.get', 'CoordinateFrame.objects.get', ([], {'name': "experiment_data['coord_frame']"}), "(name=experiment_data['coord_frame'])\n", (15600, 15637), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((15736, 15778), 'bosscore.serializers.ExperimentSerializer', 'ExperimentSerializer', ([], {'data': 'experiment_data'}), '(data=experiment_data)\n', (15756, 15778), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((16881, 16919), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""add"""', 'collection'], {}), "('add', collection)\n", (16900, 16919), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((16979, 17016), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (17004, 17016), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), 
((17081, 17138), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (["experiment_data['coord_frame']"], {}), "(experiment_data['coord_frame'])\n", (17106, 17138), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((18031, 18106), 'bosscore.serializers.ExperimentUpdateSerializer', 'ExperimentUpdateSerializer', (['experiment_obj'], {'data': 'request.data', 'partial': '(True)'}), '(experiment_obj, data=request.data, partial=True)\n', (18057, 18106), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((19156, 19197), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""update"""', 'experiment'], {}), "('update', experiment)\n", (19175, 19197), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((19258, 19295), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (19283, 19295), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((19355, 19392), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['experiment'], {}), '(experiment)\n', (19380, 19392), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((20142, 20182), 'bosscore.serializers.ExperimentReadSerializer', 'ExperimentReadSerializer', (['experiment_obj'], {}), '(experiment_obj)\n', (20166, 20182), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, 
CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((20669, 20683), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (20681, 20683), False, 'from django.utils import timezone\n'), ((20746, 20770), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(204)'}), '(status=204)\n', (20758, 20770), False, 'from django.http import HttpResponse\n'), ((20812, 20853), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""delete"""', 'experiment'], {}), "('delete', experiment)\n", (20831, 20853), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((20913, 20950), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (20938, 20950), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((21010, 21047), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['experiment'], {}), '(experiment)\n', (21035, 21047), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((25753, 25787), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['channel'], {}), '(channel)\n', (25778, 25787), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((26142, 26176), 'bosscore.serializers.ChannelReadSerializer', 'ChannelReadSerializer', (['channel_obj'], {}), '(channel_obj)\n', (26163, 26176), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, 
CoordinateFrameDeleteSerializer\n'), ((26200, 26225), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (26208, 26225), False, 'from rest_framework.response import Response\n'), ((26267, 26303), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""read"""', 'channel'], {}), "('read', channel)\n", (26286, 26303), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((26364, 26401), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (26389, 26401), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((26461, 26498), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['experiment'], {}), '(experiment)\n', (26486, 26498), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((26555, 26589), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['channel'], {}), '(channel)\n', (26580, 26589), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((26636, 26700), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Value Error in post data"""', 'ErrorCodes.TYPE_ERROR'], {}), "('Value Error in post data', ErrorCodes.TYPE_ERROR)\n", (26649, 26700), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((27369, 27448), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Only admins can set bucket name"""', 'ErrorCodes.MISSING_PERMISSION'], {}), "('Only admins can set bucket name', ErrorCodes.MISSING_PERMISSION)\n", (27382, 27448), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), 
((27560, 27635), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Only admins can set cv_path"""', 'ErrorCodes.MISSING_PERMISSION'], {}), "('Only admins can set cv_path', ErrorCodes.MISSING_PERMISSION)\n", (27573, 27635), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((29974, 30010), 'bosscore.serializers.ChannelSerializer', 'ChannelSerializer', ([], {'data': 'channel_data'}), '(data=channel_data)\n', (29991, 30010), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((31523, 31561), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""add"""', 'experiment'], {}), "('add', experiment)\n", (31542, 31561), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((31621, 31658), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (31646, 31658), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((31718, 31755), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['experiment'], {}), '(experiment)\n', (31743, 31755), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((31812, 31846), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['channel'], {}), '(channel)\n', (31837, 31846), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((31959, 32023), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Value Error in post data"""', 
'ErrorCodes.TYPE_ERROR'], {}), "('Value Error in post data', ErrorCodes.TYPE_ERROR)\n", (31972, 32023), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((32892, 32919), 'copy.deepcopy', 'copy.deepcopy', (['request.data'], {}), '(request.data)\n', (32905, 32919), False, 'import copy\n'), ((32947, 33005), 'bosscore.permissions.BossPermissionManager.is_in_group', 'BossPermissionManager.is_in_group', (['request.user', 'ADMIN_GRP'], {}), '(request.user, ADMIN_GRP)\n', (32980, 33005), False, 'from bosscore.permissions import BossPermissionManager\n'), ((34159, 34220), 'bosscore.serializers.ChannelUpdateSerializer', 'ChannelUpdateSerializer', (['channel_obj'], {'data': 'data', 'partial': '(True)'}), '(channel_obj, data=data, partial=True)\n', (34182, 34220), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((35759, 35797), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""update"""', 'channel'], {}), "('update', channel)\n", (35778, 35797), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((35858, 35895), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (35883, 35895), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((35955, 35992), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['experiment'], {}), '(experiment)\n', (35980, 35992), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((36049, 36083), 
'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['channel'], {}), '(channel)\n', (36074, 36083), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((37243, 37257), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (37255, 37257), False, 'from django.utils import timezone\n'), ((37316, 37340), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(204)'}), '(status=204)\n', (37328, 37340), False, 'from django.http import HttpResponse\n'), ((37382, 37420), 'bosscore.error.BossPermissionError', 'BossPermissionError', (['"""delete"""', 'channel'], {}), "('delete', channel)\n", (37401, 37420), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((37481, 37518), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['collection'], {}), '(collection)\n', (37506, 37518), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((37578, 37615), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['experiment'], {}), '(experiment)\n', (37603, 37615), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((37672, 37706), 'bosscore.error.BossResourceNotFoundError', 'BossResourceNotFoundError', (['channel'], {}), '(channel)\n', (37697, 37706), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((38454, 38514), 'guardian.shortcuts.get_objects_for_user', 'get_objects_for_user', (['request.user', '"""read"""'], {'klass': 'Collection'}), "(request.user, 'read', klass=Collection)\n", (38474, 38514), False, 'from guardian.shortcuts import get_objects_for_user\n'), ((2784, 2905), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Invalid 
Request. This Resource has been marked for deletion"""', 'ErrorCodes.RESOURCE_MARKED_FOR_DELETION'], {}), "('Invalid Request. This Resource has been marked for deletion',\n ErrorCodes.RESOURCE_MARKED_FOR_DELETION)\n", (2797, 2905), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((5776, 5801), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (5784, 5801), False, 'from rest_framework.response import Response\n'), ((10644, 10688), 'bosscore.models.CoordinateFrame.objects.get', 'CoordinateFrame.objects.get', ([], {'name': 'coordframe'}), '(name=coordframe)\n', (10671, 10688), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((10722, 10763), 'bosscore.serializers.CoordinateFrameSerializer', 'CoordinateFrameSerializer', (['coordframe_obj'], {}), '(coordframe_obj)\n', (10747, 10763), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((10791, 10816), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (10799, 10816), False, 'from rest_framework.response import Response\n'), ((13681, 13802), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Invalid Request. This Resource has been marked for deletion"""', 'ErrorCodes.RESOURCE_MARKED_FOR_DELETION'], {}), "('Invalid Request. 
This Resource has been marked for deletion',\n ErrorCodes.RESOURCE_MARKED_FOR_DELETION)\n", (13694, 13802), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((15399, 15501), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""This request requires a valid coordinate frame"""', 'ErrorCodes.INVALID_POST_ARGUMENT'], {}), "('This request requires a valid coordinate frame', ErrorCodes.\n INVALID_POST_ARGUMENT)\n", (15412, 15501), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((15921, 16000), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': "experiment_data['name']", 'collection': 'collection_obj'}), "(name=experiment_data['name'], collection=collection_obj)\n", (15943, 16000), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((16106, 16196), 'bosscore.permissions.BossPermissionManager.add_permissions_primary_group', 'BossPermissionManager.add_permissions_primary_group', (['self.request.user', 'experiment_obj'], {}), '(self.request.user,\n experiment_obj)\n', (16157, 16196), False, 'from bosscore.permissions import BossPermissionManager\n'), ((16213, 16278), 'bosscore.permissions.BossPermissionManager.add_permissions_admin_group', 'BossPermissionManager.add_permissions_admin_group', (['experiment_obj'], {}), '(experiment_obj)\n', (16262, 16278), False, 'from bosscore.permissions import BossPermissionManager\n'), ((16466, 16554), 'bosscore.lookup.LookUpKey.add_lookup', 'LookUpKey.add_lookup', (['lookup_key', 'boss_key', 'collection_obj.name', 'experiment_obj.name'], {}), '(lookup_key, boss_key, collection_obj.name,\n experiment_obj.name)\n', (16486, 16554), False, 'from bosscore.lookup import LookUpKey\n'), ((16585, 16625), 'bosscore.serializers.ExperimentReadSerializer', 'ExperimentReadSerializer', (['experiment_obj'], {}), 
'(experiment_obj)\n', (16609, 16625), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((16653, 16710), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (16661, 16710), False, 'from rest_framework.response import Response\n'), ((18792, 18858), 'bosscore.models.Experiment.objects.get', 'Experiment.objects.get', ([], {'name': 'experiment', 'collection': 'collection_obj'}), '(name=experiment, collection=collection_obj)\n', (18814, 18858), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((18892, 18932), 'bosscore.serializers.ExperimentReadSerializer', 'ExperimentReadSerializer', (['experiment_obj'], {}), '(experiment_obj)\n', (18916, 18932), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((18960, 18985), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (18968, 18985), False, 'from rest_framework.response import Response\n'), ((22065, 22118), 'bosscore.models.Channel.objects.get', 'Channel.objects.get', ([], {'name': 'name', 'experiment': 'experiment'}), '(name=name, experiment=experiment)\n', (22084, 22118), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((22206, 22259), 'bosscore.models.Channel.objects.get', 'Channel.objects.get', ([], {'name': 'name', 'experiment': 'experiment'}), '(name=name, experiment=experiment)\n', 
(22225, 22259), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((25954, 26075), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Invalid Request. This Resource has been marked for deletion"""', 'ErrorCodes.RESOURCE_MARKED_FOR_DELETION'], {}), "('Invalid Request. This Resource has been marked for deletion',\n ErrorCodes.RESOURCE_MARKED_FOR_DELETION)\n", (25967, 26075), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((30150, 30223), 'bosscore.models.Channel.objects.get', 'Channel.objects.get', ([], {'name': "channel_data['name']", 'experiment': 'experiment_obj'}), "(name=channel_data['name'], experiment=experiment_obj)\n", (30169, 30223), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((30611, 30698), 'bosscore.permissions.BossPermissionManager.add_permissions_primary_group', 'BossPermissionManager.add_permissions_primary_group', (['self.request.user', 'channel_obj'], {}), '(self.request.user,\n channel_obj)\n', (30662, 30698), False, 'from bosscore.permissions import BossPermissionManager\n'), ((30715, 30777), 'bosscore.permissions.BossPermissionManager.add_permissions_admin_group', 'BossPermissionManager.add_permissions_admin_group', (['channel_obj'], {}), '(channel_obj)\n', (30764, 30777), False, 'from bosscore.permissions import BossPermissionManager\n'), ((31055, 31161), 'bosscore.lookup.LookUpKey.add_lookup', 'LookUpKey.add_lookup', (['lookup_key', 'boss_key', 'collection_obj.name', 'experiment_obj.name', 'channel_obj.name'], {}), '(lookup_key, boss_key, collection_obj.name,\n experiment_obj.name, channel_obj.name)\n', (31075, 31161), False, 'from bosscore.lookup import LookUpKey\n'), ((31233, 31267), 'bosscore.serializers.ChannelReadSerializer', 'ChannelReadSerializer', (['channel_obj'], {}), '(channel_obj)\n', (31254, 31267), False, 'from bosscore.serializers import CollectionSerializer, 
ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((31295, 31352), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (31303, 31352), False, 'from rest_framework.response import Response\n'), ((33094, 33196), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Only admins can change storage_type after creation"""', 'ErrorCodes.MISSING_PERMISSION'], {}), "('Only admins can change storage_type after creation',\n ErrorCodes.MISSING_PERMISSION)\n", (33107, 33196), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((33335, 33414), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Only admins can set bucket name"""', 'ErrorCodes.MISSING_PERMISSION'], {}), "('Only admins can set bucket name', ErrorCodes.MISSING_PERMISSION)\n", (33348, 33414), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((33518, 33593), 'bosscore.error.BossHTTPError', 'BossHTTPError', (['"""Only admins can set cv_path"""', 'ErrorCodes.MISSING_PERMISSION'], {}), "('Only admins can set cv_path', ErrorCodes.MISSING_PERMISSION)\n", (33531, 33593), False, 'from bosscore.error import BossError, BossHTTPError, BossPermissionError, BossResourceNotFoundError, ErrorCodes\n'), ((34336, 34401), 'bosscore.models.Channel.objects.get', 'Channel.objects.get', ([], {'name': 'channel_name', 'experiment': 'experiment_obj'}), '(name=channel_name, experiment=experiment_obj)\n', (34355, 34401), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((35407, 35467), 'bosscore.models.Channel.objects.get', 'Channel.objects.get', ([], 
{'name': 'channel', 'experiment': 'experiment_obj'}), '(name=channel, experiment=experiment_obj)\n', (35426, 35467), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((35501, 35535), 'bosscore.serializers.ChannelReadSerializer', 'ChannelReadSerializer', (['channel_obj'], {}), '(channel_obj)\n', (35522, 35535), False, 'from bosscore.serializers import CollectionSerializer, ExperimentSerializer, ChannelSerializer, CoordinateFrameSerializer, CoordinateFrameUpdateSerializer, ExperimentReadSerializer, ChannelReadSerializer, ExperimentUpdateSerializer, ChannelUpdateSerializer, CoordinateFrameDeleteSerializer\n'), ((35563, 35588), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (35571, 35588), False, 'from rest_framework.response import Response\n'), ((41731, 41783), 'bosscore.models.CoordinateFrame.objects.filter', 'CoordinateFrame.objects.filter', ([], {'creator': 'request.user'}), '(creator=request.user)\n', (41761, 41783), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((41856, 41885), 'bosscore.models.CoordinateFrame.objects.all', 'CoordinateFrame.objects.all', ([], {}), '()\n', (41883, 41885), False, 'from bosscore.models import Collection, Experiment, Channel, CoordinateFrame\n'), ((5669, 5747), 'bosscore.lookup.LookUpKey.update_lookup_collection', 'LookUpKey.update_lookup_collection', (['lookup_key', 'boss_key', "request.data['name']"], {}), "(lookup_key, boss_key, request.data['name'])\n", (5703, 5747), False, 'from bosscore.lookup import LookUpKey\n'), ((13996, 14018), 'logging.Logger', 'logging.Logger', (['"""boss"""'], {}), "('boss')\n", (14010, 14018), False, 'import logging\n'), ((18540, 18644), 'bosscore.lookup.LookUpKey.update_lookup_experiment', 'LookUpKey.update_lookup_experiment', (['lookup_key', 'boss_key', 'collection_obj.name', "request.data['name']"], {}), "(lookup_key, boss_key, collection_obj.\n name, 
request.data['name'])\n", (18574, 18644), False, 'from bosscore.lookup import LookUpKey\n'), ((35110, 35215), 'bosscore.lookup.LookUpKey.update_lookup', 'LookUpKey.update_lookup', (['lookup_key', 'boss_key', 'collection_obj.name', 'experiment_obj.name', "data['name']"], {}), "(lookup_key, boss_key, collection_obj.name,\n experiment_obj.name, data['name'])\n", (35133, 35215), False, 'from bosscore.lookup import LookUpKey\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This is the pygame minimal example.
"""
__revision__ = "$Rev$"
__version__ = "3.0.0." + __revision__[6:-2]
__author__ = 'DR0ID @ 2009-2011'
import sys
import os
try:
import _path
except:
pass
import tiledtmxloader
# -----------------------------------------------------------------------------
def main():
    """Parse the command line and launch the demo on the chosen map.

    The first command-line argument is used as the TMX map path; when no
    argument is given, a bundled example map is used and a usage hint is
    printed.
    """
    cli_args = sys.argv[1:]
    if cli_args:
        map_file = cli_args[0]
    else:
        # No map supplied: fall back to the example map one directory up.
        map_file = os.path.join(os.pardir, "001-1.tmx")
        usage = "usage: python %s your_map.tmx\n\nUsing default map '%s'\n" % (
            os.path.basename(__file__), map_file)
        print(usage)
    demo_pygame(map_file)
# -----------------------------------------------------------------------------
def demo_pygame(file_name):
    """Load the TMX map at *file_name* and print its basic properties."""
    # Parse up front so a display window could later be sized to fit the
    # map (when the map is small enough).
    tile_map = tiledtmxloader.tmxreader.TileMapParser().parse_decode(file_name)
    # Source file name as recorded by the parser.
    print("loaded map:", tile_map.map_file_name)
    # Overall map size in pixels.
    print("map size in pixels:", tile_map.pixel_width, tile_map.pixel_height)
    # Size of a single tile.
    print("tile size used:", tile_map.tilewidth, tile_map.tileheight)
    # Map dimensions measured in tiles.
    print("tiles used:", tile_map.width, tile_map.height)
    # Number of layers defined in the map.
    print("found '", len(tile_map.layers), "' layers on this map")
    # Uncomment to dump the whole map to the console for a sanity check
    # (warning: the output may be huge):
    # tiledtmxloader.tmxreader.printer(tile_map)
# -----------------------------------------------------------------------------
# Run the demo only when this file is executed directly as a script.
if __name__ == '__main__':
    main()
| [
"tiledtmxloader.tmxreader.TileMapParser",
"os.path.join",
"os.path.basename"
] | [((472, 508), 'os.path.join', 'os.path.join', (['os.pardir', '"""001-1.tmx"""'], {}), "(os.pardir, '001-1.tmx')\n", (484, 508), False, 'import os\n'), ((1014, 1054), 'tiledtmxloader.tmxreader.TileMapParser', 'tiledtmxloader.tmxreader.TileMapParser', ([], {}), '()\n', (1052, 1054), False, 'import tiledtmxloader\n'), ((601, 627), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (617, 627), False, 'import os\n')] |
# Generated by Django 3.0.2 on 2020-02-17 03:13
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``Profile.profile_pic``.

    Makes the image field optional (``blank=True``) and gives it a default
    picture, storing uploads under ``profile_pics/``.
    """

    # Must be applied after migration 0009 of the ``projects`` app.
    dependencies = [
        ('projects', '0009_auto_20200217_0306'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profile',
            name='profile_pic',
            field=models.ImageField(blank=True, default='default.png', upload_to='profile_pics'),
        ),
    ]
| [
"django.db.models.ImageField"
] | [((343, 421), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'default': '"""default.png"""', 'upload_to': '"""profile_pics"""'}), "(blank=True, default='default.png', upload_to='profile_pics')\n", (360, 421), False, 'from django.db import migrations, models\n')] |
import os
import hashlib
def getHash(f):
    """Return the hex MD5 digest of the remaining contents of *f*.

    *f* must be a file-like object opened in binary mode.  The data is
    consumed in fixed-size chunks, so large files (or binary files with no
    newlines, which the original line-by-line loop handled poorly) are
    hashed without loading everything into memory.  The resulting digest is
    identical to hashing the whole content at once.
    """
    digest = hashlib.md5()  # renamed from ``hash``, which shadowed the builtin
    # iter() keeps calling f.read(65536) until it returns b'' at EOF.
    for chunk in iter(lambda: f.read(65536), b''):
        digest.update(chunk)
    return digest.hexdigest()
def IsHashEqual(f1, f2):
    """Return True when both file objects yield the same MD5 hex digest."""
    return getHash(f1) == getHash(f2)
if __name__ == '__main__':
    # Run the compiler on each test case (test_1_r01 .. test_1_r12) and
    # compare its output against the expected .out file via MD5 digests.
    SPLC = "./bin/splc"
    for case_no in range(1, 13):
        # Zero-padded base name reproduces the original r01..r09 / r10..r12
        # naming with a single loop.
        base = "../test/test_1_r%02d" % case_no
        print("========" + str(case_no) + "=======")
        os.system("%s %s.spl > rst" % (SPLC, base))
        # 'with' guarantees both handles are closed even if the comparison
        # raises -- the original leaked every opened file.
        with open("./rst", "rb") as f1, open(base + ".out", "rb") as f2:
            print(IsHashEqual(f1, f2))
"os.system",
"hashlib.md5"
] | [((69, 82), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (80, 82), False, 'import hashlib\n'), ((805, 823), 'os.system', 'os.system', (['cmds[i]'], {}), '(cmds[i])\n', (814, 823), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# @Time : 2017/8/2 10:46
# @Author : play4fun
# @File : test_video.py
# @Software: PyCharm
"""
test_video.py:
"""
import numpy as np
import cv2
from matplotlib import pyplot as plt
cap = cv2.VideoCapture('../../data/vtest.avi')#不支持读取视频
# cap = cv2.VideoCapture('output.avi')
# cap = cv2.VideoCapture('Minions_banana.mp4')
# 帧率
fps = cap.get(cv2.CAP_PROP_FPS) # 25.0
print("Frames per second using video.get(cv2.CAP_PROP_FPS) : {0}".format(fps))
# 总共有多少帧
num_frames = cap.get(cv2.CAP_PROP_FRAME_COUNT)
print('共有', num_frames, '帧')
#
frame_height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
frame_width = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
print('高:', frame_height, '宽:', frame_width)
FRAME_NOW = cap.get(cv2.CAP_PROP_POS_FRAMES) # 第0帧
print('当前帧数', FRAME_NOW) # 当前帧数 0.0
# 读取指定帧,对视频文件才有效,对摄像头无效??
# frame_no = 121
# cap.set(1, frame_no) # Where frame_no is the frame you want
ret, frame = cap.read() # Read the frame
print(ret, frame)
# cv2.imshow('frame_no'+str(frame_no), frame)
FRAME_NOW = cap.get(cv2.CAP_PROP_POS_FRAMES)
print('当前帧数', FRAME_NOW) # 当前帧数 122.0
if frame is not None:#出错
plt.imshow(frame)
# plt.imshow(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
plt.show() | [
"matplotlib.pyplot.imshow",
"cv2.VideoCapture",
"matplotlib.pyplot.show"
] | [((221, 261), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""../../data/vtest.avi"""'], {}), "('../../data/vtest.avi')\n", (237, 261), False, 'import cv2\n'), ((1131, 1148), 'matplotlib.pyplot.imshow', 'plt.imshow', (['frame'], {}), '(frame)\n', (1141, 1148), True, 'from matplotlib import pyplot as plt\n'), ((1210, 1220), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1218, 1220), True, 'from matplotlib import pyplot as plt\n')] |
#API setup
from picraft import Vector
from picraft import World, Block
def translate_left(position, data):
if data == 0:
return position - Vector(z=1)
elif data == 1:
return position + Vector(z=1)
elif data == 2:
return position + Vector(x=1)
else:
return position - Vector(x=1)
def translate_right(position, data):
if data == 0:
return position + Vector(z=1)
elif data == 1:
return position - Vector(z=1)
elif data == 2:
return position - Vector(x=1)
else:
return position + Vector(x=1)
def rotate_clockwise(data):
if data == 0:
return 3
elif data == 1:
return 2
elif data == 2:
return 0
else:
return 1
def rotate_anticlockwise(data):
if data == 0:
return 2
elif data == 1:
return 3
elif data == 2:
return 1
else:
return 0
def face_to_dataval(face):
if face == 'x-':
return 0
elif face == 'x+':
return 1
elif face == 'z-':
return 2
elif face == 'z+':
return 3
else:
print('Error: you hit the top face')
return -1
def operation(world, position, data):
#center block
world.blocks[position] = Block(53, data)
#left block
new_position = translate_left(position, data)
new_data = rotate_clockwise(data)
world.blocks[new_position] = Block(53, new_data)
#right block
new_position = translate_right(position, data)
new_data = rotate_anticlockwise(data)
world.blocks[new_position] = Block(53, new_data)
def main():
#API setup
world = World()
while True:
#get recent sword hits
hits = world.events.poll()
for hit in hits:
#get rotation
position = hit.pos
data = face_to_dataval(hit.face)
#call the building function
if data != -1:
operation(world, position, data)
if __name__ == "__main__":
main()
| [
"picraft.Vector",
"picraft.Block",
"picraft.World"
] | [((1065, 1080), 'picraft.Block', 'Block', (['(53)', 'data'], {}), '(53, data)\n', (1070, 1080), False, 'from picraft import World, Block\n'), ((1209, 1228), 'picraft.Block', 'Block', (['(53)', 'new_data'], {}), '(53, new_data)\n', (1214, 1228), False, 'from picraft import World, Block\n'), ((1364, 1383), 'picraft.Block', 'Block', (['(53)', 'new_data'], {}), '(53, new_data)\n', (1369, 1383), False, 'from picraft import World, Block\n'), ((1419, 1426), 'picraft.World', 'World', ([], {}), '()\n', (1424, 1426), False, 'from picraft import World, Block\n'), ((143, 154), 'picraft.Vector', 'Vector', ([], {'z': '(1)'}), '(z=1)\n', (149, 154), False, 'from picraft import Vector\n'), ((365, 376), 'picraft.Vector', 'Vector', ([], {'z': '(1)'}), '(z=1)\n', (371, 376), False, 'from picraft import Vector\n'), ((192, 203), 'picraft.Vector', 'Vector', ([], {'z': '(1)'}), '(z=1)\n', (198, 203), False, 'from picraft import Vector\n'), ((414, 425), 'picraft.Vector', 'Vector', ([], {'z': '(1)'}), '(z=1)\n', (420, 425), False, 'from picraft import Vector\n'), ((241, 252), 'picraft.Vector', 'Vector', ([], {'x': '(1)'}), '(x=1)\n', (247, 252), False, 'from picraft import Vector\n'), ((280, 291), 'picraft.Vector', 'Vector', ([], {'x': '(1)'}), '(x=1)\n', (286, 291), False, 'from picraft import Vector\n'), ((463, 474), 'picraft.Vector', 'Vector', ([], {'x': '(1)'}), '(x=1)\n', (469, 474), False, 'from picraft import Vector\n'), ((502, 513), 'picraft.Vector', 'Vector', ([], {'x': '(1)'}), '(x=1)\n', (508, 513), False, 'from picraft import Vector\n')] |
import json
#json does not support int type key in dictionary,
#so I created this function to do that.
def str_key2int(str_key_dic):
int_key_dic = dict()
for strkey in str_key_dic:
int_key_dic[int(strkey)] = str_key_dic[strkey]
return int_key_dic
def dum_config():
dict_json = {'NUTURAL_POSES_PULSE': NUTURAL_POSES_PULSE,\
'NUTURAL_POSES_DEG': NUTURAL_POSES_DEG,\
'DIRECTION_POSES_PULSE': DIRECTION_POSES_PULSE,\
'SERVO_ID_MAPPING':SERVO_ID_MAPPING,
'PULSES2SERVOS':PULSES2SERVOS
}
out_file = open("const_hardware_config.json", "w")
json.dump(dict_json,out_file,indent = 4)
print("IN const hardware: ")
#print(NUTURAL_POSES_PULSE)
out_file = open("./config/const_hardware_config.json", "r")
config_json = json.load(out_file)
NUTURAL_POSES_PULSE = str_key2int(config_json['NUTURAL_POSES_PULSE'])
NUTURAL_POSES_DEG = str_key2int(config_json['NUTURAL_POSES_DEG'])
DIRECTION_POSES_PULSE = str_key2int(config_json['DIRECTION_POSES_PULSE'])
SERVO_ID_MAPPING = str_key2int(config_json['SERVO_ID_MAPPING'])
PULSES2SERVOS = str_key2int(config_json['PULSES2SERVOS'])
if __name__ == "__main__":
out_file = open("../config/const_hardware_config.json", "r")
config_json = json.load(out_file)
NUTURAL_POSES_PULSE = config_json['NUTURAL_POSES_PULSE']
NUTURAL_POSES_DEG = config_json['NUTURAL_POSES_DEG']
DIRECTION_POSES_PULSE = config_json['DIRECTION_POSES_PULSE']
SERVO_ID_MAPPING = config_json['SERVO_ID_MAPPING']
PULSES2SERVOS = config_json['PULSES2SERVOS']
print("NUTURAL_POSES_PULSE:")
print(NUTURAL_POSES_PULSE) | [
"json.load",
"json.dump"
] | [((850, 869), 'json.load', 'json.load', (['out_file'], {}), '(out_file)\n', (859, 869), False, 'import json\n'), ((656, 696), 'json.dump', 'json.dump', (['dict_json', 'out_file'], {'indent': '(4)'}), '(dict_json, out_file, indent=4)\n', (665, 696), False, 'import json\n'), ((1332, 1351), 'json.load', 'json.load', (['out_file'], {}), '(out_file)\n', (1341, 1351), False, 'import json\n')] |
import unittest
from nzmath.rational import *
import nzmath.finitefield as finitefield
from nzmath.plugins import FLOATTYPE as Float
# Rational, Integer, theIntegerRing, theRationalField
class RationalTest (unittest.TestCase):
def testInit(self):
self.assertEqual("2/1", str(Rational(2)))
self.assertEqual("2/1", str(Rational(2)))
self.assertEqual("1/2", str(Rational(1,2)))
self.assertEqual("1/2", str(Rational(Rational(1,2))))
self.assertEqual("21/26", str(Rational(Rational(7,13),Rational(2,3))))
self.assertEqual("3/2", str(Rational(1.5)))
self.assertEqual("3/4", str(Rational(1.5, 2.0)))
self.assertRaises(ZeroDivisionError, Rational, 1, 0)
self.assertRaises(TypeError, Rational, 1, finitefield.FinitePrimeFieldElement(1,7))
self.assertRaises(TypeError, Rational, finitefield.FinitePrimeFieldElement(1,7), 4)
def testPos(self):
self.assertEqual("1/2", str(+Rational(2,4)))
self.assertEqual("-3/4", str(+Rational(-3,4)))
def testNeg(self):
self.assertEqual(-Rational(2,4), Rational(-1,2))
self.assertEqual("3/4", str(-Rational(-3,4)))
def testAdd(self):
self.assertEqual(Rational(13,6), Rational(2,3) + Rational(3,2))
self.assertEqual(Rational(31,18), Rational(13,18) + 1)
self.assertEqual(Rational(2000000000000000000000000000000000000001,2),
1000000000000000000000000000000000000000 + Rational(1,2))
self.assertEqual(1, Rational(1,2) + Rational(1,3) + Rational(1,6))
self.assertEqual(1, Rational(1,2) + 0.5)
self.assertEqual(1, 0.5 + Rational(1,2))
def testIadd(self):
a = Rational(1,2)
a += Rational(1,3)
self.assertEqual(Rational(5,6), a)
def testSub(self):
self.assertEqual(Rational(-5,6), Rational(2,3) - Rational(3,2))
self.assertEqual(Rational(-5,18), Rational(13,18) - 1)
self.assertEqual(Rational(1999999999999999999999999999999999999999,2),
1000000000000000000000000000000000000000 - Rational(1,2))
self.assertEqual(0, Rational(1,2) - Rational(1,3) - Rational(1,6))
self.assertEqual(0, Rational(1,2) - 0.5)
self.assertEqual(0, 0.5 - Rational(1,2))
def testIsub(self):
a = Rational(1,2)
a -= Rational(1,3)
self.assertEqual(Rational(1,6), a)
def testMul(self):
self.assertEqual(1, Rational(2,3) * Rational(3,2))
self.assertEqual(Rational(26,18), Rational(13,18) * 2)
self.assertEqual(500000000000000000000000000000000000000,
1000000000000000000000000000000000000000 * Rational(1,2))
self.assertEqual(Rational(1, 36), Rational(1,2) * Rational(1,3) * Rational(1,6))
## self.assertEqual(Rational(1,4), Rational(1,2) * 0.5)
## self.assertEqual(Rational(1,4), 0.5 * Rational(1,2))
self.assertEqual(0.25, Rational(1,2) * 0.5)
self.assertEqual(0.25, 0.5 * Rational(1,2))
self.assertEqual(Float(0.25), Rational(1,2) * Float(0.5))
self.assertEqual(Float(0.25), Float(0.5) * Rational(1,2))
def testImul(self):
a = Rational(1,2)
a *= Rational(1,3)
self.assertEqual(Rational(1,6), a)
def testDiv(self):
self.assertEqual(Rational(4,9), Rational(2,3) / Rational(3,2))
self.assertEqual(Rational(13,36), Rational(13,18) / 2)
self.assertEqual(2000000000000000000000000000000000000000,
1000000000000000000000000000000000000000 / Rational(1,2))
self.assertEqual(9, Rational(1,2) / Rational(1,3) / Rational(1,6))
self.assertEqual(1, Rational(1,2) / 0.5)
self.assertEqual(1, 0.5 / Rational(1,2))
def testIdiv(self):
a = Rational(1,2)
a /= Rational(1,3)
self.assertEqual(Rational(3,2), a)
def testPow(self):
self.assertEqual(Rational(2**4, 3**4), Rational(2,3) ** 4)
self.assertEqual(Rational(3,2), Rational(2,3) ** (-1))
def testIpow(self):
a = Rational(1,2)
a **= 3
self.assertEqual(Rational(1,8), a)
a **= -1
self.assertEqual(8, a)
def testLt(self):
self.assertTrue(Rational(5,7) < Rational(3,4))
self.assertFalse(Rational(3,4) < Rational(5,7))
self.assertFalse(Rational(3,4) < Rational(3,4))
self.assertTrue(Rational(132,133) < 1)
self.assertTrue(Rational(-13,12) < -1)
self.assertTrue(1 > Rational(132,133))
self.assertTrue(Rational(132,133) < 1.000001)
def testLe(self):
self.assertTrue(Rational(5,7) <= Rational(3,4))
self.assertFalse(Rational(3,4) <= Rational(5,7))
self.assertTrue(Rational(3,4) <= Rational(3,4))
self.assertTrue(Rational(132,133) <= 1)
self.assertTrue(Rational(-13,12) <= -1)
self.assertTrue(1 >= Rational(132,133))
def testEq(self):
self.assertTrue(Rational(1,2) == Rational(1,2))
self.assertTrue(Rational(-1,2) == Rational(-1,2))
self.assertTrue(Rational(4,2) == 2)
self.assertTrue(2 == Rational(14,7))
self.assertFalse(Rational(3,5) == Rational(27,46))
def testNe(self):
self.assertTrue(Rational(1,2) != Rational(1,3))
self.assertTrue(Rational(1,2) != Rational(-1,2))
self.assertFalse(Rational(1,2) != Rational(1,2))
def testGt(self):
self.assertTrue(Rational(3,4) > Rational(5,7))
self.assertFalse(Rational(5,7) > Rational(3,4))
self.assertFalse(Rational(3,4) > Rational(3,4))
self.assertTrue(Rational(13,12) > 1)
self.assertTrue(Rational(-11,12) > -1)
self.assertTrue(1 < Rational(134,133))
def testGe(self):
self.assertTrue(Rational(3,4) >= Rational(5,7))
self.assertFalse(Rational(5,7) >= Rational(3,4))
self.assertTrue(Rational(3,4) >= Rational(3,4))
self.assertTrue(Rational(13,12) >= 1)
self.assertTrue(Rational(-11,12) >= -1)
self.assertTrue(1 <= Rational(134,133))
def testLong(self):
self.assertTrue(1 == int(Rational(13,12)))
self.assertTrue(0 == int(Rational(12,13)))
self.assertTrue(-1 == int(Rational(-1,14)))
def testInt(self):
self.assertTrue(1 == int(Rational(13,12)))
self.assertTrue(0 == int(Rational(12,13)))
self.assertTrue(-1 == int(Rational(-1,14)))
def testTrim(self):
self.assertEqual(Rational(1,3), Rational(333,1000).trim(5))
self.assertEqual(Rational(13,21), Rational(34,55).trim(33))
self.assertEqual(Rational(21,34), Rational(34,55).trim(34))
def testExpand(self):
self.assertEqual(Rational(-33,100), Rational(-1, 3).expand(10,100))
def testFloat(self):
self.assertTrue(isinstance(float(Rational(1,4)), float))
self.assertEqual(0.25, float(Rational(1,4)))
def testDecimalString(self):
self.assertEqual("0.25000", Rational(1,4).decimalString(5))
self.assertEqual("0.33333", Rational(1,3).decimalString(5))
def testNonzero(self):
self.assertTrue(Rational(1,1))
self.assertFalse(Rational(0,1))
def testHash(self):
self.assertTrue(hash(Rational(1,2)))
self.assertEqual(hash(Rational(1)), hash(Rational(1)))
self.assertNotEqual(hash(Rational(1)), hash(Rational(2)))
self.assertNotEqual(hash(Rational(1,2)), hash(Rational(2,3)))
self.assertEqual(hash(Rational(3,111)), hash(Rational(36,1332)))
class IntegerTest(unittest.TestCase):
def setUp(self):
self.three = Integer(3)
def testMul(self):
self.assertEqual(24, self.three * 8)
self.assertEqual([0,0,0], self.three * [0])
self.assertEqual((0,0,0), self.three * (0,))
self.assertEqual(Rational(6,5), self.three * Rational(2,5))
def testRmul(self):
self.assertEqual(24, 8 * self.three)
self.assertEqual([0,0,0], [0] * self.three)
self.assertEqual((0,0,0), (0,) * self.three)
def testRmod(self):
self.assertEqual(1, 4 % self.three)
def testTruediv(self):
self.assertEqual(Rational(1, 3), 1 / self.three)
self.assertEqual(Rational(2, 1), 2 / Integer(1))
self.assertEqual(Rational, type(2 / Integer(1)))
def testPow(self):
self.assertEqual(25, pow(5, Integer(2)))
self.assertEqual(1, pow(self.three, 4, 5))
# return Rational when index is negative
self.assertEqual(Rational(1, 2), pow(Integer(2), -1))
# not clear that failing these tests is an issue
# self.assertRaises(TypeError, pow, 3, Integer(4), 5)
# self.assertRaises(TypeError, pow, 3, 4, Integer(5))
# raise ValueError when index is negative and modulus is given
self.assertRaises(ValueError, pow, Integer(2), -1, 5)
def testGetRing(self):
self.assertEqual(theIntegerRing, self.three.getRing())
def testNonzero(self):
self.assertTrue(Integer(1))
self.assertFalse(Integer(0))
def testHash(self):
self.assertTrue(hash(Integer(12)))
self.assertEqual(hash(Integer(1)), hash(Integer(1)))
self.assertNotEqual(hash(Integer(1)), hash(Integer(2)))
class IntegerRingTest(unittest.TestCase):
def testContains(self):
self.assertTrue(1 in theIntegerRing)
self.assertTrue(1 in theIntegerRing)
self.assertTrue(Integer(1) in theIntegerRing)
self.assertTrue(Rational(1,2) not in theIntegerRing)
self.assertTrue((1,) not in theIntegerRing)
def testGetQuotientField(self):
self.assertTrue(theRationalField is theIntegerRing.getQuotientField())
def testIssubring(self):
self.assertTrue(theIntegerRing.issubring(theRationalField))
self.assertTrue(theIntegerRing.issubring(theIntegerRing))
def testIssuperring(self):
self.assertFalse(theIntegerRing.issuperring(theRationalField))
self.assertTrue(theIntegerRing.issuperring(theIntegerRing))
def testProperties(self):
self.assertTrue(theIntegerRing.isdomain())
self.assertTrue(theIntegerRing.isnoetherian())
self.assertTrue(theIntegerRing.iseuclidean())
self.assertTrue(theIntegerRing.isufd())
self.assertTrue(theIntegerRing.ispid())
self.assertFalse(theIntegerRing.isfield())
def testGcd(self):
self.assertEqual(1, theIntegerRing.gcd(1, 2))
self.assertEqual(2, theIntegerRing.gcd(2, 4))
self.assertEqual(10, theIntegerRing.gcd(0, 10))
self.assertEqual(10, theIntegerRing.gcd(10, 0))
self.assertEqual(1, theIntegerRing.gcd(13, 21))
def testLcm(self):
self.assertEqual(2, theIntegerRing.lcm(1, 2))
self.assertEqual(4, theIntegerRing.lcm(2, 4))
self.assertEqual(0, theIntegerRing.lcm(0, 10))
self.assertEqual(0, theIntegerRing.lcm(10, 0))
self.assertEqual(273, theIntegerRing.lcm(13, 21))
def testExtGcd(self):
self.assertEqual((1, 0, 1), theIntegerRing.extgcd(1, 2))
def testConstants(self):
self.assertEqual(1, theIntegerRing.one)
self.assertTrue(isinstance(theIntegerRing.one, Integer))
self.assertEqual(0, theIntegerRing.zero)
self.assertTrue(isinstance(theIntegerRing.zero, Integer))
def testStrings(self):
# str
self.assertEqual("Z", str(theIntegerRing))
# repr
self.assertEqual("IntegerRing()", repr(theIntegerRing))
def testHash(self):
dictionary = {}
dictionary[theIntegerRing] = 1
self.assertEqual(1, dictionary[IntegerRing()])
class RationalFieldTest(unittest.TestCase):
def testContains(self):
self.assertTrue(1 in theRationalField)
self.assertTrue(1 in theRationalField)
self.assertTrue(Integer(1) in theRationalField)
self.assertTrue(Rational(1,2) in theRationalField)
self.assertTrue(3.14 not in theRationalField)
self.assertTrue((1,2) not in theRationalField)
def testGetQuotientField(self):
self.assertTrue(theRationalField is theRationalField.getQuotientField())
def testIssubring(self):
self.assertTrue(theRationalField.issubring(theRationalField))
self.assertFalse(theRationalField.issubring(theIntegerRing))
def testIssuperring(self):
self.assertTrue(theRationalField.issuperring(theRationalField))
self.assertTrue(theRationalField.issuperring(theIntegerRing))
def testProperties(self):
self.assertTrue(theRationalField.isfield())
self.assertTrue(theRationalField.isdomain())
def testConstants(self):
self.assertEqual(1, theRationalField.one)
self.assertTrue(isinstance(theRationalField.one, Rational))
self.assertEqual(0, theRationalField.zero)
self.assertTrue(isinstance(theRationalField.zero, Rational))
def testStrings(self):
# str
self.assertEqual("Q", str(theRationalField))
# repr
self.assertEqual("RationalField()", repr(theRationalField))
def testHash(self):
dictionary = {}
dictionary[theRationalField] = 1
self.assertEqual(1, dictionary[RationalField()])
class IntegerIfIntOrLongTest (unittest.TestCase):
def testInt(self):
b = IntegerIfIntOrLong(1)
self.assertTrue(isinstance(b, Integer))
def testLong(self):
b = IntegerIfIntOrLong(1)
self.assertTrue(isinstance(b, Integer))
def testRational(self):
b = IntegerIfIntOrLong(Rational(1,2))
self.assertFalse(isinstance(b, Integer))
self.assertTrue(isinstance(b, Rational))
def testTuple(self):
s = IntegerIfIntOrLong((1,1))
self.assertTrue(isinstance(s, tuple))
for i in s:
self.assertTrue(isinstance(i, Integer))
def testList(self):
s = IntegerIfIntOrLong([1,1])
self.assertTrue(isinstance(s, list))
for i in s:
self.assertTrue(isinstance(i, Integer))
def testListOfTuple(self):
ss = IntegerIfIntOrLong([(1,1),(2,2)])
self.assertTrue(isinstance(ss, list))
for s in ss:
self.assertTrue(isinstance(s, tuple))
for i in s:
self.assertTrue(isinstance(i, Integer))
def suite(suffix = "Test"):
suite = unittest.TestSuite()
all_names = globals()
for name in all_names:
if name.endswith(suffix):
suite.addTest(unittest.makeSuite(all_names[name], "test"))
return suite
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(suite())
| [
"unittest.TestSuite",
"nzmath.finitefield.FinitePrimeFieldElement",
"unittest.makeSuite",
"nzmath.plugins.FLOATTYPE",
"unittest.TextTestRunner"
] | [((14236, 14256), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (14254, 14256), False, 'import unittest\n'), ((14473, 14498), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (14496, 14498), False, 'import unittest\n'), ((766, 807), 'nzmath.finitefield.FinitePrimeFieldElement', 'finitefield.FinitePrimeFieldElement', (['(1)', '(7)'], {}), '(1, 7)\n', (801, 807), True, 'import nzmath.finitefield as finitefield\n'), ((855, 896), 'nzmath.finitefield.FinitePrimeFieldElement', 'finitefield.FinitePrimeFieldElement', (['(1)', '(7)'], {}), '(1, 7)\n', (890, 896), True, 'import nzmath.finitefield as finitefield\n'), ((2999, 3010), 'nzmath.plugins.FLOATTYPE', 'Float', (['(0.25)'], {}), '(0.25)\n', (3004, 3010), True, 'from nzmath.plugins import FLOATTYPE as Float\n'), ((3065, 3076), 'nzmath.plugins.FLOATTYPE', 'Float', (['(0.25)'], {}), '(0.25)\n', (3070, 3076), True, 'from nzmath.plugins import FLOATTYPE as Float\n'), ((3028, 3038), 'nzmath.plugins.FLOATTYPE', 'Float', (['(0.5)'], {}), '(0.5)\n', (3033, 3038), True, 'from nzmath.plugins import FLOATTYPE as Float\n'), ((3078, 3088), 'nzmath.plugins.FLOATTYPE', 'Float', (['(0.5)'], {}), '(0.5)\n', (3083, 3088), True, 'from nzmath.plugins import FLOATTYPE as Float\n'), ((14370, 14413), 'unittest.makeSuite', 'unittest.makeSuite', (['all_names[name]', '"""test"""'], {}), "(all_names[name], 'test')\n", (14388, 14413), False, 'import unittest\n')] |
from django.urls import reverse
import pytest
@pytest.mark.parametrize('url_name', (
'schema-swagger-ui',
'schema-redoc',
))
def test_docs(url_name, client):
url = reverse(url_name)
response = client.get(url)
assert response.status_code == 200
@pytest.mark.parametrize('response_format', (
'.json',
'.yaml',
))
def test_schema_json(response_format, client):
url = reverse('schema-json', args=(response_format, ))
response = client.get(url)
assert response.status_code == 200
| [
"pytest.mark.parametrize",
"django.urls.reverse"
] | [((50, 124), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""url_name"""', "('schema-swagger-ui', 'schema-redoc')"], {}), "('url_name', ('schema-swagger-ui', 'schema-redoc'))\n", (73, 124), False, 'import pytest\n'), ((278, 340), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""response_format"""', "('.json', '.yaml')"], {}), "('response_format', ('.json', '.yaml'))\n", (301, 340), False, 'import pytest\n'), ((187, 204), 'django.urls.reverse', 'reverse', (['url_name'], {}), '(url_name)\n', (194, 204), False, 'from django.urls import reverse\n'), ((417, 464), 'django.urls.reverse', 'reverse', (['"""schema-json"""'], {'args': '(response_format,)'}), "('schema-json', args=(response_format,))\n", (424, 464), False, 'from django.urls import reverse\n')] |
from rest_framework import request
from rest_framework.viewsets import ModelViewSet
from django.db.models import Q
from docker_host.permissions import IsHostOperationAllowed, HostOperationMixin
from .models import Note
from .serializer import NoteSerializer
class NoteView(ModelViewSet, HostOperationMixin):
queryset = Note.objects
serializer_class = NoteSerializer
permission_classes = [IsHostOperationAllowed]
lookup_field = 'id'
permission_kind = "no"
host_pk = "host__pk"
def get_queryset(self):
return super().get_queryset().filter(
Q(host__pk=self.kwargs.get("host__pk")),
Q(is_public=True) | Q(creator=self.request.user)).order_by('title')
def perform_create(self, serializer):
serializer.save(creator=self.request.user,
host_id=self.kwargs.get("host__pk"))
| [
"django.db.models.Q"
] | [((644, 661), 'django.db.models.Q', 'Q', ([], {'is_public': '(True)'}), '(is_public=True)\n', (645, 661), False, 'from django.db.models import Q\n'), ((664, 692), 'django.db.models.Q', 'Q', ([], {'creator': 'self.request.user'}), '(creator=self.request.user)\n', (665, 692), False, 'from django.db.models import Q\n')] |
import numpy as np
#create array of weekly vaccination numbers from https://opendata-geohive.hub.arcgis.com/datasets/0101ed10351e42968535bb002f94c8c6_0.csv?outSR=%7B%22latestWkid%22%3A3857%2C%22wkid%22%3A102100%7D
a= np.array([3946,
43856,
52659,
49703,
51381,
56267,
32176,
86434,
88578,
88294,
91298,
64535,
133195,
139946,
131038,
155716,
188626,
211497,
245947,
323166,
331292,
305479,
277195,
290362,
357077,
370059,
370544,
390891,
373319,
336086,
300378,
232066,
232234,
229694,
183158,
121650,
108327,
95192,
63718,
43289,
23643,
21081,
24567,
22115,
18434,
15138,
21262,
21259,
19713,
14174,
14862,
])
#print(a.shape)
#print(np.mean(a))
#print(np.median(a))
#print(np.max(a))
#Generate min and % of adult population vaccinated per week
def uptake(value):
WkTotal = np.min(value)
WkTotalStr = "% s" % WkTotal
Str1="Minimum Uptake "
print(Str1 + WkTotalStr)
Str2="% of Population Vaccinated per Week"
print(Str2)
for i in a:
print(i / 4000000 * 100)
uptake(a)
| [
"numpy.array",
"numpy.min"
] | [((219, 633), 'numpy.array', 'np.array', (['[3946, 43856, 52659, 49703, 51381, 56267, 32176, 86434, 88578, 88294, 91298,\n 64535, 133195, 139946, 131038, 155716, 188626, 211497, 245947, 323166, \n 331292, 305479, 277195, 290362, 357077, 370059, 370544, 390891, 373319,\n 336086, 300378, 232066, 232234, 229694, 183158, 121650, 108327, 95192, \n 63718, 43289, 23643, 21081, 24567, 22115, 18434, 15138, 21262, 21259, \n 19713, 14174, 14862]'], {}), '([3946, 43856, 52659, 49703, 51381, 56267, 32176, 86434, 88578, \n 88294, 91298, 64535, 133195, 139946, 131038, 155716, 188626, 211497, \n 245947, 323166, 331292, 305479, 277195, 290362, 357077, 370059, 370544,\n 390891, 373319, 336086, 300378, 232066, 232234, 229694, 183158, 121650,\n 108327, 95192, 63718, 43289, 23643, 21081, 24567, 22115, 18434, 15138, \n 21262, 21259, 19713, 14174, 14862])\n', (227, 633), True, 'import numpy as np\n'), ((786, 799), 'numpy.min', 'np.min', (['value'], {}), '(value)\n', (792, 799), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2020-09-10 14:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("chroma_core", "0024_mountsnapshotjob_unmountsnapshotjob"),
]
operations = [
migrations.CreateModel(
name="CreateSnapshotJob",
fields=[
(
"job_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="chroma_core.Job",
),
),
("fqdn", models.CharField(help_text=b"MGS host to create the snapshot on", max_length=256)),
("fsname", models.CharField(help_text=b"Lustre filesystem name", max_length=8)),
("name", models.CharField(help_text=b"Snapshot to create", max_length=64)),
(
"comment",
models.CharField(help_text=b"Optional comment for the snapshot", max_length=1024, null=True),
),
(
"use_barrier",
models.BooleanField(
default=False,
help_text=b"Set write barrier before creating snapshot. The default value is False",
),
),
],
options={
"ordering": ["id"],
},
bases=("chroma_core.job",),
),
migrations.CreateModel(
name="DestroySnapshotJob",
fields=[
(
"job_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="chroma_core.Job",
),
),
("fqdn", models.CharField(help_text=b"MGS host to destroy the snapshot on", max_length=256)),
("fsname", models.CharField(help_text=b"Lustre filesystem name", max_length=8)),
("name", models.CharField(help_text=b"Snapshot to destroy", max_length=64)),
("force", models.BooleanField(default=False, help_text=b"Destroy the snapshot with force")),
],
options={
"ordering": ["id"],
},
bases=("chroma_core.job",),
),
]
| [
"django.db.models.OneToOneField",
"django.db.models.CharField",
"django.db.models.BooleanField"
] | [((507, 676), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""chroma_core.Job"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'chroma_core.Job')\n", (527, 676), False, 'from django.db import migrations, models\n'), ((879, 964), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': "b'MGS host to create the snapshot on'", 'max_length': '(256)'}), "(help_text=b'MGS host to create the snapshot on',\n max_length=256)\n", (895, 964), False, 'from django.db import migrations, models\n'), ((990, 1057), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': "b'Lustre filesystem name'", 'max_length': '(8)'}), "(help_text=b'Lustre filesystem name', max_length=8)\n", (1006, 1057), False, 'from django.db import migrations, models\n'), ((1085, 1149), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': "b'Snapshot to create'", 'max_length': '(64)'}), "(help_text=b'Snapshot to create', max_length=64)\n", (1101, 1149), False, 'from django.db import migrations, models\n'), ((1221, 1318), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': "b'Optional comment for the snapshot'", 'max_length': '(1024)', 'null': '(True)'}), "(help_text=b'Optional comment for the snapshot', max_length\n =1024, null=True)\n", (1237, 1318), False, 'from django.db import migrations, models\n'), ((1407, 1531), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': "b'Set write barrier before creating snapshot. The default value is False'"}), "(default=False, help_text=\n b'Set write barrier before creating snapshot. 
The default value is False')\n", (1426, 1531), False, 'from django.db import migrations, models\n'), ((1918, 2087), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""chroma_core.Job"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'chroma_core.Job')\n", (1938, 2087), False, 'from django.db import migrations, models\n'), ((2290, 2376), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': "b'MGS host to destroy the snapshot on'", 'max_length': '(256)'}), "(help_text=b'MGS host to destroy the snapshot on',\n max_length=256)\n", (2306, 2376), False, 'from django.db import migrations, models\n'), ((2402, 2469), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': "b'Lustre filesystem name'", 'max_length': '(8)'}), "(help_text=b'Lustre filesystem name', max_length=8)\n", (2418, 2469), False, 'from django.db import migrations, models\n'), ((2497, 2562), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': "b'Snapshot to destroy'", 'max_length': '(64)'}), "(help_text=b'Snapshot to destroy', max_length=64)\n", (2513, 2562), False, 'from django.db import migrations, models\n'), ((2591, 2676), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': "b'Destroy the snapshot with force'"}), "(default=False, help_text=b'Destroy the snapshot with force'\n )\n", (2610, 2676), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python
from setuptools import setup, find_packages
#if sys.argv[-1] == 'publish':
# os.system('python setup.py sdist upload')
# sys.exit()
with open('bace/__init__.py') as fid:
for line in fid:
if line.startswith('__version__'):
VERSION = line.strip().split()[-1][1:-1]
break
with open('requirements.txt') as fid:
INSTALL_REQUIRES = [l.strip() for l in fid.readlines() if l]
readme = open('README.md').read()
doclink = """
Documentation
-------------
The full documentation is at http://bace.rtfd.org."""
VERSION = '1.0.0'
setup(
name='bace',
version=VERSION,
description='bace',
long_description=readme + '\n\n' + doclink + '\n\n',
author='<NAME>',
url='https://github.com/krzjoa/bace',
packages=find_packages(where='.', exclude=('tests')),
package_dir={'bace': 'bace'},
include_package_data=True,
install_requires=INSTALL_REQUIRES,
license='MIT',
zip_safe=False,
keywords='bayes',
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
],
)
| [
"setuptools.find_packages"
] | [((804, 845), 'setuptools.find_packages', 'find_packages', ([], {'where': '"""."""', 'exclude': '"""tests"""'}), "(where='.', exclude='tests')\n", (817, 845), False, 'from setuptools import setup, find_packages\n')] |
#!/usr/bin/env python3
#
# Author: <NAME>
# Copyright 2015-present, NASA-JPL/Caltech
#
import os
import glob
import datetime
import numpy as np
import isce, isceobj
from isceobj.Alos2Proc.runSwathOffset import swathOffset
from StackPulic import loadTrack
from StackPulic import acquisitionModesAlos2
def cmdLineParse():
    """Parse command-line options for swath-offset estimation.

    With no arguments at all, prints the help text and exits with status 1;
    otherwise returns the parsed argparse namespace.
    """
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='estimate swath offset')
    # The three mandatory string options share the same shape; declare them
    # from a table to keep the definitions uniform.
    required_options = [
        ('-idir', 'idir', 'data directory'),
        ('-date', 'date', 'data acquisition date. format: YYMMDD'),
        ('-output', 'output', 'output file'),
    ]
    for flag, dest, help_text in required_options:
        parser.add_argument(flag, dest=dest, type=str, required=True, help=help_text)
    parser.add_argument('-match', dest='match', action='store_true', default=False,
                        help='do matching when computing adjacent swath offset')

    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    return parser.parse_args()
if __name__ == '__main__':
    inps = cmdLineParse()
    #get user parameters from input
    idir = inps.idir
    date = inps.date
    outputFile = inps.output
    match = inps.match
    #######################################################
    spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2()
    # Frame IDs are the last four characters of each f*_* directory name.
    frames = sorted([x[-4:] for x in glob.glob(os.path.join(idir, 'f*_*'))])
    track = loadTrack(idir, date)
    #save current dir
    dirOriginal = os.getcwd()
    os.chdir(idir)
    # Swath offsets are only meaningful for multi-swath (ScanSAR) acquisitions.
    if (track.operationMode in scansarModes) and (len(track.frames[0].swaths) >= 2):
        for i, frameNumber in enumerate(frames):
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            os.chdir(frameDir)
            mosaicDir = 'mosaic'
            os.makedirs(mosaicDir, exist_ok=True)
            os.chdir(mosaicDir)
            #compute swath offset
            # NOTE(review): relies on the chdir above; swathOffset presumably
            # reads the per-frame SLC relative to the mosaic dir — confirm.
            offsetReference = swathOffset(track.frames[i], date+'.slc', outputFile,
                                       crossCorrelation=match, numberOfAzimuthLooks=10)
            os.chdir('../../')
    else:
        print('there is only one swath, no need to estimate swath offset')
| [
"StackPulic.acquisitionModesAlos2",
"argparse.ArgumentParser",
"os.makedirs",
"isceobj.Alos2Proc.runSwathOffset.swathOffset",
"os.path.join",
"StackPulic.loadTrack",
"os.getcwd",
"os.chdir",
"sys.exit"
] | [((416, 476), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""estimate swath offset"""'}), "(description='estimate swath offset')\n", (439, 476), False, 'import argparse\n'), ((1623, 1646), 'StackPulic.acquisitionModesAlos2', 'acquisitionModesAlos2', ([], {}), '()\n', (1644, 1646), False, 'from StackPulic import acquisitionModesAlos2\n'), ((1738, 1759), 'StackPulic.loadTrack', 'loadTrack', (['idir', 'date'], {}), '(idir, date)\n', (1747, 1759), False, 'from StackPulic import loadTrack\n'), ((1801, 1812), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1810, 1812), False, 'import os\n'), ((1817, 1831), 'os.chdir', 'os.chdir', (['idir'], {}), '(idir)\n', (1825, 1831), False, 'import os\n'), ((1227, 1238), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1235, 1238), False, 'import sys\n'), ((2037, 2055), 'os.chdir', 'os.chdir', (['frameDir'], {}), '(frameDir)\n', (2045, 2055), False, 'import os\n'), ((2102, 2139), 'os.makedirs', 'os.makedirs', (['mosaicDir'], {'exist_ok': '(True)'}), '(mosaicDir, exist_ok=True)\n', (2113, 2139), False, 'import os\n'), ((2152, 2171), 'os.chdir', 'os.chdir', (['mosaicDir'], {}), '(mosaicDir)\n', (2160, 2171), False, 'import os\n'), ((2237, 2346), 'isceobj.Alos2Proc.runSwathOffset.swathOffset', 'swathOffset', (['track.frames[i]', "(date + '.slc')", 'outputFile'], {'crossCorrelation': 'match', 'numberOfAzimuthLooks': '(10)'}), "(track.frames[i], date + '.slc', outputFile, crossCorrelation=\n match, numberOfAzimuthLooks=10)\n", (2248, 2346), False, 'from isceobj.Alos2Proc.runSwathOffset import swathOffset\n'), ((2393, 2411), 'os.chdir', 'os.chdir', (['"""../../"""'], {}), "('../../')\n", (2401, 2411), False, 'import os\n'), ((1696, 1722), 'os.path.join', 'os.path.join', (['idir', '"""f*_*"""'], {}), "(idir, 'f*_*')\n", (1708, 1722), False, 'import os\n')] |
# Generated by Django 2.1 on 2018-10-10 22:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a 'Username' CharField to the 'employee' model of the login app."""

    dependencies = [
        ('login', '0004_auto_20181010_1640'),
    ]

    operations = [
        migrations.AddField(
            model_name='employee',
            name='Username',
            # default='None' (the literal string) backfills existing rows;
            # preserve_default=False drops the default afterwards.
            field=models.CharField(default='None', max_length=50),
            preserve_default=False,
        ),
    ]
| [
"django.db.models.CharField"
] | [((334, 381), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""None"""', 'max_length': '(50)'}), "(default='None', max_length=50)\n", (350, 381), False, 'from django.db import migrations, models\n')] |
import asyncio
import logging
import os
from time import time
from yarl import URL
from ..client.client import Client
from ..client.request import HTTPMethod, Request
from .asynctest import AsyncTest
class TestClient(AsyncTest):
    """Integration tests for Client/Request against live endpoints
    (httpbin.org, google, baidu, douban) — requires network access.
    """
    @classmethod
    def setUpClass(cls):
        # Attach a DEBUG stream handler to the 'client' logger once, so test
        # runs surface the client's request/response logging.
        logger = logging.getLogger('client')
        if not logger.hasHandlers():
            logger.setLevel(logging.DEBUG)
            formatter = logging.Formatter('%(asctime)s - %(levelname)s: %(message)s')
            sh = logging.StreamHandler()
            sh.setLevel(logging.DEBUG)
            sh.setFormatter(formatter)
            logger.addHandler(sh)
    @AsyncTest.asynchronize
    async def test_basic(self):
        """Global headers/cookies merge with per-request values; meta round-trips."""
        setting = {
            'headers': {'global-headers': 'global_headers_value'},
            'cookies': {'global_cookies': 'global_cookies_value'},
        }
        client = Client(setting)
        self.assertEqual(repr(client), '<Client - Pending: 0 (0 batch) Processing: 0/0>')
        req = Request(
            url='http://www.httpbin.org/post',
            method=HTTPMethod.POST,
            headers={'headers': 'headers_value'},
            params={'params': 'params_value'},
            meta={'k': 'v'},
        )
        try:
            resp = await client.submit(req)
            assert resp.status == 200
            self.assertEqual(resp.url, URL('http://www.httpbin.org/post?params=params_value'))
            self.assertEqual(resp.json()['headers']['Global-Headers'], 'global_headers_value')
            self.assertEqual(resp.json()['headers']['Headers'], 'headers_value')
            self.assertEqual(resp.json()['headers']['Cookie'], 'global_cookies=global_cookies_value')
            self.assertEqual(resp.meta, {'k': 'v'})
        finally:
            await client.close()
    @AsyncTest.asynchronize
    async def test_request_body(self):
        """Each body kind (json/form/bytes/text/file) is sent with the matching Content-Type."""
        client = Client()
        try:
            req = Request(
                url='http://www.httpbin.org/post',
                method=HTTPMethod.POST,
                json={'k': 'v'},
            )
            resp = await client.submit(req)
            assert resp.status == 200
            self.assertEqual(resp.json()['json'], {'k': 'v'})
            req = Request(
                url='http://www.httpbin.org/post',
                method=HTTPMethod.POST,
                form={'k': 'v'},
            )
            resp = await client.submit(req)
            assert resp.status == 200
            self.assertEqual(resp.json()['form'], {'k': 'v'})
            req = Request(
                url='http://www.httpbin.org/post',
                method=HTTPMethod.POST,
                body=b'body',
            )
            resp = await client.submit(req)
            assert resp.status == 200
            self.assertEqual(resp.json()['data'], 'body')
            self.assertEqual(resp.json()['headers']['Content-Type'], 'application/octet-stream')
            req = Request(
                url='http://www.httpbin.org/post',
                method=HTTPMethod.POST,
                text='text',
            )
            resp = await client.submit(req)
            assert resp.status == 200
            self.assertEqual(resp.json()['data'], 'text')
            self.assertEqual(resp.json()['headers']['Content-Type'], 'text/plain; charset=utf-8')
            # File uploads stream chunked from disk; use a throwaway temp file.
            with open('./.t', 'wb') as file:
                file.write(b'file')
            req = Request(
                url='http://www.httpbin.org/post',
                method=HTTPMethod.POST,
                file='./.t',
            )
            resp = await client.submit(req)
            assert resp.status == 200
            self.assertEqual(resp.json()['data'], 'file')
            self.assertEqual(resp.json()['headers']['Content-Type'], 'application/octet-stream')
            self.assertEqual(resp.json()['headers']['Transfer-Encoding'], 'chunked')
            os.remove('./.t')
        finally:
            await client.close()
    @AsyncTest.asynchronize
    async def test_timeout(self):
        """Timeout + one retry + sleeps yields a -1 status and a TimeoutError reason."""
        req = Request(
            url='https://www.google.com',
            timeout=2,
            retry=1,
            retry_interval=1,
            sleep=2,
        )
        try:
            client = Client()
            start = time()
            resp = await client.submit(req)
            finish = time()
            # sleep(2) + timeout(2) + retry_interval(1) + second attempt ≈ 5-10s.
            self.assertTrue(5 < finish - start < 10)
            self.assertEqual(resp.status, -1)
            self.assertIn("TimeoutError('2s'", resp.reason)
            self.assertEqual(resp.content, b'')
        finally:
            await client.close()
    @AsyncTest.asynchronize
    async def test_concurrency(self):
        """Two interleaved batches resolve with responses in submission order."""
        baidu_urls = [f'https://www.baidu.com/s?wd={i}' for i in range(5)]
        douban_urls = [f'https://movie.douban.com/top250?start={i}&filter=' for i in range(0, 25*5, 25)]
        requests = []
        [requests.extend([Request(a), Request(b)]) for a, b in zip(baidu_urls, douban_urls)]
        try:
            client = Client()
            future_1 = client.submit(requests[:5])
            future_2 = client.submit(requests[5:])
            print(client)
            await asyncio.sleep(0.2)
            print(client)
            results = await future_1
            results += await future_2
            self.assertEqual(requests, [resp.request for resp in results])
        finally:
            await client.close()
| [
"logging.getLogger",
"logging.StreamHandler",
"logging.Formatter",
"asyncio.sleep",
"yarl.URL",
"time.time",
"os.remove"
] | [((293, 320), 'logging.getLogger', 'logging.getLogger', (['"""client"""'], {}), "('client')\n", (310, 320), False, 'import logging\n'), ((425, 486), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(levelname)s: %(message)s"""'], {}), "('%(asctime)s - %(levelname)s: %(message)s')\n", (442, 486), False, 'import logging\n'), ((504, 527), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (525, 527), False, 'import logging\n'), ((3903, 3920), 'os.remove', 'os.remove', (['"""./.t"""'], {}), "('./.t')\n", (3912, 3920), False, 'import os\n'), ((4267, 4273), 'time.time', 'time', ([], {}), '()\n', (4271, 4273), False, 'from time import time\n'), ((4339, 4345), 'time.time', 'time', ([], {}), '()\n', (4343, 4345), False, 'from time import time\n'), ((1364, 1418), 'yarl.URL', 'URL', (['"""http://www.httpbin.org/post?params=params_value"""'], {}), "('http://www.httpbin.org/post?params=params_value')\n", (1367, 1418), False, 'from yarl import URL\n'), ((5154, 5172), 'asyncio.sleep', 'asyncio.sleep', (['(0.2)'], {}), '(0.2)\n', (5167, 5172), False, 'import asyncio\n')] |
#########################################
## Written by <EMAIL>
## Script to update the desiredCount (# of tasks) for all services have less running than desired.
#########################################
### Parameters ###
cluster = ''
region = ''
# Target task count applied to matching services (0 = scale them to zero).
desiredCount = 0
# AWS Profile Name (Optional)
profile_name = ''
###################
import boto3
from botocore.exceptions import ProfileNotFound
from botocore.exceptions import ClientError
# Prefer the named profile; fall back to the default credential chain.
try:
    session = boto3.session.Session(profile_name=profile_name)
    client = session.client('ecs', region_name=region)
except ProfileNotFound as e:
    print(e)
    try:
        print("Trying without profile...")
        client = boto3.client('ecs', region_name=region)
    except ClientError as e:
        print(e)
# NOTE(review): maxResults=100 returns at most one page; clusters with more
# services would need pagination — confirm cluster size.
try:
    services = client.list_services(cluster=cluster, maxResults=100)
except ClientError as e:
    print(e)
print('---------------------')
# Service names are the portion of each ARN after the first '/'.
service_names = []
for i in services['serviceArns']:
    service_names.append(i.split('/', 1)[1])
for j in service_names:
    # if(j.startswith('sfm')):
    response = client.describe_services(cluster=cluster, services=[j])
    # print(response)
    current_desiredCount = response['services'][0]['desiredCount']
    current_runningCount = response['services'][0]['runningCount']
    ServiceName = response['services'][0]['serviceName']
    # Only touch services whose running count lags their desired count.
    if current_runningCount < current_desiredCount:
        print("Services that have more desired than running will now be scaled down...")
        print("ServiceName: " + ServiceName)
        print("desiredCount = " + str(current_desiredCount) + " runningCount = " + str(current_runningCount))
        client.update_service(cluster=cluster, service=j, desiredCount=desiredCount)
        print("Updated service: " + j + " to a desired count of: " + str(desiredCount))
| [
"boto3.client",
"boto3.session.Session"
] | [((463, 511), 'boto3.session.Session', 'boto3.session.Session', ([], {'profile_name': 'profile_name'}), '(profile_name=profile_name)\n', (484, 511), False, 'import boto3\n'), ((678, 717), 'boto3.client', 'boto3.client', (['"""ecs"""'], {'region_name': 'region'}), "('ecs', region_name=region)\n", (690, 717), False, 'import boto3\n')] |
import json
from datetime import timedelta
import dateutil.parser
from flask import Blueprint, request
from app.models.main import Channel, Performer, Song, Play
# Response codes
CODE_KO = 1  # request failed / invalid input
CODE_OK = 0  # request succeeded
music_ws = Blueprint('music_ws', __name__)  # Flask blueprint for the music API
@music_ws.route('/', methods=['GET'])
def index():
    """Trivial liveness endpoint at the blueprint root."""
    greeting = 'Hello this is dog "/"'
    return greeting
def build_response(result, code, errors=None):
    """Serialize the standard API payload to a JSON string.

    The payload always carries 'result' and 'code'; 'errors' is included
    only when it is truthy (i.e. a non-empty list).
    """
    payload = dict(result=result, code=code)
    if errors:
        payload.update(errors=errors)
    return json.dumps(payload)
# INGESTION
@music_ws.route('/add_channel', methods=['POST'])
def add_channel():
    """Create or update a Channel; expects the form/query field 'name'."""
    channel = request.values.get('name', '')
    response = {'result': "", 'code': CODE_KO}
    if channel:
        # Upsert keyed on the channel name.
        Channel.objects(name=channel).update_one(upsert=True, name=channel)
        response['result'] = "Channel '%s' added/updated" % channel
        response['code'] = CODE_OK
    else:
        response['errors'] = ['Channel name not provided']
    return build_response(**response)
@music_ws.route('/add_performer', methods=['POST'])
def add_performer():
    """Create or update a Performer; expects the form/query field 'name'."""
    performer = request.values.get('name', '')
    response = {'result': "", 'code': CODE_KO}
    if performer:
        # Upsert keyed on the performer name.
        Performer.objects(name=performer).update_one(upsert=True, name=performer)
        response['result'] = "Performer '%s' added/updated" % performer
        response['code'] = CODE_OK
    else:
        response['errors'] = ['Performer name not provided']
    return build_response(**response)
@music_ws.route('/add_song', methods=['POST'])
def add_song():
    """Create or update a Song keyed on (title, performer)."""
    title = request.values.get('title', '')
    performer = request.values.get('performer', '')
    response = {'result': "", 'code': CODE_KO}
    if title and performer:
        Song.objects(title=title, performer=performer).update_one(upsert=True, title=title, performer=performer)
        response['result'] = "Song '%s' by '%s' added/updated" % (title, performer)
        response['code'] = CODE_OK
    else:
        response['errors'] = ['Title or performer not provided']
    return build_response(**response)
@music_ws.route('/add_play', methods=['POST'])
def add_play():
    """Register a broadcast of (title, performer) on a channel between
    'start' and 'end' (UTC ISO 8601 strings); upserts on the full play tuple.
    """
    title = request.values.get('title', '')
    performer = request.values.get('performer', 'unknown-performer')
    start = request.values.get('start', '')
    end = request.values.get('end', '')
    channel = request.values.get('channel', '')
    r = {'result': "", 'code': CODE_KO, 'errors': []}
    if not all([title, performer, start, end, channel]):
        r['errors'].append('Title, Performer, Start, End or Channel not provided')
        return build_response(**r)
    # Materialize eagerly: under Python 3 the helper may hand back a lazy map
    # object (always truthy; parse errors surface only on consumption), so
    # force evaluation here and treat any failure as a bad date.
    try:
        parsed_start, parsed_end = list(_parse_date_helper([start, end]))
    except (TypeError, ValueError, OverflowError):
        r['errors'].append("Invalid date format, please provide dates in UTC ISO 8601")
        return build_response(**r)
    play_data = dict(title=title, performer=performer, start=parsed_start, end=parsed_end, channel=channel)
    Play.objects(**play_data).update_one(upsert=True, **play_data)
    r['result'] = "Play '%s' added/updated" % (", ".join(["%s: %s" % (k, v) for k, v in play_data.items()]))
    r['code'] = CODE_OK
    return build_response(**r)
# REQUEST
@music_ws.route('/get_song_plays', methods=['GET'])
def get_song_plays():
    """List channel/start/end of every play of a song within [start, end]."""
    title = request.values.get('title', '')
    performer = request.values.get('performer', '')
    start = request.values.get('start', '')
    end = request.values.get('end', '')
    r = {'result': [], 'code': CODE_KO, 'errors': []}
    if not all([title, performer, start, end]):
        r['errors'].append('Title, Performer, Start or End not provided')
        return build_response(**r)
    # Materialize eagerly: under Python 3 the helper may hand back a lazy map
    # object whose parse errors only surface on unpacking; catch them here so
    # bad dates produce the friendly error instead of a 500.
    try:
        parsed_start, parsed_end = list(_parse_date_helper([start, end]))
    except (TypeError, ValueError, OverflowError):
        r['errors'].append("Invalid date format, please provide dates in UTC ISO 8601")
        return build_response(**r)
    plays = Play.objects(start__gte=parsed_start, end__lte=parsed_end, title=title, performer=performer)
    r['result'] = prepare_song_plays(plays)
    r['code'] = CODE_OK
    return build_response(**r)
@music_ws.route('/get_channel_plays', methods=['GET'])
def get_channel_plays():
    """List (performer, title, start, end) of everything a channel played
    within [start, end] (UTC ISO 8601).
    """
    channel = request.values.get('channel', '')
    start = request.values.get('start', '')
    end = request.values.get('end', '')
    r = {'result': [], 'code': CODE_KO, 'errors': []}
    if not all([channel, start, end]):
        # Fixed copy-pasted message: this endpoint requires a channel,
        # not title/performer.
        r['errors'].append('Channel, Start or End not provided')
        return build_response(**r)
    # Materialize eagerly: under Python 3 the helper may hand back a lazy map
    # object whose parse errors only surface on unpacking; catch them here.
    try:
        parsed_start, parsed_end = list(_parse_date_helper([start, end]))
    except (TypeError, ValueError, OverflowError):
        r['errors'].append("Invalid date format, please provide dates in UTC ISO 8601")
        return build_response(**r)
    plays = Play.objects(start__gte=parsed_start, end__lte=parsed_end, channel=channel)
    r['result'] = prepare_channel_plays(plays)
    r['code'] = CODE_OK
    return build_response(**r)
@music_ws.route('/get_top', methods=['GET'])
def get_top():
    """Top-N chart for a set of channels, for the week containing 'start',
    annotated with each entry's plays/rank from the previous week.
    """
    channels = json.loads(request.values.get('channels', '{}'))
    start = request.values.get('start', '')
    r = {'result': [], 'code': CODE_KO, 'errors': []}
    try:
        limit = int(request.values.get('limit', 0))
    except (TypeError, ValueError):  # narrowed from a bare except
        r['errors'].append("Invalid limit, provide a valid integer")
        return build_response(**r)
    # Materialize eagerly: under Python 3 the helper may hand back a lazy map
    # object, which is always truthy and not subscriptable — indexing it
    # was a guaranteed TypeError. Convert to a list and catch parse errors.
    try:
        start_dates = list(_parse_date_helper(start))
    except (TypeError, ValueError, OverflowError):
        start_dates = []
    if not start_dates:
        r['errors'].append("Invalid date format, please provide dates in UTC ISO 8601")
        return build_response(**r)
    # From the given date subtract its weekday position to get the start and
    # end of the week that contains it.
    start_week = start_dates[0] - timedelta(days=start_dates[0].weekday())
    end_week = start_week + timedelta(days=6)
    # calculate current week
    top_plays = get_top_aggregate(channels, start_week, end_week, limit)
    # calculate past week
    # TODO : this query should be cached or pre-calculated in another structure.
    lastweek_start = start_week - timedelta(days=7)
    lastweek_end = lastweek_start + timedelta(days=6)
    top_plays_lastweek = get_top_aggregate(channels, lastweek_start, lastweek_end, limit)
    r['result'] = prepare_top_plays(top_plays, top_plays_lastweek)
    r['code'] = CODE_OK
    return build_response(**r)
# Helpers
def _parse_date_helper(dates):
    """
    Parse one date string or a list of date strings (UTC ISO 8601 expected).

    Returns a list of datetimes, or an empty list when any input fails to
    parse. Always returns a real list: the previous implementation returned
    a lazy ``map`` object under Python 3, which made the ``try/except`` dead
    (errors raised only on consumption), was always truthy even for bad
    input, and could not be indexed by callers such as ``get_top``.
    """
    if not isinstance(dates, list):
        dates = [dates]
    try:
        return [dateutil.parser.parse(d) for d in dates]
    except (TypeError, ValueError, OverflowError):
        return []
# TODO : prepare_song_plays and prepare_channel_plays could be more generic.
def prepare_song_plays(plays):
    """Serialize play records to JSON-safe dicts of channel/start/end."""
    return [
        {
            'channel': play.channel,
            'start': play.start.isoformat(),
            'end': play.end.isoformat(),
        }
        for play in plays
    ]
def prepare_channel_plays(plays):
    """Serialize play records to JSON-safe dicts of performer/title/start/end."""
    return [
        {
            'performer': play.performer,
            'title': play.title,
            'start': play.start.isoformat(),
            'end': play.end.isoformat(),
        }
        for play in plays
    ]
def prepare_top_plays(plays, lastweek_plays):
    """
    Annotate current-week chart entries in place with this week's rank and
    the matching entry's play count and rank from last week.

    TODO : This is a temporary process, all top plays should be summarized
    and stored in the DB.
    """
    # (title, performer) -> (last-week rank, play count). Later duplicates
    # overwrite earlier ones, matching the original linear scan where the
    # last match won.
    previous = {
        (lp['title'], lp['performer']): (idx, lp['plays'])
        for idx, lp in enumerate(lastweek_plays)
    }
    for rank, entry in enumerate(plays):
        prev_rank, prev_plays = previous.get((entry['title'], entry['performer']), (None, 0))
        entry['previous_plays'] = prev_plays
        entry['previous_rank'] = prev_rank
        entry['rank'] = rank
    return plays
def get_top_aggregate(channels, start_week, end_week, limit):
    """Aggregate play counts per (performer, title) on the given channels
    within [start_week, end_week], sorted by plays descending and truncated
    to *limit* entries.
    """
    # Keep plays on the requested channels inside the week window.
    match_stage = {"$match": {
        "channel": {"$in": channels},
        "start": {"$gte": start_week},
        "end": {"$lte": end_week},
    }}
    # Count plays per (performer, title).
    group_stage = {"$group": {
        "_id": {"performer": "$performer", "title": "$title"},
        "plays": {"$sum": 1},
    }}
    # Flatten the compound _id back into top-level fields.
    project_stage = {"$project": {
        "_id": 0,
        "performer": "$_id.performer",
        "title": "$_id.title",
        "plays": 1,
    }}
    pipeline = [
        match_stage,
        group_stage,
        project_stage,
        {"$sort": {"plays": -1}},
        {"$limit": limit},
    ]
    cursor = Play._get_collection().aggregate(pipeline)
    return list(cursor)
"app.models.main.Performer.objects",
"json.dumps",
"app.models.main.Play._get_collection",
"app.models.main.Play.objects",
"app.models.main.Song.objects",
"flask.request.values.get",
"datetime.timedelta",
"flask.Blueprint",
"app.models.main.Channel.objects"
] | [((218, 249), 'flask.Blueprint', 'Blueprint', (['"""music_ws"""', '__name__'], {}), "('music_ws', __name__)\n", (227, 249), False, 'from flask import Blueprint, request\n'), ((483, 496), 'json.dumps', 'json.dumps', (['r'], {}), '(r)\n', (493, 496), False, 'import json\n'), ((596, 626), 'flask.request.values.get', 'request.values.get', (['"""name"""', '""""""'], {}), "('name', '')\n", (614, 626), False, 'from flask import Blueprint, request\n'), ((1045, 1075), 'flask.request.values.get', 'request.values.get', (['"""name"""', '""""""'], {}), "('name', '')\n", (1063, 1075), False, 'from flask import Blueprint, request\n'), ((1496, 1527), 'flask.request.values.get', 'request.values.get', (['"""title"""', '""""""'], {}), "('title', '')\n", (1514, 1527), False, 'from flask import Blueprint, request\n'), ((1544, 1579), 'flask.request.values.get', 'request.values.get', (['"""performer"""', '""""""'], {}), "('performer', '')\n", (1562, 1579), False, 'from flask import Blueprint, request\n'), ((2069, 2100), 'flask.request.values.get', 'request.values.get', (['"""title"""', '""""""'], {}), "('title', '')\n", (2087, 2100), False, 'from flask import Blueprint, request\n'), ((2117, 2169), 'flask.request.values.get', 'request.values.get', (['"""performer"""', '"""unknown-performer"""'], {}), "('performer', 'unknown-performer')\n", (2135, 2169), False, 'from flask import Blueprint, request\n'), ((2182, 2213), 'flask.request.values.get', 'request.values.get', (['"""start"""', '""""""'], {}), "('start', '')\n", (2200, 2213), False, 'from flask import Blueprint, request\n'), ((2224, 2253), 'flask.request.values.get', 'request.values.get', (['"""end"""', '""""""'], {}), "('end', '')\n", (2242, 2253), False, 'from flask import Blueprint, request\n'), ((2268, 2301), 'flask.request.values.get', 'request.values.get', (['"""channel"""', '""""""'], {}), "('channel', '')\n", (2286, 2301), False, 'from flask import Blueprint, request\n'), ((3268, 3299), 'flask.request.values.get', 
'request.values.get', (['"""title"""', '""""""'], {}), "('title', '')\n", (3286, 3299), False, 'from flask import Blueprint, request\n'), ((3316, 3351), 'flask.request.values.get', 'request.values.get', (['"""performer"""', '""""""'], {}), "('performer', '')\n", (3334, 3351), False, 'from flask import Blueprint, request\n'), ((3364, 3395), 'flask.request.values.get', 'request.values.get', (['"""start"""', '""""""'], {}), "('start', '')\n", (3382, 3395), False, 'from flask import Blueprint, request\n'), ((3406, 3435), 'flask.request.values.get', 'request.values.get', (['"""end"""', '""""""'], {}), "('end', '')\n", (3424, 3435), False, 'from flask import Blueprint, request\n'), ((4237, 4270), 'flask.request.values.get', 'request.values.get', (['"""channel"""', '""""""'], {}), "('channel', '')\n", (4255, 4270), False, 'from flask import Blueprint, request\n'), ((4283, 4314), 'flask.request.values.get', 'request.values.get', (['"""start"""', '""""""'], {}), "('start', '')\n", (4301, 4314), False, 'from flask import Blueprint, request\n'), ((4325, 4354), 'flask.request.values.get', 'request.values.get', (['"""end"""', '""""""'], {}), "('end', '')\n", (4343, 4354), False, 'from flask import Blueprint, request\n'), ((5175, 5206), 'flask.request.values.get', 'request.values.get', (['"""start"""', '""""""'], {}), "('start', '')\n", (5193, 5206), False, 'from flask import Blueprint, request\n'), ((5125, 5161), 'flask.request.values.get', 'request.values.get', (['"""channels"""', '"""{}"""'], {}), "('channels', '{}')\n", (5143, 5161), False, 'from flask import Blueprint, request\n'), ((5856, 5873), 'datetime.timedelta', 'timedelta', ([], {'days': '(6)'}), '(days=6)\n', (5865, 5873), False, 'from datetime import timedelta\n'), ((6119, 6136), 'datetime.timedelta', 'timedelta', ([], {'days': '(7)'}), '(days=7)\n', (6128, 6136), False, 'from datetime import timedelta\n'), ((6173, 6190), 'datetime.timedelta', 'timedelta', ([], {'days': '(6)'}), '(days=6)\n', (6182, 6190), False, 
'from datetime import timedelta\n'), ((3724, 3820), 'app.models.main.Play.objects', 'Play.objects', ([], {'start__gte': 'parsed_start', 'end__lte': 'parsed_end', 'title': 'title', 'performer': 'performer'}), '(start__gte=parsed_start, end__lte=parsed_end, title=title,\n performer=performer)\n', (3736, 3820), False, 'from app.models.main import Channel, Performer, Song, Play\n'), ((4634, 4709), 'app.models.main.Play.objects', 'Play.objects', ([], {'start__gte': 'parsed_start', 'end__lte': 'parsed_end', 'channel': 'channel'}), '(start__gte=parsed_start, end__lte=parsed_end, channel=channel)\n', (4646, 4709), False, 'from app.models.main import Channel, Performer, Song, Play\n'), ((5290, 5320), 'flask.request.values.get', 'request.values.get', (['"""limit"""', '(0)'], {}), "('limit', 0)\n", (5308, 5320), False, 'from flask import Blueprint, request\n'), ((8136, 8158), 'app.models.main.Play._get_collection', 'Play._get_collection', ([], {}), '()\n', (8156, 8158), False, 'from app.models.main import Channel, Performer, Song, Play\n'), ((692, 721), 'app.models.main.Channel.objects', 'Channel.objects', ([], {'name': 'channel'}), '(name=channel)\n', (707, 721), False, 'from app.models.main import Channel, Performer, Song, Play\n'), ((1143, 1176), 'app.models.main.Performer.objects', 'Performer.objects', ([], {'name': 'performer'}), '(name=performer)\n', (1160, 1176), False, 'from app.models.main import Channel, Performer, Song, Play\n'), ((1657, 1703), 'app.models.main.Song.objects', 'Song.objects', ([], {'title': 'title', 'performer': 'performer'}), '(title=title, performer=performer)\n', (1669, 1703), False, 'from app.models.main import Channel, Performer, Song, Play\n'), ((2707, 2732), 'app.models.main.Play.objects', 'Play.objects', ([], {}), '(**play_data)\n', (2719, 2732), False, 'from app.models.main import Channel, Performer, Song, Play\n')] |
from django.core.validators import MinValueValidator
from django.db import models
from django.utils.translation import ugettext_lazy as _
from core.models import AbstractBaseModel
class RevenueGroup(AbstractBaseModel):
    """Named revenue band bounded by ``revenue_from`` and ``revenue_to``."""
    # Unique display name of the band.
    name = models.CharField(_('Name'), max_length=255, unique=True)
    # Lower bound: non-negative decimal, up to 12 integer digits + 2 decimals.
    revenue_from = models.DecimalField(_('From'), max_digits=14, decimal_places=2, validators=[MinValueValidator(0)])
    # Upper bound: non-negative decimal with the same precision.
    revenue_to = models.DecimalField(_('To'), max_digits=14, decimal_places=2, validators=[MinValueValidator(0)])
| [
"django.core.validators.MinValueValidator",
"django.utils.translation.ugettext_lazy"
] | [((250, 259), 'django.utils.translation.ugettext_lazy', '_', (['"""Name"""'], {}), "('Name')\n", (251, 259), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((329, 338), 'django.utils.translation.ugettext_lazy', '_', (['"""From"""'], {}), "('From')\n", (330, 338), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((445, 452), 'django.utils.translation.ugettext_lazy', '_', (['"""To"""'], {}), "('To')\n", (446, 452), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((385, 405), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {}), '(0)\n', (402, 405), False, 'from django.core.validators import MinValueValidator\n'), ((499, 519), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0)'], {}), '(0)\n', (516, 519), False, 'from django.core.validators import MinValueValidator\n')] |
# -*- coding: utf-8 -*-
"""
Written by <NAME>
Email: danaukes<at>gmail.com
Please see LICENSE for full license.
"""
import sympy
import pynamics
class Vector(object):
def __init__(self,components=None):
self.components = {}
components=components or {}
for frame,vec in components.items():
self.add_component(frame,vec)
self.clean()
def add_component(self,frame,vector):
try:
self.components[frame]+=vector
except KeyError:
self.components[frame]=sympy.Matrix(vector)
def __str__(self):
return str(self.symbolic())
def __repr__(self):
return str(self)
def __mul__(self,other):
newvec = Vector(self.components.copy())
for key,value in newvec.components.items():
newvec.components[key] *= other
newvec.clean()
return newvec
def __rmul__(self,other):
return self.__mul__(other)
def __add__(self,other):
newvec = Vector()
newvec.components = self.components.copy()
if other!=0:
for frame,vector in other.components.items():
newvec.add_component(frame,vector)
newvec.clean()
return newvec
def __radd__(self,other):
return self.__add__(other)
def __sub__(self,other):
newvec = Vector()
newvec.components = self.components.copy()
for frame,vector in other.components.items():
newvec.add_component(frame,-vector)
newvec.clean()
return newvec
def __neg__(self):
newvec = Vector()
newvec.components = self.components.copy()
for frame,vector in newvec.components.items():
newvec.components[frame]=-vector
newvec.clean()
return newvec
def dot(self,other,frame='mid'):
from pynamics.dyadic import Dyad,Dyadic
result = sympy.Number(0)
if isinstance(other,Dyad) or isinstance(other,Dyadic):
return other.rdot(self)
return self.product_simplest(other,result,'dot',self.frame_dot,frame=frame)
# return self.product_simple(other,result,'dot',self.frame_dot)
# return self.product_by_basis_vectors(other,result,'dot',self.frame_dot)
def cross(self,other,frame='mid'):
from pynamics.dyadic import Dyad,Dyadic
result = Vector()
if isinstance(other,Dyad) or isinstance(other,Dyadic):
return other.rcross(self)
result = self.product_simplest(other,result,'cross',self.frame_cross,frame=frame)
# result = self.product_simple(other,result,'cross',self.frame_cross)
# result = self.product_by_basis_vectors(other,result,'cross',self.frame_cross)
result.clean()
return result
def unit(self):
return (1/self.length())*self
def length(self):
return (self.dot(self))**.5
    def simplify(self):
        """Simplify every component matrix and return the resulting vector.

        NOTE(review): ``newvec.components`` is a *shallow* copy of
        ``self.components``, and ``vector.simplify()`` appears to rely on
        sympy's in-place Matrix simplification — if so, the matrices shared
        with ``self`` are mutated too, and the returned "new" vector aliases
        them. Confirm whether mutating ``self`` is intended.
        """
        newvec = Vector()
        newvec.components = self.components.copy()
        for frame,vector in newvec.components.items():
            vector.simplify()
        return newvec
@staticmethod
def frame_dot(v1,v2,frame):
return sympy.Matrix.dot(v1.components[frame],v2.components[frame])
@staticmethod
def frame_cross(v1,v2,frame):
return Vector({frame:sympy.Matrix.cross(v1.components[frame],v2.components[frame])})
# def product_in_parts(self,other,result,function,method='source'):
# from pynamics.frame import Frame
#
# for frame1,vector1 in self.components.items():
# if method=='source':
# frame = frame1
# for frame2,vector2 in other.components.items():
# if method=='dest':
# frame = frame2
# elif isinstance(method,Frame):
# frame = method
# v1 = Vector({frame1:vector1})
# v1 = v1.express(frame)
# v2 = Vector({frame2:vector2})
# v2 = v2.express(frame)
# localresult = function(v1,v2,frame)
# result=result+localresult
## result.clean()
# return result
def copy(self):
newvec = Vector(self.components.copy())
newvec.clean()
return newvec
    def product_by_basis_vectors(self,other,result_seed,function,inner_function):
        """Compute a product (dot/cross) by splitting both operands into
        per-frame basis-vector pieces and combining each pair in the
        cheapest representation frame.

        Parameters: *other* is the right operand, *result_seed* the additive
        identity to accumulate into, *function* a label passed to
        ``efficient_rep``, and *inner_function* the per-frame combiner
        (e.g. ``frame_dot``/``frame_cross``).

        NOTE(review): relies on ``split_by_nonzero_basis_vectors`` and
        ``Frame.efficient_rep``, which are defined elsewhere — the assumed
        shapes (``{frame: (basis_pairs, whole_vector)}`` and a mapping keyed
        by frozensets of basis keys) should be confirmed against those
        definitions.
        """
        self = self.copy()
        other = other.copy()
        a = self.split_by_nonzero_basis_vectors()
        b = other.split_by_nonzero_basis_vectors()
        result = result_seed.copy()
        for frame1 in a.keys():
            for frame2 in b.keys():
                if frame1!=frame2:
                    # Cross-frame pair: look up the cheapest frame for each
                    # basis-vector combination and express both pieces there.
                    rep = frame1.efficient_rep(frame2,function)
                    for bv1 in a[frame1][0]:
                        for bv2 in b[frame2][0]:
                            efficient_frame = rep[frozenset((bv1[0],bv2[0]))]
                            v1 = bv1[1].express(efficient_frame)
                            v2 = bv2[1].express(efficient_frame)
                            result+=inner_function(v1,v2,efficient_frame)
                else:
                    # Same frame: combine the whole components directly.
                    v1 = a[frame1][1]
                    v2 = b[frame2][1]
                    result+=inner_function(v1,v2,frame1)
#        result.clean()
        # Return whichever of the raw or expanded form prints shorter.
        if len(str(result))<len(str(result.expand())):
            return result
        else:
            return result.expand()
# result = result.expand()
# return result
# result2 = self.product_simple(other,result_seed,function,inner_function).expand()
# if len(str(result))<=len(str(result2)):
# print('1',result,result2)
# return result
# else:
# print('2',result,result2)
# return result2
    def product_simplest(self,other,result_seed,function,inner_function,frame = 'mid'):
        """Accumulate a pairwise product of components into *result_seed*.

        *frame* selects where each component pair is expressed before
        combining: 'source' uses the left operand's frame, 'dest' the right
        operand's, 'mid' the frame at the midpoint of the rotation-tree path
        between the two, and any other value is taken as an explicit frame.
        *inner_function* (e.g. ``frame_dot``/``frame_cross``) performs the
        per-frame combination; *function* is currently unused here.
        """
        result = result_seed.copy()
        for frame2,vec2 in other.components.items():
            vector2 = Vector({frame2:vec2})
            for frame1,vec1 in self.components.items():
                if frame == 'source':
                    expressed_frame = frame1
                elif frame == 'dest':
                    expressed_frame = frame2
                elif frame == 'mid':
                    # Walk the rotation tree between the two frames and pick
                    # the node halfway along (biased left for even lengths).
                    path = frame1.tree['R'].path_to(frame2.tree['R'])
                    m = len(path)
                    if m%2==0:
                        ii = int(m/2)-1
                    else:
                        ii = int(m/2)
                    expressed_frame = path[ii].myclass
                else:
                    expressed_frame = frame
                vector1 = Vector({frame1:vec1})
                localresult = inner_function(vector1.express(expressed_frame),vector2.express(expressed_frame),expressed_frame)
                result+=localresult
#        result.clean()
        return result
def product_simple(self,other,result_seed,function,inner_function):
allframes = []
frames_self = self.components.keys()
frames_other = other.components.keys()
for frame1 in frames_self:
for frame2 in frames_other:
path = frame1.tree['R'].path_to(frame2.tree['R'])
frames = [item.myclass for item in path]
allframes.extend(frames)
results = []
for frame in allframes:
v1 = self.express(frame)
v2 = other.express(frame)
results.append(result_seed+inner_function(v1,v2,frame))
lens = [len(str(item)) for item in results]
shortest = sorted(lens)[0]
result = results[lens.index(shortest)]
# result.clean()
return result
def time_derivative(self,reference_frame = None,system=None):
system = system or pynamics.get_system()
reference_frame = reference_frame or system.newtonian
result = Vector()
for frame,vector in self.components.items():
result+= Vector({frame:system.derivative(vector)})
v1 = Vector({frame:vector})
w_ = reference_frame.get_w_to(frame).express(frame)
result+=w_.cross(v1,frame = 'mid')
result.clean()
return result
def express(self,other):
self = self.copy()
# results = []
try:
# results.append(self.components.pop(other))
result = Vector({other:self.components.pop(other)})
except KeyError:
result = Vector()
# pass
for frame,vec in self.components.items():
R = frame.get_r_to(other)
rq = frame.get_rq_to(other)
if pynamics.use_quaternions:
result+=Vector({other:rq.rotate(vec)})
else:
result+=Vector({other:R*vec})
# results.append()
# result = results.pop()
# while not not results:
# result+=results.pop()
# new = Vector({other:result})
result.clean()
return result
def symbolic(self):
result = sympy.Number(0)
for frame,vec in self.components.items():
result+=frame.syms.dot(vec)
return result
def diff_partial_local(self,var):
newvec = Vector()
for frame,vec in self.components.items():
result = vec.diff(var)
newvec.components[frame] = result
newvec.clean()
return newvec
# def diff_simple(self,frame,sys=None):
# sys = sys or pynamics.get_system()
# v = self.express(frame).components[frame]
# dv = sys.derivative(v)
# newvec = Vector({frame:dv})
# newvec.clean()
# return newvec
def split_by_frame(self):
output_vectors = []
for frame,vec in self.components.items():
output_vectors.append(Vector({frame:vec}))
return output_vectors
def nonzero_basis_vectors(self):
bvs = []
for frame, vec in self.components.items():
for val,sym in zip(vec,frame.syms):
if val!=0:
bvs.append(sym)
return bvs
    def split_by_nonzero_basis_vectors(self):
        """Group the nonzero components by frame.

        Returns a dict mapping each frame with a nonzero component matrix to
        a pair ``(terms, vector)``, where ``vector`` is the single-frame
        Vector and ``terms`` lists ``(symbol, coefficient*basis_vector)``
        tuples for each nonzero coordinate.
        """
        bvs = {}
        for frame, vec in self.components.items():
            # Skip frames whose 3x1 component matrix is identically zero.
            if vec!=sympy.Matrix([0,0,0]):
                bvs[frame] = [],Vector({frame:vec})
                for val,sym,bv in zip(vec,frame.syms,[frame.x,frame.y,frame.z]):
                    if val!=0:
                        bvs[frame][0].append((sym,val*bv))
        return bvs
def expand(self):
new = self.copy()
for key in new.components:
new.components[key] = new.components[key].expand()
return new
def atoms(self,*args,**kwargs):
atoms = []
for value in self.components.values():
atoms.extend(value.atoms(*args,**kwargs))
return set(atoms)
def clean(self):
zero_keys = [frame for frame,vector in self.components.items() if vector.is_zero]
for key in zero_keys:
self.components.pop(key)
return self
def frames(self):
nonzero_frames = [frame for frame,vector in self.components.items() if not vector.is_zero]
return nonzero_frames
def subs(self,*args,**kwargs):
new = self.copy()
for key in self.components:
new.components[key] = self.components[key].subs(*args,**kwargs)
return new
| [
"sympy.Matrix",
"sympy.Matrix.dot",
"sympy.Matrix.cross",
"sympy.Number",
"pynamics.get_system"
] | [((1929, 1944), 'sympy.Number', 'sympy.Number', (['(0)'], {}), '(0)\n', (1941, 1944), False, 'import sympy\n'), ((3211, 3271), 'sympy.Matrix.dot', 'sympy.Matrix.dot', (['v1.components[frame]', 'v2.components[frame]'], {}), '(v1.components[frame], v2.components[frame])\n', (3227, 3271), False, 'import sympy\n'), ((9188, 9203), 'sympy.Number', 'sympy.Number', (['(0)'], {}), '(0)\n', (9200, 9203), False, 'import sympy\n'), ((7903, 7924), 'pynamics.get_system', 'pynamics.get_system', ([], {}), '()\n', (7922, 7924), False, 'import pynamics\n'), ((554, 574), 'sympy.Matrix', 'sympy.Matrix', (['vector'], {}), '(vector)\n', (566, 574), False, 'import sympy\n'), ((3353, 3415), 'sympy.Matrix.cross', 'sympy.Matrix.cross', (['v1.components[frame]', 'v2.components[frame]'], {}), '(v1.components[frame], v2.components[frame])\n', (3371, 3415), False, 'import sympy\n'), ((10401, 10424), 'sympy.Matrix', 'sympy.Matrix', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (10413, 10424), False, 'import sympy\n')] |
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""pipeformer."""
import uuid
from typing import Iterator, Optional
from .deploy import Deployer
from .identifiers import __version__
from .internal.arg_parsing import parse_args
from .internal.logging_utils import setup_logger
from .internal.structures import Config
__all__ = ("__version__", "cli")
def cli(raw_args: Optional[Iterator[str]] = None):
    """CLI entry point. Processes arguments, sets up the key provider, and processes requested action.

    :returns: Execution return value intended for ``sys.exit()``
    """
    args = parse_args(raw_args)
    setup_logger(args.verbosity, args.quiet)
    # 1. parse config file
    project = Config.from_file(args.config)
    # TODO: Use a better prefix
    # The last dash-separated group of a UUID4 string is its final 12 hex
    # digits, i.e. uuid4().hex[-12:].
    prefix = "pipeformer-" + uuid.uuid4().hex[-12:]
    project_deployer = Deployer(project=project, stack_prefix=prefix)
    project_deployer.deploy_standalone()
| [
"uuid.uuid4"
] | [((1311, 1323), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1321, 1323), False, 'import uuid\n')] |
from django.urls import path, include
from rest_framework import routers
from . import views
# DRF router generates the standard list/detail routes for each ViewSet.
router = routers.DefaultRouter()
router.register("category", views.CategoryViewSet)
router.register("product", views.ProductViewSet)
# Mount every router-generated URL at the app root.
urlpatterns = [
    path("", include(router.urls))
]
| [
"rest_framework.routers.DefaultRouter",
"django.urls.include"
] | [((103, 126), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (124, 126), False, 'from rest_framework import routers\n'), ((257, 277), 'django.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (264, 277), False, 'from django.urls import path, include\n')] |
import numpy as np
import pandas as pd
# Load the training examples: every column but the last is an attribute,
# the last column is the yes/no target concept.
data = pd.DataFrame(data=pd.read_csv('enjoysport.csv'))
concepts = np.array(data.iloc[:,0:-1])
print('Concepts:', concepts)
target = np.array(data.iloc[:,-1])
print('Target:', target)
def learn(concepts, target):
    """Candidate Elimination: compute the version-space boundaries.

    Parameters
    ----------
    concepts : array-like of shape (n_examples, n_attributes)
        Attribute values of the training examples.
    target : array-like of shape (n_examples,)
        'yes' for positive examples, 'no' for negative ones.

    Returns
    -------
    (specific_h, general_h)
        The maximally specific hypothesis and the list of maximally
        general hypotheses (fully generic rows removed).
    """
    print("Initialization of specific_h and general_h")
    specific_h = concepts[0].copy()
    print('\t specific_h:', specific_h)
    # Derive the attribute count from the data instead of hard-coding it,
    # so the algorithm works for any dataset width.
    n_attrs = len(specific_h)
    general_h = [["?" for i in range(n_attrs)] for i in range(n_attrs)]
    print('\t general_h:', general_h)
    for i, h in enumerate(concepts):
        if target[i] == "yes":
            # Positive example: generalize specific_h where it disagrees.
            for x in range(n_attrs):
                if h[x] != specific_h[x]:
                    specific_h[x] = '?'
                    general_h[x][x] = '?'
        if target[i] == "no":
            # Negative example: specialize general_h on disagreeing attributes.
            for x in range(n_attrs):
                if h[x] != specific_h[x]:
                    general_h[x][x] = specific_h[x]
                else:
                    general_h[x][x] = '?'
        print("\n Steps of Candidate Elimination Algorithm", i + 1)
        print('\t specific_h', specific_h)
        print('\t general_h:', general_h)
    # Drop fully generic hypotheses.  The previous implementation hard-coded
    # a six-attribute generic row, which broke on other dataset widths.
    generic_row = ['?'] * n_attrs
    general_h = [row for row in general_h if row != generic_row]
    return specific_h, general_h
# Run Candidate Elimination on the loaded dataset and report the final
# version-space boundaries.
s_final, g_final = learn(concepts, target)
print("\n Final specific_h:", s_final, sep="\n")
print("\n Final general_h:", g_final, sep="\n")
| [
"numpy.array",
"pandas.read_csv"
] | [((111, 139), 'numpy.array', 'np.array', (['data.iloc[:, 0:-1]'], {}), '(data.iloc[:, 0:-1])\n', (119, 139), True, 'import numpy as np\n'), ((180, 206), 'numpy.array', 'np.array', (['data.iloc[:, -1]'], {}), '(data.iloc[:, -1])\n', (188, 206), True, 'import numpy as np\n'), ((67, 96), 'pandas.read_csv', 'pd.read_csv', (['"""enjoysport.csv"""'], {}), "('enjoysport.csv')\n", (78, 96), True, 'import pandas as pd\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#######################################
#-------------------------------------#
# Module: Frontera Eficiente #
#-------------------------------------#
# Creado: #
# 20. 04. 2019 #
# Ult. modificacion: #
# 23. 04. 2019 #
#-------------------------------------#
# Autor: <NAME> #
#-------------------------------------#
#-------------------------------------#
#-------------------------------------#
#-------------------------------------#
#######################################
import pandas as pd
import numpy as np
import seaborn as sns
sns.set(font_scale=1.5)
import datetime as dt
import matplotlib.pylab as plt
from sklearn import linear_model
from sklearn.model_selection import train_test_split
import os
import pywt
from statsmodels.robust import mad
import statsmodels.formula.api as sm
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
warnings.filterwarnings(action="ignore", module="scipy", message="^internal gelsd")
import scipy.optimize as sco
#########################################
#---------------------------------------#
# Aux. functions
#---------------------------------------#
#########################################
def alpha_0( Num_Days ):
    """Smoothing factor alpha = 2 / (N + 1) of an N-day exponential average."""
    return 2.0 / (Num_Days + 1.0)
def STDerror( m, b, sdata ):
    """Standard errors of a fitted line y = m*t + b against ``sdata``.

    Returns ``[se_slope, se_intercept-style]``: the residual standard error
    divided by sqrt(Sxx) and by sqrt(n * Sxx) respectively, where t runs over
    the sample indices 0..n-1.
    """
    t = list(range(len(sdata)))
    fitted = [m * ti + b for ti in t]
    mt = np.mean(t)
    # Residual sum of squares and the spread of t around its mean.
    rss = sum((sdata[i] - fitted[i]) ** 2 for i in t)
    sxx = sum((i - mt) ** 2 for i in t)
    s = np.sqrt(rss / (len(sdata) - 2))
    return [s / np.sqrt(sxx), s / np.sqrt(len(sdata) * sxx)]
def Slope(x1, y1, x2, y2):
    """Slope of the line through (x1, y1) and (x2, y2)."""
    return (y2 - y1) / (x2 - x1)
def YInt(x1, y1, x2, y2):
    """Y-intercept of the line through (x1, y1) and (x2, y2)."""
    # Slope inlined: (y2 - y1) / (x2 - x1).
    return y1 - (y2 - y1) / (x2 - x1) * x1
#########################################
# END: Aux. functions #
#########################################
#########################################
#---------------------------------------#
# getData #
#---------------------------------------#
#########################################
class getData:
    """Load a daily OHLC CSV and derive True Range, a 22-day ATR,
    ATR-normalized daily returns, and their cumulative sum ('PATR').

    The CSV must have a date-like index column followed by the columns
    DOpen, DHigh, DLow and DClose.
    """
    def __init__( self, file ):
        self.file = file
        # ----- #
        df = pd.read_csv(self.file, index_col = 0)
        df = self.index_to_datetime(df)
        self.n = 22  # Look-back window (days) for the ATR
        # ----- #
        self.timeseries = df
        # Each derived series replaces its builder method on the instance.
        self.truerange = self.truerange()
        self.atr = self.atr()
        self.atr_return = self.atr_return()
        self.cum_sum = self.cum_sum()
        self.dataframe = self.dataframe()
    def index_to_datetime( self, df ):
        """Coerce the index to a DatetimeIndex.

        ``Index.to_datetime()`` was removed from pandas; ``pd.to_datetime``
        is the supported replacement.
        """
        df.index = pd.to_datetime(df.index.astype('str'))
        return df
    def truerange( self ):
        """True Range: max of high-low, |high - prev close|, |low - prev close|."""
        adf = self.timeseries
        s1 = pd.Series(np.abs(adf.DHigh - adf.DLow))
        s2 = pd.Series(np.abs(adf.DHigh - adf.DClose.shift()))
        s3 = pd.Series(np.abs(adf.DLow - adf.DClose.shift()))
        # Row-wise max skips the NaNs introduced by shift() on the first row.
        TR = pd.Series(pd.concat([s1, s2, s3], axis=1).max(axis=1), name='TrueRange')
        return TR
    def atr( self ):
        """Exponentially weighted Average True Range over self.n days.

        ``pd.ewma()`` was removed from pandas; ``Series.ewm(...).mean()`` is
        the supported replacement with identical semantics.
        """
        n = self.n
        TR = self.truerange
        ATR = pd.Series(TR.ewm(span=n, min_periods=n).mean(), name='ATR_{}'.format(n))
        return ATR
    def atr_return( self ):
        """Daily close-to-close change normalized by the prior day's ATR."""
        tday = self.timeseries.DClose
        yday = self.timeseries.DClose.shift()
        atryday = self.atr.shift()
        atr_ret = (tday - yday) / atryday
        return atr_ret.rename('ATR_RET')
    def cum_sum( self ):
        """Cumulative sum of the ATR-normalized returns (the 'PATR' series)."""
        return self.atr_return.cumsum(axis=0).rename('PATR')
    def dataframe( self ):
        """Join raw OHLC data with every derived series, in fixed column order."""
        cols = ['DOpen', 'DHigh', 'DLow', 'DClose', 'TrueRange', 'ATR_{}'.format(22)]
        cols += ['ATR_RET', 'PATR']
        adf = self.timeseries.join([self.truerange, self.atr, self.atr_return, self.cum_sum])
        return adf[cols]
    def plot( self, Series, *args):
        """Plot one derived column; an optional first arg sets the title."""
        fig, ax = plt.subplots(1, figsize=(10, 7))
        self.dataframe[Series].plot()
        plt.xlabel('Year')
        plt.ylabel(Series)
        if len(args) != 0:
            plt.title(args[0])
        plt.show()
#########################################
# END: getData #
#########################################
#########################################
#---------------------------------------#
# Regression #
#---------------------------------------#
#########################################
class Regression:
    """Ordinary least squares fit of the cumulative ATR-return series
    against time, via statsmodels."""
    def __init__( self, data ):
        # data: a pandas Series named 'PATR' (the OLS formula below refers
        # to that column name) indexed by date.
        self.time = range(0,len(data))
        self.data = data
        self.simple = self.SimpleRegression(self.time, self.data)
    def Results( self ):
        """Print slope/intercept and the two standard-error measures."""
        txts = 'Simple:\n\tSlope: {0:.5f}\tIntercept: {1:.5f}\n'.format(self.simple.slope, self.simple.intercept)
        txts += '\tSSE: {0:.5f}\tISE: {1:.5f}\n\t'.format(self.simple.sse, self.simple.ise)
        print ( txts )
    class SimpleRegression:
        def __init__(self, time, data):
            X = data
            y = [t for t in range(0,len(data))]
            df = pd.concat([pd.Series(y,index=X.index,name='time'),X],axis=1)
            # NOTE(review): the formula regresses time on PATR ('time ~ PATR'),
            # while RegressionML fits the data against time — confirm the
            # inversion is intentional.
            model = sm.ols(formula='time ~ PATR', data=df)
            result = model.fit()
            self.slope = result.params[1]
            self.intercept = result.params[0]
            self.sse = STDerror(self.slope, self.intercept, data)[0] # Compared to the initial data
            self.ise = STDerror(self.slope, self.intercept, data)[1] # Compared to the initial data
#########################################
# END: Regression #
#########################################
#########################################
#---------------------------------------#
# RegressionML #
#---------------------------------------#
#########################################
class RegressionML:
    """Least-squares fit of the cumulative ATR-return series against time,
    via scikit-learn's LinearRegression."""
    def __init__( self, data ):
        # data: a pandas Series of cumulative ATR returns indexed by date.
        self.time = range(0,len(data))
        self.data = data
        self.model = linear_model.LinearRegression()
        self.simple = self.SimpleRegression(self.model, self.time, self.data)
    def Results( self ):
        """Print slope/intercept and the two standard-error measures."""
        txts = 'Simple Regression:\n\tSlope: {0:.5f}\tIntercept: {1:.5f}\n'.format(self.simple.slope, self.simple.intercept)
        txts += '\tSSE: {0:.5f}\tISE: {1:.5f}\n\t'.format(self.simple.sse, self.simple.ise)
        print ( txts )
    def Plot( self, *args ):
        """Plot the data; pass 's' to overlay the fitted regression line."""
        fig, ax1 = plt.subplots(1,figsize=(10, 7))
        ax1.plot(self.data,linestyle='-.',color='g',label='ATR Return (cumsum)')
        ti = self.data.index[0]
        tf = self.data.index[-1]
        if len(args) == 0:
            plt.xticks(rotation=30)
            plt.legend()
            plt.show()
        else:
            if args[0] == 's':
                # Line endpoints: the intercept at t=0 and the fitted value
                # at the final sample index.
                yi = self.simple.intercept
                yf = self.simple.slope*(len(self.data)) + self.simple.intercept
                ax1.plot([ti,tf],[yi,yf],color='r', label = 'Simple Regression')
            plt.xticks(rotation=30)
            plt.legend()
            plt.show()
    class SimpleRegression:
        def __init__(self, model, time, data):
            t = time
            x = data
            # NOTE(review): test_size=0. keeps every sample for training but
            # still shuffles the (time, value) pairs; confirm that is intended.
            X_train, X_test, y_train, y_test = train_test_split(t, x, test_size=0., random_state=1)
            X_train = [[i] for i in X_train]
            model.fit(X_train,y_train)
            self.slope = model.coef_[0]
            self.intercept = model.intercept_
            self.sse = STDerror(self.slope, self.intercept, data)[0] # Compared to the initial data
            self.ise = STDerror(self.slope, self.intercept, data)[1] # Compared to the initial data
#########################################
# END: RegressionML #
#########################################
#######################################
#-------------------------------------#
# Portfolio #
#-------------------------------------#
#######################################
def portfolio( weights, mean_ret, cov_mat, riskfreerate):
    """Annualized expected return, volatility and Sharpe ratio of a portfolio.

    ``mean_ret`` and ``cov_mat`` are daily statistics; annualization uses
    250 trading days per year.
    """
    annual_ret = mean_ret.dot(weights) * 250
    annual_vol = np.sqrt(weights.dot(cov_mat.dot(weights))) * np.sqrt(250)
    sharpe = (annual_ret - riskfreerate) / annual_vol
    return annual_ret, annual_vol, sharpe  # Expected value, Volatility, Sharpe ratio
def getReturn( weights, mean_ret, cov_mat, riskfreerate):
    """Annualized expected-return component of portfolio()."""
    mu, _, _ = portfolio(weights, mean_ret, cov_mat, riskfreerate)
    return mu
def getVolatility( weights, mean_ret, cov_mat, riskfreerate):
    """Annualized volatility component of portfolio()."""
    _, sigma, _ = portfolio(weights, mean_ret, cov_mat, riskfreerate)
    return sigma
def negSharpeRatio( weights, mean_ret, cov_mat, riskfreerate):
    """Negated Sharpe ratio, suitable as a scipy minimization objective."""
    _, _, sharpe = portfolio(weights, mean_ret, cov_mat, riskfreerate)
    return -sharpe
def random_weights(n):
    """Draw n nonnegative random portfolio weights that sum to one."""
    raw = np.random.random(n)
    return raw / raw.sum()
#######################################
#-------------------------------------#
#######################################
#######################################
#-------------------------------------#
# Simulation #
#-------------------------------------#
#######################################
class simulation:
    """Monte-Carlo exploration of the efficient frontier: draw random
    long-only weight vectors and record each portfolio's annualized
    return, volatility and Sharpe ratio."""
    def __init__( self, stocks, data, riskfreerate, n_portfolios ):
        # stocks: column names of `data` (one daily price series per asset).
        self.stocks = stocks
        self.rfr = riskfreerate
        self.data = data
        self.n_portfolios = n_portfolios
        # Daily simple returns; the first (NaN) row is dropped.
        self.returns = data.pct_change()[1:]
        self.mean_ret = self.returns.mean()
        self.cov_mat = self.returns.cov()
        self.simulation = self.do_simulation()
        self.results = self.simulation[0]
        self.max_sharpe_portfolio = self.simulation[1]
        self.min_volatility_portfolio = self.simulation[2]
    def do_simulation( self ):
        """Sample n_portfolios random weight vectors; return the results
        DataFrame plus the max-Sharpe and min-volatility rows."""
        means,stdvs,shrps,weights = [],[],[],[]
        for i in range(self.n_portfolios):
            w = random_weights(len(self.stocks))
            p = portfolio(w,self.mean_ret,self.cov_mat,self.rfr)
            means.append(p[0])
            stdvs.append(p[1])
            shrps.append(p[2])
            weights.append(w)
        # Convert to DataFrame: one weight column per stock plus the metrics.
        wght = {}
        for i in range(len(self.stocks)):
            wght[self.stocks[i]] = [j[i] for j in weights]
        aux = {'Returns': means,'Volatility': stdvs,'Sharpe Ratio': shrps}
        results = {**wght, **aux}
        df = pd.DataFrame.from_dict(results)
        max_sha_port = df.iloc[df['Sharpe Ratio'].idxmax()]
        min_vol_port = df.iloc[df['Volatility'].idxmin()]
        return df, max_sha_port, min_vol_port
    def plot( self ):
        """Scatter the sampled portfolios colored by Sharpe ratio, marking
        the max-Sharpe and min-volatility portfolios."""
        df = self.simulation[0]
        max_sh = self.simulation[1]
        min_vol= self.simulation[2]
        # Scatter plot colored by Sharpe Ratio
        plt.style.use('seaborn-dark')
        fig, ax = plt.subplots(figsize=(10,7))
        df.plot(ax= ax, kind='scatter',x='Volatility', y='Returns', c='Sharpe Ratio', cmap='RdYlGn', edgecolors='black', grid=True, label = 'MC Simulation')
        # Maximum Sharpe Ratio
        ax.scatter(x=max_sh['Volatility'],y=max_sh['Returns'],marker='D',c='r',s=100,label='Maximum Sharpe Ratio')
        # Minimum variance
        ax.scatter(x=min_vol['Volatility'],y=min_vol['Returns'],marker='D',c='b',s=100,label='Minimum Volatility')
        plt.legend()
        ax.set_xlabel('Volatility (Std. Deviation)', fontsize=15)
        ax.set_ylabel('Expected Returns', fontsize=15)
        ax.set_title('Efficient Frontier', fontsize=22)
        plt.show()
    def print( self ):
        """Print the weights and metrics of the two notable portfolios.

        The method name shadows the builtin only as an attribute of this
        class; the calls below still resolve to the builtin print.
        """
        max_sh = self.simulation[1]
        min_vol= self.simulation[2]
        print('Maximum Sharpe Ratio:\n{}'.format(
            max_sh[max_sh.index.tolist()[0:len(self.stocks)]].to_frame(name='Weights').T))
        print('{}'.format(max_sh[max_sh.index.tolist()[len(self.stocks):]].to_frame(name='Results').T))
        print('\nMinimum Volatility:\n{}'.format(
            min_vol[min_vol.index.tolist()[0:len(self.stocks)]].to_frame(name='Weights').T))
        print('{}'.format(min_vol[min_vol.index.tolist()[len(self.stocks):]].to_frame(name='Results').T))
#######################################
# END: Simulation #
#######################################
#######################################
#-------------------------------------#
# Theory #
#-------------------------------------#
#######################################
def MaxSharpeRatio(meanReturns, covMatrix, riskFreeRate):
    """Long-only, fully-invested weights maximizing the Sharpe ratio
    (found by minimizing its negative with SLSQP)."""
    n_assets = len(meanReturns)
    extra_args = (meanReturns, covMatrix, riskFreeRate)
    budget = {'type': 'eq', 'fun': lambda x: np.sum(x) - 1}
    box = tuple((0, 1) for _ in range(n_assets))
    start = n_assets * [1. / n_assets]
    return sco.minimize(negSharpeRatio, start, args=extra_args,
                        method='SLSQP', bounds=box, constraints=budget)
def MinVolatility(meanReturns, covMatrix, riskFreeRate):
    """Long-only, fully-invested weights minimizing annualized volatility."""
    n_assets = len(meanReturns)
    extra_args = (meanReturns, covMatrix, riskFreeRate)
    budget = {'type': 'eq', 'fun': lambda x: np.sum(x) - 1}
    box = tuple((0, 1) for _ in range(n_assets))
    start = n_assets * [1. / n_assets]
    return sco.minimize(getVolatility, start, args=extra_args,
                        method='SLSQP', bounds=box, constraints=budget)
def EfficientReturn(meanReturns, covMatrix, riskFreeRate, targetReturn):
    """Minimum-volatility long-only weights achieving targetReturn exactly."""
    n_assets = len(meanReturns)
    extra_args = (meanReturns, covMatrix, riskFreeRate)
    def getPortfolioReturn(weights):
        # Annualized expected return of the candidate weights.
        return portfolio(weights, meanReturns, covMatrix, riskFreeRate)[0]
    hit_target = {'type': 'eq', 'fun': lambda x: getPortfolioReturn(x) - targetReturn}
    budget = {'type': 'eq', 'fun': lambda x: np.sum(x) - 1}
    box = tuple((0, 1) for _ in range(n_assets))
    start = n_assets * [1. / n_assets]
    return sco.minimize(getVolatility, start, args=extra_args, method='SLSQP',
                        bounds=box, constraints=(hit_target, budget))
def EfficientFrontier(meanReturns, covMatrix, riskFreeRate, rangeOfReturns):
    """Solve one minimum-volatility portfolio per target return in order."""
    return [EfficientReturn(meanReturns, covMatrix, riskFreeRate, ret)
            for ret in rangeOfReturns]
class theory:
    """Analytic efficient frontier via constrained optimization (SLSQP),
    mirroring the interface of the Monte-Carlo `simulation` class."""
    def __init__( self, stocks, data, riskfreerate, n_portfolios ):
        # n_portfolios here is the number of target returns to trace.
        self.stocks = stocks
        self.rfr = riskfreerate
        self.data = data
        self.n_portfolios = n_portfolios
        # Daily simple returns; the first (NaN) row is dropped.
        self.returns = data.pct_change()[1:]
        self.mean_ret = self.returns.mean()
        self.cov_mat = self.returns.cov()
        self.theory = self.do_theory()
        self.results = self.theory[0]
        self.max_sharpe_portfolio = self.theory[1]
        self.min_volatility_portfolio = self.theory[2]
    def do_theory( self ):
        """Trace the frontier over a fixed grid of target returns and solve
        for the max-Sharpe and min-volatility portfolios."""
        target = np.linspace(0.01, 0.30, self.n_portfolios)
        eff_front = EfficientFrontier(self.mean_ret, self.cov_mat, self.rfr, target)
        # 'fun' is the minimized objective, i.e. the achieved volatility.
        x = np.array([p['fun'] for p in eff_front])
        df = pd.DataFrame({'Volatility':x, 'Returns':target})
        # Create max_sharpe_port
        max_sh = MaxSharpeRatio(self.mean_ret, self.cov_mat, self.rfr)['x']
        x = dict(zip(self.stocks,max_sh))
        port_max = portfolio(max_sh,self.mean_ret, self.cov_mat, self.rfr)
        y = {'Returns' : port_max[0], 'Sharpe Ratio' : port_max[2], 'Volatility' : port_max[1]}
        z = {**x, **y}
        max_sharpe_port = pd.Series(z)
        # Create min_vol_port
        min_vo = MinVolatility(self.mean_ret, self.cov_mat, self.rfr)['x']
        x_vo = dict(zip(self.stocks,min_vo))
        port_min = portfolio(min_vo,self.mean_ret, self.cov_mat, self.rfr)
        y_vo = {'Returns' : port_min[0], 'Sharpe Ratio' : port_min[2], 'Volatility' : port_min[1]}
        z_vo = {**x_vo, **y_vo}
        min_vol_port = pd.Series(z_vo)
        return df, max_sharpe_port, min_vol_port
    def plot( self ):
        """Plot the analytic frontier with the two notable portfolios."""
        df = self.theory[0]
        # NOTE(review): drops rows whose volatility reaches the final (rounded)
        # value — presumably trimming a flat solver tail; confirm.
        df = df.loc[df['Volatility'] < np.round(df['Volatility'].iloc[-1],7)]
        max_sh = self.theory[1]
        min_vol= self.theory[2]
        # Scatter plot colored by Sharpe Ratio
        plt.style.use('seaborn-dark')
        fig, ax = plt.subplots(figsize=(10,7))
        df.plot(ax= ax, kind='scatter',x='Volatility', y='Returns',edgecolors='black', grid=True, label = 'Theory')
        # Maximum Sharpe Ratio
        ax.scatter(x=max_sh['Volatility'],y=max_sh['Returns'],marker='o',c='r',s=100,label='Maximum Sharpe Ratio')
        # Minimum variance
        ax.scatter(x=min_vol['Volatility'],y=min_vol['Returns'],marker='o',c='b',s=100,label='Minimum Volatility')
        plt.legend()
        ax.set_xlabel('Volatility (Std. Deviation)', fontsize=15)
        ax.set_ylabel('Expected Returns', fontsize=15)
        ax.set_title('Efficient Frontier', fontsize=22)
        plt.show()
    def print( self ):
        """Print the weights and metrics of the two notable portfolios."""
        max_sh = self.theory[1]
        min_vol= self.theory[2]
        print('Maximum Sharpe Ratio:\n{}'.format(
            max_sh[max_sh.index.tolist()[0:len(self.stocks)]].to_frame(name='Weights').T))
        print('{}'.format(max_sh[max_sh.index.tolist()[len(self.stocks):]].to_frame(name='Results').T))
        print('\nMinimum Volatility:\n{}'.format(
            min_vol[min_vol.index.tolist()[0:len(self.stocks)]].to_frame(name='Weights').T))
        print('{}'.format(min_vol[min_vol.index.tolist()[len(self.stocks):]].to_frame(name='Results').T))
#######################################
# END: Theory #
#######################################
#-------------------------------------#
# Plot All #
#-------------------------------------#
def plot_all( simulation, theory ):
    """Overlay the Monte-Carlo cloud and the analytic frontier on one chart,
    marking both methods' max-Sharpe and min-volatility portfolios."""
    # Scatter plot colored by Sharpe Ratio
    plt.style.use('seaborn-dark')
    fig, ax = plt.subplots(figsize=(10,7))
    # Simulation
    df = simulation.results
    max_sh = simulation.max_sharpe_portfolio
    min_vol= simulation.min_volatility_portfolio
    df.plot(ax= ax, kind='scatter',x='Volatility', y='Returns', c='Sharpe Ratio', cmap='RdYlGn', edgecolors='black', grid=True, label = 'MC Simulation',alpha=0.5)
    # Maximum Sharpe Ratio
    ax.scatter(x=max_sh['Volatility'],y=max_sh['Returns'],marker='D',c='r',s=100,label='Maximum Sharpe Ratio (MC)')
    # Minimum variance
    ax.scatter(x=min_vol['Volatility'],y=min_vol['Returns'],marker='D',c='b',s=100,label='Minimum Volatility (MC)')
    # Theory
    df = theory.results
    # NOTE(review): same volatility-tail filter as theory.plot(); confirm.
    df = df.loc[df['Volatility'] < np.round(df['Volatility'].iloc[-1],7)]
    max_sh = theory.max_sharpe_portfolio
    min_vol= theory.min_volatility_portfolio
    df.plot(ax= ax, kind='scatter',x='Volatility', y='Returns',edgecolors='black', label = 'Theory', grid=True)
    # Maximum Sharpe Ratio
    ax.scatter(x=max_sh['Volatility'],y=max_sh['Returns'],marker='o',c='r',s=100,label='Maximum Sharpe Ratio (theory)',alpha=0.5)
    # Minimum variance
    ax.scatter(x=min_vol['Volatility'],y=min_vol['Returns'],marker='o',c='b',s=100,label='Minimum Volatility (theory)',alpha=0.5)
    plt.legend(loc=7)
    ax.set_xlabel('Volatility (Std. Deviation)', fontsize=15)
    ax.set_ylabel('Expected Returns', fontsize=15)
    ax.set_title('Efficient Frontier', fontsize=22)
    plt.show()
| [
"matplotlib.pylab.xticks",
"matplotlib.pylab.subplots",
"numpy.sqrt",
"pandas.read_csv",
"numpy.array",
"matplotlib.pylab.show",
"pandas.ewma",
"numpy.mean",
"seaborn.set",
"numpy.random.random",
"matplotlib.pylab.legend",
"matplotlib.pylab.title",
"pandas.DataFrame.from_dict",
"numpy.lins... | [((695, 718), 'seaborn.set', 'sns.set', ([], {'font_scale': '(1.5)'}), '(font_scale=1.5)\n', (702, 718), True, 'import seaborn as sns\n'), ((968, 1030), 'warnings.simplefilter', 'warnings.simplefilter', ([], {'action': '"""ignore"""', 'category': 'FutureWarning'}), "(action='ignore', category=FutureWarning)\n", (989, 1030), False, 'import warnings\n'), ((1031, 1119), 'warnings.filterwarnings', 'warnings.filterwarnings', ([], {'action': '"""ignore"""', 'module': '"""scipy"""', 'message': '"""^internal gelsd"""'}), "(action='ignore', module='scipy', message=\n '^internal gelsd')\n", (1054, 1119), False, 'import warnings\n'), ((1498, 1511), 'numpy.mean', 'np.mean', (['time'], {}), '(time)\n', (1505, 1511), True, 'import numpy as np\n'), ((1692, 1710), 'numpy.sqrt', 'np.sqrt', (['den_slope'], {}), '(den_slope)\n', (1699, 1710), True, 'import numpy as np\n'), ((8040, 8059), 'numpy.random.random', 'np.random.random', (['n'], {}), '(n)\n', (8056, 8059), True, 'import numpy as np\n'), ((11494, 11624), 'scipy.optimize.minimize', 'sco.minimize', (['negSharpeRatio', '(numAssets * [1.0 / numAssets])'], {'args': 'args', 'method': '"""SLSQP"""', 'bounds': 'bounds', 'constraints': 'constraints'}), "(negSharpeRatio, numAssets * [1.0 / numAssets], args=args,\n method='SLSQP', bounds=bounds, constraints=constraints)\n", (11506, 11624), True, 'import scipy.optimize as sco\n'), ((11891, 12020), 'scipy.optimize.minimize', 'sco.minimize', (['getVolatility', '(numAssets * [1.0 / numAssets])'], {'args': 'args', 'method': '"""SLSQP"""', 'bounds': 'bounds', 'constraints': 'constraints'}), "(getVolatility, numAssets * [1.0 / numAssets], args=args,\n method='SLSQP', bounds=bounds, constraints=constraints)\n", (11903, 12020), True, 'import scipy.optimize as sco\n'), ((12481, 12610), 'scipy.optimize.minimize', 'sco.minimize', (['getVolatility', '(numAssets * [1.0 / numAssets])'], {'args': 'args', 'method': '"""SLSQP"""', 'bounds': 'bounds', 'constraints': 'constraints'}), 
"(getVolatility, numAssets * [1.0 / numAssets], args=args,\n method='SLSQP', bounds=bounds, constraints=constraints)\n", (12493, 12610), True, 'import scipy.optimize as sco\n'), ((16010, 16039), 'matplotlib.pylab.style.use', 'plt.style.use', (['"""seaborn-dark"""'], {}), "('seaborn-dark')\n", (16023, 16039), True, 'import matplotlib.pylab as plt\n'), ((16051, 16080), 'matplotlib.pylab.subplots', 'plt.subplots', ([], {'figsize': '(10, 7)'}), '(figsize=(10, 7))\n', (16063, 16080), True, 'import matplotlib.pylab as plt\n'), ((17227, 17244), 'matplotlib.pylab.legend', 'plt.legend', ([], {'loc': '(7)'}), '(loc=7)\n', (17237, 17244), True, 'import matplotlib.pylab as plt\n'), ((17402, 17412), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (17410, 17412), True, 'import matplotlib.pylab as plt\n'), ((2332, 2367), 'pandas.read_csv', 'pd.read_csv', (['self.file'], {'index_col': '(0)'}), '(self.file, index_col=0)\n', (2343, 2367), True, 'import pandas as pd\n'), ((3878, 3910), 'matplotlib.pylab.subplots', 'plt.subplots', (['(1)'], {'figsize': '(10, 7)'}), '(1, figsize=(10, 7))\n', (3890, 3910), True, 'import matplotlib.pylab as plt\n'), ((3956, 3974), 'matplotlib.pylab.xlabel', 'plt.xlabel', (['"""Year"""'], {}), "('Year')\n", (3966, 3974), True, 'import matplotlib.pylab as plt\n'), ((3977, 3995), 'matplotlib.pylab.ylabel', 'plt.ylabel', (['Series'], {}), '(Series)\n', (3987, 3995), True, 'import matplotlib.pylab as plt\n'), ((4041, 4051), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (4049, 4051), True, 'import matplotlib.pylab as plt\n'), ((5684, 5715), 'sklearn.linear_model.LinearRegression', 'linear_model.LinearRegression', ([], {}), '()\n', (5713, 5715), False, 'from sklearn import linear_model\n'), ((6073, 6105), 'matplotlib.pylab.subplots', 'plt.subplots', (['(1)'], {'figsize': '(10, 7)'}), '(1, figsize=(10, 7))\n', (6085, 6105), True, 'import matplotlib.pylab as plt\n'), ((7510, 7522), 'numpy.sqrt', 'np.sqrt', (['(250)'], {}), '(250)\n', (7517, 7522), 
True, 'import numpy as np\n'), ((9407, 9438), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['results'], {}), '(results)\n', (9429, 9438), True, 'import pandas as pd\n'), ((9737, 9766), 'matplotlib.pylab.style.use', 'plt.style.use', (['"""seaborn-dark"""'], {}), "('seaborn-dark')\n", (9750, 9766), True, 'import matplotlib.pylab as plt\n'), ((9779, 9808), 'matplotlib.pylab.subplots', 'plt.subplots', ([], {'figsize': '(10, 7)'}), '(figsize=(10, 7))\n', (9791, 9808), True, 'import matplotlib.pylab as plt\n'), ((10225, 10237), 'matplotlib.pylab.legend', 'plt.legend', ([], {}), '()\n', (10235, 10237), True, 'import matplotlib.pylab as plt\n'), ((10399, 10409), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (10407, 10409), True, 'import matplotlib.pylab as plt\n'), ((13389, 13430), 'numpy.linspace', 'np.linspace', (['(0.01)', '(0.3)', 'self.n_portfolios'], {}), '(0.01, 0.3, self.n_portfolios)\n', (13400, 13430), True, 'import numpy as np\n'), ((13517, 13556), 'numpy.array', 'np.array', (["[p['fun'] for p in eff_front]"], {}), "([p['fun'] for p in eff_front])\n", (13525, 13556), True, 'import numpy as np\n'), ((13564, 13614), 'pandas.DataFrame', 'pd.DataFrame', (["{'Volatility': x, 'Returns': target}"], {}), "({'Volatility': x, 'Returns': target})\n", (13576, 13614), True, 'import pandas as pd\n'), ((13942, 13954), 'pandas.Series', 'pd.Series', (['z'], {}), '(z)\n', (13951, 13954), True, 'import pandas as pd\n'), ((14292, 14307), 'pandas.Series', 'pd.Series', (['z_vo'], {}), '(z_vo)\n', (14301, 14307), True, 'import pandas as pd\n'), ((14560, 14589), 'matplotlib.pylab.style.use', 'plt.style.use', (['"""seaborn-dark"""'], {}), "('seaborn-dark')\n", (14573, 14589), True, 'import matplotlib.pylab as plt\n'), ((14602, 14631), 'matplotlib.pylab.subplots', 'plt.subplots', ([], {'figsize': '(10, 7)'}), '(figsize=(10, 7))\n', (14614, 14631), True, 'import matplotlib.pylab as plt\n'), ((15007, 15019), 'matplotlib.pylab.legend', 'plt.legend', ([], {}), '()\n', 
(15017, 15019), True, 'import matplotlib.pylab as plt\n'), ((15181, 15191), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (15189, 15191), True, 'import matplotlib.pylab as plt\n'), ((2829, 2857), 'numpy.abs', 'np.abs', (['(adf.DHigh - adf.DLow)'], {}), '(adf.DHigh - adf.DLow)\n', (2835, 2857), True, 'import numpy as np\n'), ((3136, 3170), 'pandas.ewma', 'pd.ewma', (['TR'], {'span': 'n', 'min_periods': 'n'}), '(TR, span=n, min_periods=n)\n', (3143, 3170), True, 'import pandas as pd\n'), ((4020, 4038), 'matplotlib.pylab.title', 'plt.title', (['args[0]'], {}), '(args[0])\n', (4029, 4038), True, 'import matplotlib.pylab as plt\n'), ((4941, 4979), 'statsmodels.formula.api.ols', 'sm.ols', ([], {'formula': '"""time ~ PATR"""', 'data': 'df'}), "(formula='time ~ PATR', data=df)\n", (4947, 4979), True, 'import statsmodels.formula.api as sm\n'), ((6257, 6280), 'matplotlib.pylab.xticks', 'plt.xticks', ([], {'rotation': '(30)'}), '(rotation=30)\n', (6267, 6280), True, 'import matplotlib.pylab as plt\n'), ((6284, 6296), 'matplotlib.pylab.legend', 'plt.legend', ([], {}), '()\n', (6294, 6296), True, 'import matplotlib.pylab as plt\n'), ((6300, 6310), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (6308, 6310), True, 'import matplotlib.pylab as plt\n'), ((6698, 6751), 'sklearn.model_selection.train_test_split', 'train_test_split', (['t', 'x'], {'test_size': '(0.0)', 'random_state': '(1)'}), '(t, x, test_size=0.0, random_state=1)\n', (6714, 6751), False, 'from sklearn.model_selection import train_test_split\n'), ((16700, 16738), 'numpy.round', 'np.round', (["df['Volatility'].iloc[-1]", '(7)'], {}), "(df['Volatility'].iloc[-1], 7)\n", (16708, 16738), True, 'import numpy as np\n'), ((6513, 6536), 'matplotlib.pylab.xticks', 'plt.xticks', ([], {'rotation': '(30)'}), '(rotation=30)\n', (6523, 6536), True, 'import matplotlib.pylab as plt\n'), ((6541, 6553), 'matplotlib.pylab.legend', 'plt.legend', ([], {}), '()\n', (6551, 6553), True, 'import matplotlib.pylab as plt\n'), 
((6558, 6568), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (6566, 6568), True, 'import matplotlib.pylab as plt\n'), ((11416, 11425), 'numpy.sum', 'np.sum', (['x'], {}), '(x)\n', (11422, 11425), True, 'import numpy as np\n'), ((11813, 11822), 'numpy.sum', 'np.sum', (['x'], {}), '(x)\n', (11819, 11822), True, 'import numpy as np\n'), ((14426, 14464), 'numpy.round', 'np.round', (["df['Volatility'].iloc[-1]", '(7)'], {}), "(df['Volatility'].iloc[-1], 7)\n", (14434, 14464), True, 'import numpy as np\n'), ((2990, 3021), 'pandas.concat', 'pd.concat', (['[s1, s2, s3]'], {'axis': '(1)'}), '([s1, s2, s3], axis=1)\n', (2999, 3021), True, 'import pandas as pd\n'), ((4880, 4920), 'pandas.Series', 'pd.Series', (['y'], {'index': 'X.index', 'name': '"""time"""'}), "(y, index=X.index, name='time')\n", (4889, 4920), True, 'import pandas as pd\n'), ((12404, 12413), 'numpy.sum', 'np.sum', (['x'], {}), '(x)\n', (12410, 12413), True, 'import numpy as np\n')] |
# Generated by Django 3.2.8 on 2021-10-13 11:30
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable ``benchmarked_at`` timestamp field to the ``node`` model."""

    dependencies = [
        ('collector', '0023_node_benchmark_score'),
    ]

    operations = [
        migrations.AddField(
            model_name='node',
            name='benchmarked_at',
            # null/blank so existing rows need no default at migration time.
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]
| [
"django.db.models.DateTimeField"
] | [((344, 387), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (364, 387), False, 'from django.db import migrations, models\n')] |
import numpy as np
import matplotlib.pyplot as plt
from scipy.linalg import norm as lpnorm

if __name__ == "__main__":
    n_samples = 1000  # how finely the circle is discretized
    p_order = 0.5     # order of the p-"norm" to visualize

    # Sample angles around the full circle and map them to (cos, sin) pairs.
    thetas = np.linspace(0, 2 * np.pi, n_samples)
    circle = np.stack((np.cos(thetas), np.sin(thetas)), 1)

    # Rescale every point so that its p-norm equals 1.
    norms = np.array([lpnorm(pt, p_order) for pt in circle])
    circle = (circle.T / norms).T

    # Draw the resulting unit "circle" with equal axis scaling.
    plt.plot(circle[:, 0], circle[:, 1], linestyle='-')
    axes = plt.gca()
    axes.set_aspect('equal', adjustable='box')
    axes.set_title('Unit Circle: p = ' + str(p_order))
    plt.show()
"matplotlib.pyplot.gca",
"matplotlib.pyplot.plot",
"numpy.linspace",
"numpy.cos",
"scipy.linalg.norm",
"numpy.sin",
"matplotlib.pyplot.show"
] | [((215, 243), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', 'N'], {}), '(0, 2 * np.pi, N)\n', (226, 243), True, 'import numpy as np\n'), ((460, 511), 'matplotlib.pyplot.plot', 'plt.plot', (['points[:, 0]', 'points[:, 1]'], {'linestyle': '"""-"""'}), "(points[:, 0], points[:, 1], linestyle='-')\n", (468, 511), True, 'import matplotlib.pyplot as plt\n'), ((622, 632), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (630, 632), True, 'import matplotlib.pyplot as plt\n'), ((297, 311), 'numpy.cos', 'np.cos', (['angles'], {}), '(angles)\n', (303, 311), True, 'import numpy as np\n'), ((313, 327), 'numpy.sin', 'np.sin', (['angles'], {}), '(angles)\n', (319, 327), True, 'import numpy as np\n'), ((516, 525), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (523, 525), True, 'import matplotlib.pyplot as plt\n'), ((568, 577), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (575, 577), True, 'import matplotlib.pyplot as plt\n'), ((402, 418), 'scipy.linalg.norm', 'lpnorm', (['point', 'p'], {}), '(point, p)\n', (408, 418), True, 'from scipy.linalg import norm as lpnorm\n')] |
"""Test state getters for retrieving motion planning views of state."""
import pytest
from decoy import Decoy
from dataclasses import dataclass, field
from typing import Optional
from opentrons.types import Point, MountType
from opentrons.hardware_control.types import CriticalPoint
from opentrons.protocols.geometry.planning import MoveType, get_waypoints
from opentrons.protocol_engine import errors
from opentrons.protocol_engine.state import PipetteData, PipetteLocationData
from opentrons.protocol_engine.state.labware import LabwareView
from opentrons.protocol_engine.state.pipettes import PipetteView
from opentrons.protocol_engine.state.geometry import GeometryView
from opentrons.protocol_engine.state.motion import MotionView
from opentrons.protocol_engine.types import (
DeckLocation,
WellLocation,
WellOrigin,
PipetteName,
)
@pytest.fixture
def subject(
    labware_view: LabwareView,
    pipette_view: PipetteView,
    geometry_view: GeometryView,
) -> MotionView:
    """Get a MotionView with its dependencies mocked out."""
    # The three view fixtures are presumably Decoy mocks supplied by
    # conftest.py — TODO confirm.
    return MotionView(
        labware_view=labware_view,
        pipette_view=pipette_view,
        geometry_view=geometry_view,
    )
def test_get_pipette_location_with_no_current_location(
    decoy: Decoy,
    pipette_view: PipetteView,
    subject: MotionView,
) -> None:
    """It should return mount and critical_point=None if no location."""
    # Stub: nothing has moved yet, so there is no current deck location.
    decoy.when(pipette_view.get_current_deck_location()).then_return(None)
    decoy.when(pipette_view.get_pipette_data_by_id("pipette-id")).then_return(
        PipetteData(
            mount=MountType.LEFT,
            pipette_name=PipetteName.P300_SINGLE,
        )
    )
    result = subject.get_pipette_location("pipette-id")
    # Without a known location, no critical-point override is applied.
    assert result == PipetteLocationData(mount=MountType.LEFT, critical_point=None)
def test_get_pipette_location_with_current_location_with_quirks(
    decoy: Decoy,
    labware_view: LabwareView,
    pipette_view: PipetteView,
    subject: MotionView,
) -> None:
    """It should return cp=XY_CENTER if location labware has center quirk."""
    decoy.when(pipette_view.get_current_deck_location()).then_return(
        DeckLocation(pipette_id="pipette-id", labware_id="reservoir-id", well_name="A1")
    )
    decoy.when(pipette_view.get_pipette_data_by_id("pipette-id")).then_return(
        PipetteData(
            mount=MountType.RIGHT,
            pipette_name=PipetteName.P300_SINGLE,
        )
    )
    # The labware at the current location reports the centering quirk...
    decoy.when(
        labware_view.get_labware_has_quirk(
            "reservoir-id",
            "centerMultichannelOnWells",
        )
    ).then_return(True)
    result = subject.get_pipette_location("pipette-id")
    # ...so the critical point must be the XY center of the channels.
    assert result == PipetteLocationData(
        mount=MountType.RIGHT,
        critical_point=CriticalPoint.XY_CENTER,
    )
def test_get_pipette_location_with_current_location_different_pipette(
    decoy: Decoy,
    labware_view: LabwareView,
    pipette_view: PipetteView,
    subject: MotionView,
) -> None:
    """It should return mount and cp=None if location used other pipette."""
    # The current deck location belongs to a *different* pipette.
    decoy.when(pipette_view.get_current_deck_location()).then_return(
        DeckLocation(
            pipette_id="other-pipette-id",
            labware_id="reservoir-id",
            well_name="A1",
        )
    )
    decoy.when(pipette_view.get_pipette_data_by_id("pipette-id")).then_return(
        PipetteData(
            mount=MountType.LEFT,
            pipette_name=PipetteName.P300_SINGLE,
        )
    )
    decoy.when(
        labware_view.get_labware_has_quirk(
            "reservoir-id",
            "centerMultichannelOnWells",
        )
    ).then_return(False)
    result = subject.get_pipette_location("pipette-id")
    assert result == PipetteLocationData(
        mount=MountType.LEFT,
        critical_point=None,
    )
def test_get_pipette_location_override_current_location(
    decoy: Decoy,
    labware_view: LabwareView,
    pipette_view: PipetteView,
    subject: MotionView,
) -> None:
    """It should calculate pipette location from a passed in deck location."""
    # Explicit location overrides whatever the pipette store would report;
    # note no stub for get_current_deck_location() is needed here.
    current_location = DeckLocation(
        pipette_id="pipette-id",
        labware_id="reservoir-id",
        well_name="A1",
    )
    decoy.when(pipette_view.get_pipette_data_by_id("pipette-id")).then_return(
        PipetteData(
            mount=MountType.RIGHT,
            pipette_name=PipetteName.P300_SINGLE,
        )
    )
    decoy.when(
        labware_view.get_labware_has_quirk(
            "reservoir-id",
            "centerMultichannelOnWells",
        )
    ).then_return(True)
    result = subject.get_pipette_location(
        pipette_id="pipette-id",
        current_location=current_location,
    )
    assert result == PipetteLocationData(
        mount=MountType.RIGHT,
        critical_point=CriticalPoint.XY_CENTER,
    )
@dataclass(frozen=True)
class WaypointSpec:
    """Spec data for testing the get_movement_waypoints selector."""

    name: str
    expected_move_type: MoveType
    pipette_id: str = "pipette-id"
    labware_id: str = "labware-id"
    well_name: str = "A1"
    well_location: Optional[WellLocation] = None
    # default_factory so each spec instance gets its own fresh Point.
    origin: Point = field(default_factory=lambda: Point(1, 2, 3))
    dest: Point = field(default_factory=lambda: Point(4, 5, 6))
    origin_cp: Optional[CriticalPoint] = None
    location: Optional[DeckLocation] = None
    expected_dest_cp: Optional[CriticalPoint] = None
    has_center_multichannel_quirk: bool = False
    # Exactly one of labware_z / all_labware_z must be set; the test
    # asserts if both are left as None.
    labware_z: Optional[float] = None
    all_labware_z: Optional[float] = None
    max_travel_z: float = 50
# TODO(mc, 2021-01-14): these tests probably need to be rethought; this fixture
# is impossible to reason with. The test is really just trying to be a collaborator
# test for the `get_waypoints` function, so we should rewrite it as such.
@pytest.mark.parametrize(
    "spec",
    [
        WaypointSpec(
            name="General arc if moving from unknown location",
            all_labware_z=20,
            expected_move_type=MoveType.GENERAL_ARC,
        ),
        WaypointSpec(
            name="General arc if moving from other labware",
            location=DeckLocation(
                pipette_id="pipette-id",
                labware_id="other-labware-id",
                well_name="A1",
            ),
            all_labware_z=20,
            expected_move_type=MoveType.GENERAL_ARC,
        ),
        WaypointSpec(
            name="In-labware arc if moving to same labware",
            location=DeckLocation(
                pipette_id="pipette-id",
                labware_id="labware-id",
                well_name="B2",
            ),
            labware_z=10,
            expected_move_type=MoveType.IN_LABWARE_ARC,
        ),
        WaypointSpec(
            name="General arc if moving to same labware with different pipette",
            location=DeckLocation(
                pipette_id="other-pipette-id",
                labware_id="labware-id",
                well_name="A1",
            ),
            all_labware_z=20,
            expected_move_type=MoveType.GENERAL_ARC,
        ),
        WaypointSpec(
            name="Direct movement from well to same well",
            location=DeckLocation(
                pipette_id="pipette-id",
                labware_id="labware-id",
                well_name="A1",
            ),
            labware_z=10,
            expected_move_type=MoveType.DIRECT,
        ),
        WaypointSpec(
            name="General arc with XY_CENTER destination CP",
            has_center_multichannel_quirk=True,
            all_labware_z=20,
            expected_move_type=MoveType.GENERAL_ARC,
            expected_dest_cp=CriticalPoint.XY_CENTER,
        ),
        WaypointSpec(
            name="General arc with a well offset",
            all_labware_z=20,
            well_location=WellLocation(origin=WellOrigin.TOP, offset=(0, 0, 1)),
            expected_move_type=MoveType.GENERAL_ARC,
        ),
        # TODO(mc, 2021-01-08): add test for override current location
    ],
)
def test_get_movement_waypoints(
    decoy: Decoy,
    labware_view: LabwareView,
    pipette_view: PipetteView,
    geometry_view: GeometryView,
    subject: MotionView,
    spec: WaypointSpec,
) -> None:
    """It should calculate the correct set of waypoints for a move."""
    decoy.when(
        labware_view.get_labware_has_quirk(
            spec.labware_id,
            "centerMultichannelOnWells",
        )
    ).then_return(spec.has_center_multichannel_quirk)
    # Pick the minimum travel height and the matching geometry stub from
    # whichever of labware_z / all_labware_z the spec defined.
    if spec.labware_z is not None:
        min_travel_z = spec.labware_z
        decoy.when(geometry_view.get_labware_highest_z(spec.labware_id)).then_return(
            spec.labware_z
        )
    elif spec.all_labware_z is not None:
        min_travel_z = spec.all_labware_z
        decoy.when(geometry_view.get_all_labware_highest_z()).then_return(
            spec.all_labware_z
        )
    else:
        assert False, "One of spec.labware_z or all_labware_z must be defined."
    decoy.when(
        geometry_view.get_well_position(
            spec.labware_id,
            spec.well_name,
            spec.well_location,
        )
    ).then_return(spec.dest)
    decoy.when(pipette_view.get_current_deck_location()).then_return(spec.location)
    result = subject.get_movement_waypoints(
        pipette_id=spec.pipette_id,
        labware_id=spec.labware_id,
        well_name=spec.well_name,
        well_location=spec.well_location,
        origin=spec.origin,
        origin_cp=spec.origin_cp,
        max_travel_z=spec.max_travel_z,
    )
    # The subject's output must match get_waypoints() called with the
    # move type and destination CP the spec expects.
    expected = get_waypoints(
        move_type=spec.expected_move_type,
        origin=spec.origin,
        origin_cp=spec.origin_cp,
        max_travel_z=spec.max_travel_z,
        min_travel_z=min_travel_z,
        dest=spec.dest,
        dest_cp=spec.expected_dest_cp,
        xy_waypoints=[],
    )
    assert result == expected
def test_get_movement_waypoints_raises(
    decoy: Decoy,
    pipette_view: PipetteView,
    geometry_view: GeometryView,
    subject: MotionView,
) -> None:
    """It should raise FailedToPlanMoveError if get_waypoints raises."""
    decoy.when(pipette_view.get_current_deck_location()).then_return(None)
    decoy.when(geometry_view.get_well_position("labware-id", "A1", None)).then_return(
        Point(4, 5, 6)
    )
    with pytest.raises(errors.FailedToPlanMoveError, match="out of bounds"):
        subject.get_movement_waypoints(
            pipette_id="pipette-id",
            labware_id="labware-id",
            well_name="A1",
            well_location=None,
            origin=Point(1, 2, 3),
            origin_cp=None,
            # this max_travel_z is too low (below the z=6 destination)
            # and will induce failure
            max_travel_z=1,
        )
| [
"opentrons.protocols.geometry.planning.get_waypoints",
"opentrons.protocol_engine.types.DeckLocation",
"dataclasses.dataclass",
"opentrons.protocol_engine.state.motion.MotionView",
"opentrons.protocol_engine.state.PipetteData",
"pytest.raises",
"opentrons.types.Point",
"opentrons.protocol_engine.state... | [((4834, 4856), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (4843, 4856), False, 'from dataclasses import dataclass, field\n'), ((1071, 1168), 'opentrons.protocol_engine.state.motion.MotionView', 'MotionView', ([], {'labware_view': 'labware_view', 'pipette_view': 'pipette_view', 'geometry_view': 'geometry_view'}), '(labware_view=labware_view, pipette_view=pipette_view,\n geometry_view=geometry_view)\n', (1081, 1168), False, 'from opentrons.protocol_engine.state.motion import MotionView\n'), ((4099, 4184), 'opentrons.protocol_engine.types.DeckLocation', 'DeckLocation', ([], {'pipette_id': '"""pipette-id"""', 'labware_id': '"""reservoir-id"""', 'well_name': '"""A1"""'}), "(pipette_id='pipette-id', labware_id='reservoir-id', well_name='A1'\n )\n", (4111, 4184), False, 'from opentrons.protocol_engine.types import DeckLocation, WellLocation, WellOrigin, PipetteName\n'), ((9575, 9805), 'opentrons.protocols.geometry.planning.get_waypoints', 'get_waypoints', ([], {'move_type': 'spec.expected_move_type', 'origin': 'spec.origin', 'origin_cp': 'spec.origin_cp', 'max_travel_z': 'spec.max_travel_z', 'min_travel_z': 'min_travel_z', 'dest': 'spec.dest', 'dest_cp': 'spec.expected_dest_cp', 'xy_waypoints': '[]'}), '(move_type=spec.expected_move_type, origin=spec.origin,\n origin_cp=spec.origin_cp, max_travel_z=spec.max_travel_z, min_travel_z=\n min_travel_z, dest=spec.dest, dest_cp=spec.expected_dest_cp,\n xy_waypoints=[])\n', (9588, 9805), False, 'from opentrons.protocols.geometry.planning import MoveType, get_waypoints\n'), ((1575, 1646), 'opentrons.protocol_engine.state.PipetteData', 'PipetteData', ([], {'mount': 'MountType.LEFT', 'pipette_name': 'PipetteName.P300_SINGLE'}), '(mount=MountType.LEFT, pipette_name=PipetteName.P300_SINGLE)\n', (1586, 1646), False, 'from opentrons.protocol_engine.state import PipetteData, PipetteLocationData\n'), ((1767, 1829), 
'opentrons.protocol_engine.state.PipetteLocationData', 'PipetteLocationData', ([], {'mount': 'MountType.LEFT', 'critical_point': 'None'}), '(mount=MountType.LEFT, critical_point=None)\n', (1786, 1829), False, 'from opentrons.protocol_engine.state import PipetteData, PipetteLocationData\n'), ((2169, 2254), 'opentrons.protocol_engine.types.DeckLocation', 'DeckLocation', ([], {'pipette_id': '"""pipette-id"""', 'labware_id': '"""reservoir-id"""', 'well_name': '"""A1"""'}), "(pipette_id='pipette-id', labware_id='reservoir-id', well_name='A1'\n )\n", (2181, 2254), False, 'from opentrons.protocol_engine.types import DeckLocation, WellLocation, WellOrigin, PipetteName\n'), ((2344, 2416), 'opentrons.protocol_engine.state.PipetteData', 'PipetteData', ([], {'mount': 'MountType.RIGHT', 'pipette_name': 'PipetteName.P300_SINGLE'}), '(mount=MountType.RIGHT, pipette_name=PipetteName.P300_SINGLE)\n', (2355, 2416), False, 'from opentrons.protocol_engine.state import PipetteData, PipetteLocationData\n'), ((2701, 2788), 'opentrons.protocol_engine.state.PipetteLocationData', 'PipetteLocationData', ([], {'mount': 'MountType.RIGHT', 'critical_point': 'CriticalPoint.XY_CENTER'}), '(mount=MountType.RIGHT, critical_point=CriticalPoint.\n XY_CENTER)\n', (2720, 2788), False, 'from opentrons.protocol_engine.state import PipetteData, PipetteLocationData\n'), ((3151, 3241), 'opentrons.protocol_engine.types.DeckLocation', 'DeckLocation', ([], {'pipette_id': '"""other-pipette-id"""', 'labware_id': '"""reservoir-id"""', 'well_name': '"""A1"""'}), "(pipette_id='other-pipette-id', labware_id='reservoir-id',\n well_name='A1')\n", (3163, 3241), False, 'from opentrons.protocol_engine.types import DeckLocation, WellLocation, WellOrigin, PipetteName\n'), ((3379, 3450), 'opentrons.protocol_engine.state.PipetteData', 'PipetteData', ([], {'mount': 'MountType.LEFT', 'pipette_name': 'PipetteName.P300_SINGLE'}), '(mount=MountType.LEFT, pipette_name=PipetteName.P300_SINGLE)\n', (3390, 3450), False, 'from 
opentrons.protocol_engine.state import PipetteData, PipetteLocationData\n'), ((3736, 3798), 'opentrons.protocol_engine.state.PipetteLocationData', 'PipetteLocationData', ([], {'mount': 'MountType.LEFT', 'critical_point': 'None'}), '(mount=MountType.LEFT, critical_point=None)\n', (3755, 3798), False, 'from opentrons.protocol_engine.state import PipetteData, PipetteLocationData\n'), ((4299, 4371), 'opentrons.protocol_engine.state.PipetteData', 'PipetteData', ([], {'mount': 'MountType.RIGHT', 'pipette_name': 'PipetteName.P300_SINGLE'}), '(mount=MountType.RIGHT, pipette_name=PipetteName.P300_SINGLE)\n', (4310, 4371), False, 'from opentrons.protocol_engine.state import PipetteData, PipetteLocationData\n'), ((4725, 4812), 'opentrons.protocol_engine.state.PipetteLocationData', 'PipetteLocationData', ([], {'mount': 'MountType.RIGHT', 'critical_point': 'CriticalPoint.XY_CENTER'}), '(mount=MountType.RIGHT, critical_point=CriticalPoint.\n XY_CENTER)\n', (4744, 4812), False, 'from opentrons.protocol_engine.state import PipetteData, PipetteLocationData\n'), ((10298, 10312), 'opentrons.types.Point', 'Point', (['(4)', '(5)', '(6)'], {}), '(4, 5, 6)\n', (10303, 10312), False, 'from opentrons.types import Point, MountType\n'), ((10329, 10395), 'pytest.raises', 'pytest.raises', (['errors.FailedToPlanMoveError'], {'match': '"""out of bounds"""'}), "(errors.FailedToPlanMoveError, match='out of bounds')\n", (10342, 10395), False, 'import pytest\n'), ((5189, 5203), 'opentrons.types.Point', 'Point', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (5194, 5203), False, 'from opentrons.types import Point, MountType\n'), ((5253, 5267), 'opentrons.types.Point', 'Point', (['(4)', '(5)', '(6)'], {}), '(4, 5, 6)\n', (5258, 5267), False, 'from opentrons.types import Point, MountType\n'), ((6137, 6225), 'opentrons.protocol_engine.types.DeckLocation', 'DeckLocation', ([], {'pipette_id': '"""pipette-id"""', 'labware_id': '"""other-labware-id"""', 'well_name': '"""A1"""'}), "(pipette_id='pipette-id', 
labware_id='other-labware-id',\n well_name='A1')\n", (6149, 6225), False, 'from opentrons.protocol_engine.types import DeckLocation, WellLocation, WellOrigin, PipetteName\n'), ((6484, 6562), 'opentrons.protocol_engine.types.DeckLocation', 'DeckLocation', ([], {'pipette_id': '"""pipette-id"""', 'labware_id': '"""labware-id"""', 'well_name': '"""B2"""'}), "(pipette_id='pipette-id', labware_id='labware-id', well_name='B2')\n", (6496, 6562), False, 'from opentrons.protocol_engine.types import DeckLocation, WellLocation, WellOrigin, PipetteName\n'), ((6844, 6932), 'opentrons.protocol_engine.types.DeckLocation', 'DeckLocation', ([], {'pipette_id': '"""other-pipette-id"""', 'labware_id': '"""labware-id"""', 'well_name': '"""A1"""'}), "(pipette_id='other-pipette-id', labware_id='labware-id',\n well_name='A1')\n", (6856, 6932), False, 'from opentrons.protocol_engine.types import DeckLocation, WellLocation, WellOrigin, PipetteName\n'), ((7189, 7267), 'opentrons.protocol_engine.types.DeckLocation', 'DeckLocation', ([], {'pipette_id': '"""pipette-id"""', 'labware_id': '"""labware-id"""', 'well_name': '"""A1"""'}), "(pipette_id='pipette-id', labware_id='labware-id', well_name='A1')\n", (7201, 7267), False, 'from opentrons.protocol_engine.types import DeckLocation, WellLocation, WellOrigin, PipetteName\n'), ((7826, 7879), 'opentrons.protocol_engine.types.WellLocation', 'WellLocation', ([], {'origin': 'WellOrigin.TOP', 'offset': '(0, 0, 1)'}), '(origin=WellOrigin.TOP, offset=(0, 0, 1))\n', (7838, 7879), False, 'from opentrons.protocol_engine.types import DeckLocation, WellLocation, WellOrigin, PipetteName\n'), ((10590, 10604), 'opentrons.types.Point', 'Point', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (10595, 10604), False, 'from opentrons.types import Point, MountType\n')] |
import numpy as np
from .. import T
from ..layer import ShapedLayer
from ..initialization import initialize_weights
from .full import Linear
from .. import stats
__all__ = ['Gaussian', 'Bernoulli', 'IdentityVariance']
class Gaussian(Linear):
    """Linear output layer parameterizing a diagonal Gaussian.

    Emits 2 * dim_out activations that activate() splits into a
    (scale_diag, mu) pair.
    """

    def __init__(self, *args, **kwargs):
        # Only 'diagonal' covariance is handled in activate(); anything
        # else raises there.
        self.cov_type = kwargs.pop('cov_type', 'diagonal')
        # Floor added to the softplus'd scale to keep it away from zero.
        self.min_stdev = kwargs.pop('min_stdev', 1e-2)
        super(Gaussian, self).__init__(*args, **kwargs)
        assert not self.elementwise

    def initialize(self):
        if not self.elementwise:
            dim_in, dim_out = self.get_dim_in()[-1], self.get_dim_out()[-1]
            # W is [zeros | initialized]: the first half (consumed as
            # scale_diag in activate) starts at zero weights, the second
            # half (mu) gets the configured initialization.
            left = initialize_weights(self.initialization, [dim_in, dim_out // 2])
            right = T.zeros([dim_in, dim_out // 2])
            self.create_parameter('W', [dim_in, dim_out], initial_value=(
                T.concatenate([
                    right, left
                ], -1)
            ))
            self.create_parameter('b', [dim_out], initial_value=np.zeros([dim_out]))

    def get_dim_out(self):
        # Double width: one half for scale, one half for mean.
        return [self.dim_out[0] * 2]

    def activate(self, X):
        if self.cov_type == 'diagonal':
            scale_diag, mu = T.split(X, 2, axis=-1)
            if hasattr(self, 'min_stdev'):
                scale_diag = T.softplus(scale_diag) + self.min_stdev
            else:
                # NOTE(review): min_stdev is always assigned in __init__,
                # so this fallback looks unreachable — confirm before removing.
                scale_diag = T.softplus(scale_diag) + 1e-5
            return stats.GaussianScaleDiag([scale_diag, mu], parameter_type='regular')
        raise Exception("Undefined covariance type: %s" % self.cov_type)

    def __str__(self):
        return "Gaussian(%s)" % self.dim_out
class Bernoulli(Linear):
    """Linear output layer that wraps its activations in a Bernoulli
    distribution.

    The ``parameter_type`` keyword (default ``'natural'``) controls how
    the raw activations are interpreted by ``stats.Bernoulli``.
    """

    def __init__(self, *args, **kwargs):
        self.parameter_type = kwargs.pop('parameter_type', 'natural')
        super(Bernoulli, self).__init__(*args, **kwargs)

    def activate(self, X):
        """Wrap activations X in a Bernoulli distribution.

        The original implementation branched on ``self.elementwise``, but
        both branches were identical, so the dead branch was removed.
        """
        return stats.Bernoulli(X, parameter_type=self.parameter_type)

    def __str__(self):
        return "Bernoulli(%s)" % self.dim_out
class IdentityVariance(ShapedLayer):
    """Pass-through layer that wraps its input in a Gaussian with a fixed,
    constant variance; it has no learnable parameters."""

    def __init__(self, variance=1e-4, *args, **kwargs):
        # Fixed (non-trainable) variance applied uniformly to the input.
        self.variance = variance
        super(IdentityVariance, self).__init__(*args, **kwargs)

    def initialize(self):
        # Nothing to create: the layer is parameter-free.
        pass

    def get_parameters(self):
        return []

    def infer_shape(self, shape):
        if shape is None: return
        if self.elementwise:
            # Elementwise layers preserve the incoming shape exactly.
            self.dim_in = shape
            self.dim_out = shape
            return
        if self.dim_in is None:
            self.dim_in = shape

    def forward(self, X):
        # Mean is X itself; scale is the constant sqrt(variance) everywhere.
        return stats.GaussianScaleDiag([np.sqrt(self.variance) * T.ones_like(X), X])
| [
"numpy.zeros",
"numpy.sqrt"
] | [((1005, 1024), 'numpy.zeros', 'np.zeros', (['[dim_out]'], {}), '([dim_out])\n', (1013, 1024), True, 'import numpy as np\n'), ((2690, 2712), 'numpy.sqrt', 'np.sqrt', (['self.variance'], {}), '(self.variance)\n', (2697, 2712), True, 'import numpy as np\n')] |
from django import forms
from pbs.implementation.models import BurningPrescription, EdgingPlan, LightingSequence
from pbs.forms import HelperModelForm, WideTextarea
class BurningPrescriptionForm(forms.ModelForm):
    """Plain ModelForm exposing the fuel and curing fields of a
    BurningPrescription."""

    class Meta:
        model = BurningPrescription
        fields = ('prescription', 'fuel_type', 'scorch', 'grassland_curing_min', 'grassland_curing_max')
class EdgingPlanForm(HelperModelForm):
    """ModelForm for EdgingPlan with its free-text fields rendered as wide
    textareas."""

    def __init__(self, *args, **kwargs):
        super(EdgingPlanForm, self).__init__(*args, **kwargs)
        self.fields['location'].widget = WideTextarea()
        self.fields['strategies'].widget = WideTextarea()

    class Meta:
        model = EdgingPlan
class LightingSequenceForm(HelperModelForm):
    """ModelForm for LightingSequence.

    Widens several free-text widgets and makes the numeric range fields
    optional. The repeated widget/required assignments of the original
    are collapsed into data-driven loops.
    """

    # Fields rendered with a wide textarea instead of the default widget.
    _WIDE_FIELDS = ('strategies', 'fuel_description', 'resources', 'wind_dir')

    # Min/max range fields that are not required on this form.
    _OPTIONAL_FIELDS = (
        'ffdi_min', 'ffdi_max',
        'grassland_curing_min', 'grassland_curing_max',
        'gfdi_min', 'gfdi_max',
        'ros_min', 'ros_max',
        'wind_min', 'wind_max',
    )

    def __init__(self, *args, **kwargs):
        super(LightingSequenceForm, self).__init__(*args, **kwargs)
        self.fields['cellname'].widget.attrs.update({'class': 'span5'})
        for name in self._WIDE_FIELDS:
            self.fields[name].widget = WideTextarea()
        for name in self._OPTIONAL_FIELDS:
            self.fields[name].required = False

    class Meta:
        model = LightingSequence
| [
"pbs.forms.WideTextarea"
] | [((559, 573), 'pbs.forms.WideTextarea', 'WideTextarea', ([], {}), '()\n', (571, 573), False, 'from pbs.forms import HelperModelForm, WideTextarea\n'), ((617, 631), 'pbs.forms.WideTextarea', 'WideTextarea', ([], {}), '()\n', (629, 631), False, 'from pbs.forms import HelperModelForm, WideTextarea\n'), ((948, 962), 'pbs.forms.WideTextarea', 'WideTextarea', ([], {}), '()\n', (960, 962), False, 'from pbs.forms import HelperModelForm, WideTextarea\n'), ((1012, 1026), 'pbs.forms.WideTextarea', 'WideTextarea', ([], {}), '()\n', (1024, 1026), False, 'from pbs.forms import HelperModelForm, WideTextarea\n'), ((1069, 1083), 'pbs.forms.WideTextarea', 'WideTextarea', ([], {}), '()\n', (1081, 1083), False, 'from pbs.forms import HelperModelForm, WideTextarea\n'), ((1125, 1139), 'pbs.forms.WideTextarea', 'WideTextarea', ([], {}), '()\n', (1137, 1139), False, 'from pbs.forms import HelperModelForm, WideTextarea\n')] |
# DExTer : Debugging Experience Tester
# ~~~~~~ ~ ~~ ~ ~~
#
# Copyright (c) 2018 by SN Systems Ltd., Sony Interactive Entertainment Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""List debuggers tool."""
from dex.debugger.Debuggers import add_debugger_tool_arguments1
from dex.debugger.Debuggers import handle_debugger_tool_options1
from dex.debugger.Debuggers import Debuggers
from dex.tools import ToolBase
from dex.utils import Timer
from dex.utils.Exceptions import DebuggerException, Error
class Tool(ToolBase):
    """List all of the potential debuggers that DExTer knows about and whether
    there is currently a valid interface available for them.
    """

    @property
    def name(self):
        return 'DExTer list debuggers'

    def add_tool_arguments(self, parser, defaults):
        # Reuse the class docstring as the CLI description.
        parser.description = Tool.__doc__
        add_debugger_tool_arguments1(parser, defaults)

    def handle_options(self, defaults):
        handle_debugger_tool_options1(self.context, defaults)

    def go(self):
        # Time the listing; surface any debugger problem as a DExTer Error.
        with Timer('list debuggers'):
            try:
                Debuggers(self.context).list()
            except DebuggerException as e:
                raise Error(e)
        return 0
| [
"dex.utils.Exceptions.Error",
"dex.utils.Timer",
"dex.debugger.Debuggers.Debuggers",
"dex.debugger.Debuggers.add_debugger_tool_arguments1",
"dex.debugger.Debuggers.handle_debugger_tool_options1"
] | [((1883, 1929), 'dex.debugger.Debuggers.add_debugger_tool_arguments1', 'add_debugger_tool_arguments1', (['parser', 'defaults'], {}), '(parser, defaults)\n', (1911, 1929), False, 'from dex.debugger.Debuggers import add_debugger_tool_arguments1\n'), ((1979, 2032), 'dex.debugger.Debuggers.handle_debugger_tool_options1', 'handle_debugger_tool_options1', (['self.context', 'defaults'], {}), '(self.context, defaults)\n', (2008, 2032), False, 'from dex.debugger.Debuggers import handle_debugger_tool_options1\n'), ((2065, 2088), 'dex.utils.Timer', 'Timer', (['"""list debuggers"""'], {}), "('list debuggers')\n", (2070, 2088), False, 'from dex.utils import Timer\n'), ((2219, 2227), 'dex.utils.Exceptions.Error', 'Error', (['e'], {}), '(e)\n', (2224, 2227), False, 'from dex.utils.Exceptions import DebuggerException, Error\n'), ((2123, 2146), 'dex.debugger.Debuggers.Debuggers', 'Debuggers', (['self.context'], {}), '(self.context)\n', (2132, 2146), False, 'from dex.debugger.Debuggers import Debuggers\n')] |
import pytest
from coloring import create_color
from coloring.consts import *
def test_create_color():
    """create_color(r, g, b) returns a wrapper emitting a 24-bit
    foreground escape (38;2;r;g;b) around the text."""
    text = "Hello"
    mycolor = create_color(120, 160, 200)
    colored_text = mycolor(text)
    assert colored_text == f"{CSI}38;2;120;160;200m{text}{RESET_COLOR}"
    mycolor = create_color(128, 128, 128)
    colored_text = mycolor(text)
    assert colored_text == f"{CSI}38;2;128;128;128m{text}{RESET_COLOR}"
def test_create_color_bold():
    """s="b" adds the bold escape inside the color wrapping."""
    text = "Hello"
    mycolor = create_color(120, 160, 200, s="b")
    colored_text = mycolor(text)
    assert (
        colored_text
        == f"{CSI}38;2;120;160;200m{BOLD}{text}{RESET_BOLD_AND_DIM}{RESET_COLOR}"
    )
def test_create_color_underline():
    """s="u" adds the underline escape inside the color wrapping."""
    text = "Hello"
    mycolor = create_color(120, 160, 200, s="u")
    colored_text = mycolor(text)
    assert (
        colored_text
        == f"{CSI}38;2;120;160;200m{UNDERLINE}{text}{RESET_UNDERLINE}{RESET_COLOR}"
    )
def test_create_color_cross():
    """s="c" adds the strikethrough (cross) escape inside the color wrapping."""
    text = "Hello"
    mycolor = create_color(120, 160, 200, s="c")
    colored_text = mycolor(text)
    assert (
        colored_text == f"{CSI}38;2;120;160;200m{CROSS}{text}{RESET_CROSS}{RESET_COLOR}"
    )
def test_create_color_style_only():
    """With no RGB arguments, only the style escape is emitted."""
    # Bold style only, no color codes (original comment "Red background"
    # was a copy-paste error).
    text = "Hello"
    mycolor = create_color(s="b")
    colored_text = mycolor(text)
    assert colored_text == f"{BOLD}{text}{RESET_BOLD_AND_DIM}"
def test_create_color_background():
    """bg=(r, g, b) returns a wrapper emitting a 24-bit background escape
    (48;2;r;g;b) around the text."""
    # Background color only; foreground is untouched.
    text = "Hello"
    mycolor = create_color(bg=(120, 160, 200))
    colored_text = mycolor(text)
    assert colored_text == f"{CSI}48;2;120;160;200m{text}{RESET_BACKGROUND}"
    mycolor = create_color(bg=(128, 128, 128))
    colored_text = mycolor(text)
    assert colored_text == f"{CSI}48;2;128;128;128m{text}{RESET_BACKGROUND}"
def test_create_color_signature_error():
    """Passing an incomplete or ill-typed RGB triple raises TypeError."""
    with pytest.raises(TypeError):
        create_color(12, 12)
    with pytest.raises(TypeError):
        create_color(12, 12, "lol")
    with pytest.raises(TypeError):
        create_color(12)
| [
"coloring.create_color",
"pytest.raises"
] | [((139, 166), 'coloring.create_color', 'create_color', (['(120)', '(160)', '(200)'], {}), '(120, 160, 200)\n', (151, 166), False, 'from coloring import create_color\n'), ((287, 314), 'coloring.create_color', 'create_color', (['(128)', '(128)', '(128)'], {}), '(128, 128, 128)\n', (299, 314), False, 'from coloring import create_color\n'), ((486, 520), 'coloring.create_color', 'create_color', (['(120)', '(160)', '(200)'], {'s': '"""b"""'}), "(120, 160, 200, s='b')\n", (498, 520), False, 'from coloring import create_color\n'), ((747, 781), 'coloring.create_color', 'create_color', (['(120)', '(160)', '(200)'], {'s': '"""u"""'}), "(120, 160, 200, s='u')\n", (759, 781), False, 'from coloring import create_color\n'), ((1006, 1040), 'coloring.create_color', 'create_color', (['(120)', '(160)', '(200)'], {'s': '"""c"""'}), "(120, 160, 200, s='c')\n", (1018, 1040), False, 'from coloring import create_color\n'), ((1275, 1294), 'coloring.create_color', 'create_color', ([], {'s': '"""b"""'}), "(s='b')\n", (1287, 1294), False, 'from coloring import create_color\n'), ((1484, 1516), 'coloring.create_color', 'create_color', ([], {'bg': '(120, 160, 200)'}), '(bg=(120, 160, 200))\n', (1496, 1516), False, 'from coloring import create_color\n'), ((1642, 1674), 'coloring.create_color', 'create_color', ([], {'bg': '(128, 128, 128)'}), '(bg=(128, 128, 128))\n', (1654, 1674), False, 'from coloring import create_color\n'), ((1837, 1861), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1850, 1861), False, 'import pytest\n'), ((1871, 1891), 'coloring.create_color', 'create_color', (['(12)', '(12)'], {}), '(12, 12)\n', (1883, 1891), False, 'from coloring import create_color\n'), ((1902, 1926), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1915, 1926), False, 'import pytest\n'), ((1936, 1963), 'coloring.create_color', 'create_color', (['(12)', '(12)', '"""lol"""'], {}), "(12, 12, 'lol')\n", (1948, 1963), False, 'from coloring import 
create_color\n'), ((1974, 1998), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1987, 1998), False, 'import pytest\n'), ((2008, 2024), 'coloring.create_color', 'create_color', (['(12)'], {}), '(12)\n', (2020, 2024), False, 'from coloring import create_color\n')] |
from setuptools import setup

# Minimal packaging configuration for the `embed` package.
# NOTE(review): author / author_email still hold template placeholders.
setup(name='embed',
      version='0.1',
      # Fixed typo in the original description ("immplementation").
      description='Basic implementation of knowledge graph embedding.',
      url='https://github.com/pbloem/embed',
      author='<NAME>',
      author_email='<EMAIL>',
      license='MIT',
      packages=['embed'],
      install_requires=[
          'torch',
          'tensorboard',
          'tqdm'
      ],
      zip_safe=False)
"setuptools.setup"
] | [((30, 333), 'setuptools.setup', 'setup', ([], {'name': '"""embed"""', 'version': '"""0.1"""', 'description': '"""Basic immplementation of knowledge graph embedding. """', 'url': '"""https://github.com/pbloem/embed"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['embed']", 'install_requires': "['torch', 'tensorboard', 'tqdm']", 'zip_safe': '(False)'}), "(name='embed', version='0.1', description=\n 'Basic immplementation of knowledge graph embedding. ', url=\n 'https://github.com/pbloem/embed', author='<NAME>', author_email=\n '<EMAIL>', license='MIT', packages=['embed'], install_requires=['torch',\n 'tensorboard', 'tqdm'], zip_safe=False)\n", (35, 333), False, 'from setuptools import setup\n')] |
import os
import sys
import unittest
# Make sure the path of the framework is included in the import path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../src/')))
# Framework imports
from mdp import Sokoban, SokobanBuilder
class TestSokoban(unittest.TestCase):
def test_builder(self):
builder = SokobanBuilder(level_name='suitcase-05-01')
true_string_representation = '########\n' \
+ '# ..$ #\n' \
+ '# $@ $ #\n' \
+ '# $.. #\n' \
+ '########'
# This is the dynamic state, which can change over time.
true_initial_state = { 'box(6,2)', 'box(3,3)', 'box(6,3)', 'box(3,4)',
'sokoban(4,3)'
}
# This is the static state, which won't change.
true_static_state = { 'block(1,1)', 'block(2,1)', 'block(3,1)', 'block(4,1)',
'block(5,1)', 'block(6,1)', 'block(7,1)', 'block(8,1)',
'block(1,2)', 'block(8,2)',
'block(1,3)', 'block(8,3)',
'block(1,4)', 'block(8,4)',
'block(1,5)', 'block(2,5)', 'block(3,5)', 'block(4,5)',
'block(5,5)', 'block(6,5)', 'block(7,5)', 'block(8,5)',
'dest(4,2)', 'dest(5,2)', 'dest(4,4)', 'dest(5,4)',
'row(1)', 'row(2)', 'row(3)', 'row(4)', 'row(5)',
'col(1)', 'col(2)', 'col(3)', 'col(4)',
'col(5)', 'col(6)', 'col(7)', 'col(8)',
}
self.assertEqual(true_string_representation, builder.level_txt)
self.assertSetEqual(true_initial_state, builder.level_asp_initial)
self.assertSetEqual(true_static_state, builder.level_asp_static)
mdp = builder.build_mdp()
self.assertEqual(true_initial_state, mdp.state)
def test_available_actions_1(self):
builder = SokobanBuilder(level_name='suitcase-05-01')
mdp = builder.build_mdp()
true_available_actions = { 'push(6,2,left)', 'push(6,2,right)',
'push(3,3,left)', 'push(3,3,right)',
'push(6,3,left)', 'push(6,3,right)',
'push(3,4,left)', 'push(3,4,right)'
}
self.assertSetEqual(true_available_actions, mdp.available_actions)
def test_available_actions_2(self):
builder = SokobanBuilder(level_name='suitcase-05-02')
mdp = builder.build_mdp()
true_available_actions = { 'push(5,3,left)', 'push(5,3,right)', 'push(5,3,down)',
'push(4,4,right)', 'push(4,4,up)', 'push(4,4,down)',
'push(6,4,left)', 'push(6,4,up)', 'push(6,4,down)',
'push(5,5,left)', 'push(5,5,right)', 'push(5,5,up)'
}
self.assertSetEqual(true_available_actions, mdp.available_actions)
def test_available_actions_3(self):
builder = SokobanBuilder(level_name='suitcase-05-01a')
mdp = builder.build_mdp()
true_available_actions = { 'push(5,2,right)',
'push(4,3,right)',
'push(4,4,right)'
}
self.assertSetEqual(true_available_actions, mdp.available_actions)
def test_no_actions_available(self):
builder = SokobanBuilder(level_name='suitcase-05-01b')
mdp = builder.build_mdp()
self.assertEqual(set(), mdp.available_actions)
self.assertEqual(0, len(mdp.available_actions))
def test_no_actions_available_after_action(self):
builder = SokobanBuilder(level_name='suitcase-05-01c')
mdp = builder.build_mdp()
mdp.transition('push(3,2,right)')
self.assertEqual(set(), mdp.available_actions)
self.assertEqual(0, len(mdp.available_actions))
def test_executing_wrong_actions(self):
builder = SokobanBuilder(level_name='suitcase-05-01')
mdp = builder.build_mdp()
with self.assertRaises(Exception):
mdp.transition('push(3,3,up)')
def test_transition_1(self):
"""
Just messing around with arbitrary moves:
State 0: State 1: State 2:
####### ####### #######
# # # # # #
## .$. # ## .@. # ## * . #
#@ $ $ # # $$$ # # @$$ #
# .$. # # .$. # # .$. #
## # ## # ## #
####### ####### #######
"""
builder = SokobanBuilder(level_name='suitcase-05-02')
mdp = builder.build_mdp()
state_0 = mdp.state
next_state, next_reward = mdp.transition('push(5,3,down)')
state_1 = state_0 - {'box(5,3)', 'sokoban(2,4)'} | {'box(5,4)', 'sokoban(5,3)'}
self.assertSetEqual(state_1, mdp.state)
self.assertSetEqual(state_1, next_state)
self.assertEqual(-1, next_reward)
true_available_actions_1 = { 'push(4,4,up)', 'push(4,4,down)',
'push(6,4,up)', 'push(6,4,down)',
'push(5,5,left)', 'push(5,5,right)'
}
self.assertEqual(true_available_actions_1, mdp.available_actions)
next_state, next_reward = mdp.transition('push(4,4,up)')
state_2 = state_1 - {'box(4,4)', 'sokoban(5,3)'} | {'box(4,3)', 'sokoban(4,4)'}
self.assertSetEqual(state_2, mdp.state)
self.assertSetEqual(state_2, next_state)
self.assertEqual(-1, next_reward)
true_available_actions_2 = { 'push(4,3,up)', 'push(4,3,down)', 'push(4,3,left)', 'push(4,3,right)',
'push(6,4,up)', 'push(6,4,down)',
'push(5,5,left)', 'push(5,5,right)'
}
self.assertEqual(true_available_actions_2, mdp.available_actions)
self.assertEqual([None, -1, -1], mdp.reward_history)
self.assertEqual([-2, -1, 0], mdp.return_history)
def test_transition_2(self):
"""
Moving a block into a corner should end the MDP.
State 0 State 4
######## ########
# ..$ # # ..$ #
# $@ $ # # $ $ #
# $.. # #$@.. #
######## ########
"""
builder = SokobanBuilder(level_name='suitcase-05-01')
mdp = builder.build_mdp()
state_0 = mdp.state
state_1 = state_0 - { 'box(3,4)', 'sokoban(4,3)' } \
| { 'box(2,4)', 'sokoban(3,4)' }
next_state, next_reward = mdp.transition('push(3,4,left)')
self.assertSetEqual(state_1, mdp.state)
self.assertSetEqual(state_1, next_state)
self.assertEqual(-101, next_reward)
self.assertSetEqual(set(), mdp.available_actions)
self.assertEqual([None, -101], mdp.reward_history)
self.assertEqual([-101, 0], mdp.return_history)
def test_transition_3(self):
"""
Get a reward when moving to the goal state.
State 0 State 1 State 2 State 3
######## ######## ######## ########
# ..$ # # ..$ # # $..$ # # @*.$ #
# $@ $ # # $ $ # # @ $ # # $ #
# $.. # # @*. # # *. # # *. #
######## ######## ######## ########
State 4 State 5 State 6
######## ######## ########
# **@ # # ** # # ** #
# $ # # @ # # #
# *. # # *.$ # # **@ #
######## ######## ########
"""
builder = SokobanBuilder(level_name='suitcase-05-01')
mdp = builder.build_mdp()
state_0 = mdp.state
mdp.transition('push(3,4,right)')
mdp.transition('push(3,3,up)')
mdp.transition('push(3,2,right)')
mdp.transition('push(6,2,left)')
mdp.transition('push(6,3,down)')
next_state, next_reward = mdp.transition('push(6,4,left)')
true_state_8 = { 'box(4,2)', 'box(5,2)', 'box(4,4)', 'box(5,4)',
'sokoban(6,4)' }
self.assertSetEqual(true_state_8, mdp.state)
self.assertSetEqual(true_state_8, next_state)
self.assertEqual(99, next_reward)
self.assertEqual([None, -1, -1, -1, -1, -1, 99], mdp.reward_history)
self.assertEqual([94, 95, 96, 97, 98, 99, 0], mdp.return_history)
| [
"os.path.dirname",
"mdp.SokobanBuilder"
] | [((347, 390), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-01"""'}), "(level_name='suitcase-05-01')\n", (361, 390), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((2187, 2230), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-01"""'}), "(level_name='suitcase-05-01')\n", (2201, 2230), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((2729, 2772), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-02"""'}), "(level_name='suitcase-05-02')\n", (2743, 2772), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((3335, 3379), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-01a"""'}), "(level_name='suitcase-05-01a')\n", (3349, 3379), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((3751, 3795), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-01b"""'}), "(level_name='suitcase-05-01b')\n", (3765, 3795), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((4016, 4060), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-01c"""'}), "(level_name='suitcase-05-01c')\n", (4030, 4060), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((4314, 4357), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-01"""'}), "(level_name='suitcase-05-01')\n", (4328, 4357), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((5028, 5071), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-02"""'}), "(level_name='suitcase-05-02')\n", (5042, 5071), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((6888, 6931), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-01"""'}), "(level_name='suitcase-05-01')\n", (6902, 6931), False, 'from mdp import Sokoban, SokobanBuilder\n'), ((8387, 8430), 'mdp.SokobanBuilder', 'SokobanBuilder', ([], {'level_name': '"""suitcase-05-01"""'}), "(level_name='suitcase-05-01')\n", (8401, 8430), False, 'from 
mdp import Sokoban, SokobanBuilder\n'), ((155, 180), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (170, 180), False, 'import os\n')] |
"""Strip user paths from Jupyter notebook."""
import json
import os
import re
import sys
from pathlib import Path
from typing import Iterator, Mapping, Optional, Sequence
from nb_strip_paths.cmdline import CLIArgs
from nb_strip_paths.find_root import find_project_root
EXCLUDES = (
r"/("
r"\.direnv|\.eggs|\.git|\.hg|\.ipynb_checkpoints|\.mypy_cache|\.nox|\.svn|\.tox|\.venv|"
r"_build|buck-out|build|dist|venv"
r")/"
)
def _get_notebooks(root_dir: str) -> Iterator[Path]:
"""
Get generator with all notebooks in directory.
Parameters
----------
root_dir
Notebook or directory to run third-party tool on.
Returns
-------
notebooks
All Jupyter Notebooks found in directory.
"""
if not Path(root_dir).is_dir():
return iter((Path(root_dir),))
return (
i
for i in Path(root_dir).rglob("*.ipynb")
if not re.search(EXCLUDES, str(i.resolve().as_posix()))
)
def _filter_by_include_exclude(
notebooks: Iterator[Path],
include: Optional[str],
exclude: Optional[str],
) -> Iterator[Path]:
"""
Include files which match include, exclude those matching exclude.
notebooks
Notebooks (not directories) to run code quality tool on.
include:
Global file include pattern.
exclude:
Global file exclude pattern.
Returns
-------
Iterator
Notebooks matching include and not matching exclude.
"""
include = include or ""
exclude = exclude or "^$"
include_re, exclude_re = re.compile(include), re.compile(exclude)
return (
notebook
for notebook in notebooks
if include_re.search(str(notebook.as_posix()))
if not exclude_re.search(str(notebook.as_posix()))
)
def _get_all_notebooks(
root_dirs: Sequence[str], include: Optional[str], exclude: Optional[str]
) -> Iterator[Path]:
"""
Get generator with all notebooks passed in via the command-line, applying exclusions.
Parameters
----------
root_dirs
All the notebooks/directories passed in via the command-line.
Returns
-------
Iterator
All Jupyter Notebooks found in all passed directories/notebooks.
"""
return _filter_by_include_exclude(
(j for i in root_dirs for j in _get_notebooks(i)), include, exclude
)
def _strip_paths(notebook_json: Mapping, project_root: Path):
"""Strip user paths from given notebook."""
project_root_string = str(project_root) + os.sep
mutated = False
for cell in notebook_json["cells"]:
if cell["cell_type"] == "code":
for output in cell["outputs"]:
for line_number, line in enumerate(output.get("text", [])):
if project_root_string in line:
output["text"][line_number] = line.replace(
project_root_string, ""
)
mutated = True
return notebook_json, mutated
def _run_on_one_root_dir(cli_args: CLIArgs, project_root: Path) -> int:
"""
Run third-party tool on a single notebook or directory.
Parameters
----------
cli_args
Command line arguments passed to nb-strip-paths.
project_root
Root of repository, where .git / .hg / .nbqa.ini file is.
Returns
-------
int
Output code from third-party tool.
Raises
------
RuntimeError
If unable to parse or reconstruct notebook.
"""
all_notebooks = list(
_get_all_notebooks(cli_args.root_dirs, cli_args.include, cli_args.exclude)
)
for notebook in all_notebooks:
print("Replacing user paths in", notebook)
notebook_json = json.loads(notebook.read_text(encoding="utf-8"))
notebook_json, mutated = _strip_paths(notebook_json, project_root)
if mutated:
notebook.write_text(
f"{json.dumps(notebook_json, indent=1, ensure_ascii=False)}\n",
encoding="utf-8",
)
if not all_notebooks:
sys.stderr.write(
"No .ipynb notebooks found in given directories: "
f"{' '.join(i for i in cli_args.root_dirs if Path(i).is_dir())}\n"
)
return 0
output_code = 0
return output_code
def main(argv: Optional[Sequence[str]] = None) -> None:
"""
Strip user paths from notebook or directory.
Parameters
----------
argv
Command-line arguments (if calling this function directly), defaults to
:code:`None` if calling via command-line.
"""
cli_args: CLIArgs = CLIArgs.parse_args(argv)
project_root: Path = find_project_root(tuple(cli_args.root_dirs))
output_code = _run_on_one_root_dir(cli_args, project_root)
sys.exit(output_code)
if __name__ == "__main__":
main()
| [
"re.compile",
"pathlib.Path",
"json.dumps",
"sys.exit",
"nb_strip_paths.cmdline.CLIArgs.parse_args"
] | [((4640, 4664), 'nb_strip_paths.cmdline.CLIArgs.parse_args', 'CLIArgs.parse_args', (['argv'], {}), '(argv)\n', (4658, 4664), False, 'from nb_strip_paths.cmdline import CLIArgs\n'), ((4804, 4825), 'sys.exit', 'sys.exit', (['output_code'], {}), '(output_code)\n', (4812, 4825), False, 'import sys\n'), ((1564, 1583), 're.compile', 're.compile', (['include'], {}), '(include)\n', (1574, 1583), False, 'import re\n'), ((1585, 1604), 're.compile', 're.compile', (['exclude'], {}), '(exclude)\n', (1595, 1604), False, 'import re\n'), ((763, 777), 'pathlib.Path', 'Path', (['root_dir'], {}), '(root_dir)\n', (767, 777), False, 'from pathlib import Path\n'), ((809, 823), 'pathlib.Path', 'Path', (['root_dir'], {}), '(root_dir)\n', (813, 823), False, 'from pathlib import Path\n'), ((867, 881), 'pathlib.Path', 'Path', (['root_dir'], {}), '(root_dir)\n', (871, 881), False, 'from pathlib import Path\n'), ((3947, 4002), 'json.dumps', 'json.dumps', (['notebook_json'], {'indent': '(1)', 'ensure_ascii': '(False)'}), '(notebook_json, indent=1, ensure_ascii=False)\n', (3957, 4002), False, 'import json\n'), ((4229, 4236), 'pathlib.Path', 'Path', (['i'], {}), '(i)\n', (4233, 4236), False, 'from pathlib import Path\n')] |
"""WARNING Just an experiment - please ignore this."""
from i3configger import config
BINDCODE = "bindcode"
BINDSYM = "bindsym"
class Bindings:
"""
bindsym | bindcode
[--release] [<Group>+][<Modifiers>+]<keysym> command
[--release] [--border] [--whole-window] [<Modifiers>+]button<n> command
"""
def __init__(self, content):
self.content = content
def get_all_bindings(self):
lines = [l.strip() for l in self.content.splitlines()]
lines = [l for l in lines if any(m in l for m in [BINDCODE, BINDSYM])]
lines = [l for l in lines if not l.startswith(config.MARK.COMMENT)]
return sorted(set(lines))
def translate_bindings(self):
"""translate bindcode to bindsym assignments
this need to be done the moment the information is asked because it
depends on the currently active layout.
"""
raise NotImplementedError()
def write_bindings_info(self):
"""Write info in some format that can be nicely displayed"""
raise NotImplementedError()
if __name__ == "__main__":
# use partials and account for modes
# a naming convention would make this quite easy
# mode-<modename>.conf -> bindings active in <modename>
p = config.I3configgerConfig().targetPath
b = Bindings(p.read_text())
print("\n".join(b.get_all_bindings()))
| [
"i3configger.config.I3configgerConfig"
] | [((1263, 1289), 'i3configger.config.I3configgerConfig', 'config.I3configgerConfig', ([], {}), '()\n', (1287, 1289), False, 'from i3configger import config\n')] |
import numpy as np
#DEFINE INNER FUNCTIONS
def inv_log_func(x, a, b):
return ((a * starting_score) / (2 + np.log(b * x)))
def bump_func(x,e):
return (e * np.sin(x - np.pi / 2)) + e
def sin_vals(ampl,steps):
if (steps < 1): steps = 1
sin_step = (np.pi * 2.0) / steps
x_range = np.arange(0,np.pi * 2.0 + 0.1,sin_step)
sin_vals = [bump_func(x,ampl) for x in x_range]
return sin_vals
def make_line_trio_data(ami,a,ama,bmi,b,bma):
min_data = [int(inv_log_func(x,ami,bmi)) if x <= 360 else int(inv_log_func(x,ami,bmi) + 0.003 * x) for x in data_range]
mid_data = [int(inv_log_func(x,a,b)) if x < 120 else int(inv_log_func(x,a,b) - 0.017 * x) for x in data_range]
max_data = [int(inv_log_func(x,ama,bma) - 0.029 * x) for x in data_range]
return min_data,mid_data,max_data
def add_bump_func(vals,ampl,frm,to):
bump_vals = sin_vals(ampl,to - frm)
sini = 0
for i in range(frm,to):
sini += 1
vals[i] += bump_vals[sini] * 0.5
vals[i] = int(vals[i])
def add_bump_trio(ampl,xmi,x,xma,frm,to):
add_bump_func(xmi,ampl,frm,to)
add_bump_func(x,ampl,frm,to)
add_bump_func(xma,ampl,frm,to)
def make_trio(a,b,c,d,ampl):
mini,mid,maxi = make_line_trio_data(a[0],a[1],a[2],b[0],b[1],b[2])
bump_start = int(len(mid) * c)
bump_end = len(mid) - d
add_bump_trio(ampl,mini,mid,maxi,bump_start,bump_end)
return [a[0]] + mini,[a[1]] + mid,[a[2]] + maxi
def get_az():
ami = np.random.uniform(low=0.39, high=0.7)
amd = np.random.uniform(low=0.71, high=0.98)
ama = np.random.uniform(low=0.985, high=1.205)
return [ami,amd,ama]
def get_bz():
bmi = b * np.random.uniform(low=0.9, high=0.95)
bmd = b * np.random.uniform(low=0.96, high=1.1)
bma = b * np.random.uniform(low=1.15, high=1.67)
return [bmi,bmd,bma]
def make_trios(count):
all_lines = []
az = [a_min,a,a_max]
bz = [b_min,b,b_max]
c = 0.62
d = 6
e = 10
for i in range(count):
all_lines.extend(make_trio(az,bz,c,d,e))
az = get_az()
bz = get_bz()
c = np.random.uniform(low=0.21, high=0.9)
d = int(np.random.uniform(low=1, high=30))
e = int(np.random.uniform(low=5, high=13))
return all_lines
#DEFINE STARTING VALUES
starting_score = 342.5
a = 1.0
b = 0.025
a_max = 1.2
b_max = b * 1.15
a_min = 0.8
b_min = b * 0.85
e = 10
line_count = 150
range_start = 60
range_end = 1200
step = 15
data_range = np.arange(range_start,range_end,step)
rand_seed = 21
np.random.seed(rand_seed)
csv_path = 'W:\Datasets\synth_scoring\lines.csv'
#PUT IT ALL TOGETHER
all_lines = make_trios(line_count)
for line in all_lines:
line = np.asarray(line)
all_lines = np.asarray(all_lines)
np.savetxt(csv_path,all_lines) | [
"numpy.log",
"numpy.asarray",
"numpy.random.seed",
"numpy.savetxt",
"numpy.random.uniform",
"numpy.sin",
"numpy.arange"
] | [((2457, 2496), 'numpy.arange', 'np.arange', (['range_start', 'range_end', 'step'], {}), '(range_start, range_end, step)\n', (2466, 2496), True, 'import numpy as np\n'), ((2511, 2536), 'numpy.random.seed', 'np.random.seed', (['rand_seed'], {}), '(rand_seed)\n', (2525, 2536), True, 'import numpy as np\n'), ((2709, 2730), 'numpy.asarray', 'np.asarray', (['all_lines'], {}), '(all_lines)\n', (2719, 2730), True, 'import numpy as np\n'), ((2731, 2762), 'numpy.savetxt', 'np.savetxt', (['csv_path', 'all_lines'], {}), '(csv_path, all_lines)\n', (2741, 2762), True, 'import numpy as np\n'), ((299, 340), 'numpy.arange', 'np.arange', (['(0)', '(np.pi * 2.0 + 0.1)', 'sin_step'], {}), '(0, np.pi * 2.0 + 0.1, sin_step)\n', (308, 340), True, 'import numpy as np\n'), ((1467, 1504), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.39)', 'high': '(0.7)'}), '(low=0.39, high=0.7)\n', (1484, 1504), True, 'import numpy as np\n'), ((1515, 1553), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.71)', 'high': '(0.98)'}), '(low=0.71, high=0.98)\n', (1532, 1553), True, 'import numpy as np\n'), ((1564, 1604), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.985)', 'high': '(1.205)'}), '(low=0.985, high=1.205)\n', (1581, 1604), True, 'import numpy as np\n'), ((2679, 2695), 'numpy.asarray', 'np.asarray', (['line'], {}), '(line)\n', (2689, 2695), True, 'import numpy as np\n'), ((1659, 1696), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.9)', 'high': '(0.95)'}), '(low=0.9, high=0.95)\n', (1676, 1696), True, 'import numpy as np\n'), ((1711, 1748), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(0.96)', 'high': '(1.1)'}), '(low=0.96, high=1.1)\n', (1728, 1748), True, 'import numpy as np\n'), ((1763, 1801), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(1.15)', 'high': '(1.67)'}), '(low=1.15, high=1.67)\n', (1780, 1801), True, 'import numpy as np\n'), ((2086, 2123), 'numpy.random.uniform', 'np.random.uniform', ([], 
{'low': '(0.21)', 'high': '(0.9)'}), '(low=0.21, high=0.9)\n', (2103, 2123), True, 'import numpy as np\n'), ((111, 124), 'numpy.log', 'np.log', (['(b * x)'], {}), '(b * x)\n', (117, 124), True, 'import numpy as np\n'), ((164, 185), 'numpy.sin', 'np.sin', (['(x - np.pi / 2)'], {}), '(x - np.pi / 2)\n', (170, 185), True, 'import numpy as np\n'), ((2140, 2173), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(1)', 'high': '(30)'}), '(low=1, high=30)\n', (2157, 2173), True, 'import numpy as np\n'), ((2191, 2224), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(5)', 'high': '(13)'}), '(low=5, high=13)\n', (2208, 2224), True, 'import numpy as np\n')] |
from pynput import keyboard
import sys
import socket
import requests
import json
import logging
import configparser
config = configparser.ConfigParser()
config.read('config.ini')
FILENAME = config['DEFAULT']['FILENAME']
LOG_DIR = config['DEFAULT']['LOG_DIR']
LOGFILE = config['DEFAULT']['LOGFILE']
ESCAPE_STRING = config['DEFAULT']['ESCAPE_STRING']
FLAGS_config = config['DEFAULT']['FLAGS']
FLAGS = FLAGS_config.split(',')
URL = config['DEFAULT']['URL']
HOSTNAME = socket.gethostname()
logging.basicConfig(filename=(LOG_DIR + LOGFILE), level=logging.DEBUG, format='%(asctime)s: %(message)s')
def get_key_name(key):
if isinstance(key, keyboard.KeyCode):
return key.char
else:
return str(key)
def on_press(key):
key_name = get_key_name(key)
# Logging pressed key into buffer
if (key.__class__.__name__=="KeyCode"):
file = open(FILENAME,"a")
file.write(key_name)
file.close()
def on_release(key):
# Reading buffer on key release
file2 = open(FILENAME,"r")
data = file2.read()
# Check last chars against exit keyword
if (data[-1*len(ESCAPE_STRING):]==ESCAPE_STRING):
# If match, close the file, delete its content, and exit
file2.close()
file3 = open(FILENAME,"w")
file3.write("")
file3.close()
logging.info("Escape string caught - Exiting...")
sys.exit()
# Check last chars against keywords
for flag in FLAGS:
if (data[-1*len(flag):]==flag):
file2.close()
send_data(flag)
break
def send_data(flag):
logging.info("Keyword " + flag + " caught - Sending API request...")
data = {"hostname":HOSTNAME, "flag":flag}
payload = json.dumps(data)
try:
res = requests.post(url = URL, json = data)
if (res.text=="Message received"):
logging.info("API request successfully sent. Clearing buffer...")
else:
logging.error("Unexpected response from the server. Clearing buffer...")
file3 = open(FILENAME,"w")
file3.write("")
file3.close()
except:
logging.error("HTTP connection error - " + URL + " is not responding")
logging.info("Starting capture...")
with keyboard.Listener(
on_press = on_press,
on_release = on_release) as listener:
listener.join() | [
"logging.basicConfig",
"pynput.keyboard.Listener",
"requests.post",
"configparser.ConfigParser",
"json.dumps",
"sys.exit",
"socket.gethostname",
"logging.info",
"logging.error"
] | [((126, 153), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (151, 153), False, 'import configparser\n'), ((468, 488), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (486, 488), False, 'import socket\n'), ((490, 598), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '(LOG_DIR + LOGFILE)', 'level': 'logging.DEBUG', 'format': '"""%(asctime)s: %(message)s"""'}), "(filename=LOG_DIR + LOGFILE, level=logging.DEBUG, format\n ='%(asctime)s: %(message)s')\n", (509, 598), False, 'import logging\n'), ((2213, 2248), 'logging.info', 'logging.info', (['"""Starting capture..."""'], {}), "('Starting capture...')\n", (2225, 2248), False, 'import logging\n'), ((1612, 1680), 'logging.info', 'logging.info', (["('Keyword ' + flag + ' caught - Sending API request...')"], {}), "('Keyword ' + flag + ' caught - Sending API request...')\n", (1624, 1680), False, 'import logging\n'), ((1741, 1757), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1751, 1757), False, 'import json\n'), ((2254, 2313), 'pynput.keyboard.Listener', 'keyboard.Listener', ([], {'on_press': 'on_press', 'on_release': 'on_release'}), '(on_press=on_press, on_release=on_release)\n', (2271, 2313), False, 'from pynput import keyboard\n'), ((1326, 1375), 'logging.info', 'logging.info', (['"""Escape string caught - Exiting..."""'], {}), "('Escape string caught - Exiting...')\n", (1338, 1375), False, 'import logging\n'), ((1384, 1394), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1392, 1394), False, 'import sys\n'), ((1781, 1814), 'requests.post', 'requests.post', ([], {'url': 'URL', 'json': 'data'}), '(url=URL, json=data)\n', (1794, 1814), False, 'import requests\n'), ((1874, 1939), 'logging.info', 'logging.info', (['"""API request successfully sent. Clearing buffer..."""'], {}), "('API request successfully sent. 
Clearing buffer...')\n", (1886, 1939), False, 'import logging\n'), ((1966, 2038), 'logging.error', 'logging.error', (['"""Unexpected response from the server. Clearing buffer..."""'], {}), "('Unexpected response from the server. Clearing buffer...')\n", (1979, 2038), False, 'import logging\n'), ((2141, 2211), 'logging.error', 'logging.error', (["('HTTP connection error - ' + URL + ' is not responding')"], {}), "('HTTP connection error - ' + URL + ' is not responding')\n", (2154, 2211), False, 'import logging\n')] |
import tenjin
from tenjin.helpers import *
import cgi
engine = tenjin.Engine(path=['views'], escapefunc="cgi.escape", tostrfunc="str")
print(engine.get_template('page.pyhtml').script)
| [
"tenjin.Engine"
] | [((63, 134), 'tenjin.Engine', 'tenjin.Engine', ([], {'path': "['views']", 'escapefunc': '"""cgi.escape"""', 'tostrfunc': '"""str"""'}), "(path=['views'], escapefunc='cgi.escape', tostrfunc='str')\n", (76, 134), False, 'import tenjin\n')] |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import base64
import io
import tarfile
def tar_gz_base64(dir_path):
"""Compress target directory and convert to Base64 string."""
try:
f = io.BytesIO()
with tarfile.open(fileobj=f, mode='w:gz') as archive:
archive.add(dir_path, arcname='.')
return base64.b64encode(f.getvalue()).decode('utf-8')
finally:
f.close()
def decode_env_dict(prefix, env):
env_list = []
if not isinstance(env, dict):
return env_list
for k, v in env.items():
key = "%s_%s" % (prefix.upper(), k.upper())
if (isinstance(v, list)):
v = " ".join(v)
env_list.append("export %s=\"%s\"" % (key, v))
return env_list
def generate_default_env_list(env, additional_env_list=[]):
env_list = []
for k, v in env['configs'].items():
if k == 'kubernetes':
prefix = 'kube'
else:
prefix = k
env_list.extend(decode_env_dict(prefix, env['configs'][k]))
env_list.extend(additional_env_list)
return env_list
def generate_env_file(path, env, additional_env_list=[]):
env_list = generate_default_env_list(env, additional_env_list)
with open(path, 'w') as f:
for e in env_list:
f.write(e)
f.write('\n')
class Struct(dict):
"""Specialized dict where you access an item like an attribute
>>> struct = Struct()
>>> struct['a'] = 1
>>> struct.b = 2
>>> assert struct.a == 1
>>> assert struct['b'] == 2
"""
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
try:
self[name] = value
except KeyError:
raise AttributeError(name)
| [
"tarfile.open",
"io.BytesIO"
] | [((794, 806), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (804, 806), False, 'import io\n'), ((820, 856), 'tarfile.open', 'tarfile.open', ([], {'fileobj': 'f', 'mode': '"""w:gz"""'}), "(fileobj=f, mode='w:gz')\n", (832, 856), False, 'import tarfile\n')] |
import os
import traceback
from ToolBox import utils
class SelectInterface():
def __init__(self, options=None):
if options is None:
options = {}
self.options = options
# options should be a dict
def add_option(self, option, alias=None):
if type(alias) is None:
alias = []
elif type(alias) is not list:
alias = [alias]
if option in self.options:
self.options[option] += alias
else:
self.options[option] = alias
def interactive(self, order_alias = True, patience=3, input_hint='>>> ', hint='Please choose from following options:\n', indent=' ', alias_del = ','):
info = hint
order_id = 1
for option in self.options:
info += indent
if order_alias:
info += '[{0}] '.format(order_id)
order_id += 1
info += option
if self.options[option] != []:
alias_string = ''
for alias in self.options[option]:
alias_string += alias + alias_del
alias_string = alias_string[:-1]
info += '[alias: {0}]'.format(alias_string)
info += '\n'
print(info, end='')
valid_ans = False
failed_time = 0
keys = list(self.options)
while not valid_ans:
ans = input(input_hint)
if order_alias:
if ans.isdigit():
order_id = int(ans) - 1
if order_id < len(keys):
return keys[order_id]
if ans in self.options:
return ans
else:
for key in self.options:
if ans in self.options[key]:
return key
failed_time += 1
print("Input {0} cannot be understood, please input again.".format(ans))
if failed_time > patience:
return None
return None
class console():
def __init__(self, name='base'):
self.name = name
self.hint = '$ '
self.exit_cmd = ['exit', 'quit', 'bye']
self.exit_info = 'ヾ(•ω•`)o'
self.commands = {}
self.alias = {}
self.warn_level = 4
self.exit_flag = False
self.debug = True
self.platform = utils.detect_platform()
self.is_child = False
self.father = None
self.regist_internal_command()
def get_hint(self):
if self.platform == 'Linux':
hint = '\033[0;33m({0})\033[0;31m{1}\033[0m'.format(self.name, self.hint)
else:
hint = '({0}){1}'.format(self.name, self.hint)
return hint
def regist_internal_command(self):
self.regist(
'help',
action=self.command_help,
alias=['h'],
help_info='display this help info.',
kind='sys'
)
self.regist(
'exit',
action=self.command_exit_console,
alias=['quit','bye'],
help_info='exit current console.',
kind='sys'
)
self.regist(
'cls',
action=self.command_clear_screen,
alias=['clear', 'clc'],
help_info='clear screen.',
kind='sys'
)
self.regist(
'alias',
action=self.command_alias,
help_info='display alias info or create new alias.',
kind='sys'
)
self.regist(
'os',
action=self.command_os,
help_info='run a system command.',
kind='sys'
)
def translate_command(self, command):
while command in self.alias and command not in self.commands:
command = self.alias[command]
return command
def find_equal_command(self, command, ret_type = str, ignored = []):
finished = []
new = []
cmds = [command]
while len(finished) != len(cmds):
# find child
if command in self.alias:
if self.alias[command] not in cmds:
cmds.append(self.alias[command])
# find fathers
for al in self.alias:
if self.alias[al] == command:
if al not in cmds:
cmds.append(al)
# found finished.
finished.append(command)
for cmd in cmds:
if cmd not in finished:
command = cmd
if ret_type is str:
finished = utils.list2csv(finished)
return finished
def get_alias(self, command, ret_type=str):
alias = []
for al in self.alias:
if self.alias[al] == command:
alias.append(al)
if ret_type is str:
alias = utils.list2csv(alias)
return alias
def command_exist(self, command):
if command in self.commands or command in self.alias:
return True
else:
return False
def add_alias(self, command, alias):
if self.command_exist(alias):
if warn_level >= 3:
print('Alias {0} will not be added since already used'.format(al))
else:
self.alias[alias] = command
# kind: standard or shared
# standard: help info will be displayed
# shared: help info will not be displayed in sub command.
def regist(self, command, action, alias=None, help_info='no help provided.', kind='standard'):
if type(action) == console:
action.is_child = True
action.father = self
exist = self.command_exist(command)
if exist:
if self.warn_level >=3:
print('Command {0} will not be added sinece already exist.'.format(command))
return
if type(alias) is list:
for al in alias:
self.add_alias(command, al)
elif type(alias) is str:
self.add_alias(command, alias)
elif alias is None:
pass
else:
if self.warn_level > 3:
print('Unknown alias type, no alias will be added.')
self.commands[command] = {}
self.commands[command]['action'] = action
self.commands[command]['help'] = help_info
self.commands[command]['kind'] = kind
def handle_command(self, command, args):
if command in self.commands:
act = self.commands[command]['action']
try:
act(args)
except KeyboardInterrupt:
pass
except:
print('Exception occured while processing command \"{0} {1}\".'.format(command, args))
print('More information are shown below.\n', traceback.format_exc())
else:
print('Unknown command \"{0}\"'.format(command))
# seperate command and its args.
def parse_command(self, string):
string += ' '
length = len(string)
command_end = 0
parse_start = False
for i in range(length):
blank = utils.is_blank(string[i])
if not blank:
parse_start=True
if parse_start and blank:
command_end = i
break
command = string[:command_end]
command = utils.remove_blank_in_endpoint(command)
args = utils.remove_blank_in_endpoint(string[command_end:])
return command, args
def parse(self, string):
command, args = self.parse_command(string)
exitsted_commands = []
while command in self.alias:
if command not in exitsted_commands:
exitsted_commands.append(command)
command = self.alias[command]
string = command + ' ' + args
command, args = self.parse_command(string)
else:
break
return command, args
def show_help_info(self, command, prefix, indent, depth=0):
command = self.translate_command(command)
action = self.commands[command]['action']
kind = self.commands[command]['kind']
if kind == 'sys' and depth > 0:
return
alias = self.get_alias(command, ret_type=str)
if alias != '':
print('{0}{1}({2}):'.format(prefix, command, alias))
else:
print('{0}{1}:'.format(prefix, command))
print('{0}{1}{2}'.format(prefix, indent, self.commands[command]['help']))
if type(action) == console:
action.command_help('', prefix=prefix+indent, indent=indent, depth=depth+1)
def debug_log(self, command, args):
if self.debug:
print('command:[{0}] args:[{1}]'.format(command, args))
def command_exit_console(self, args):
if not self.is_child:
print(self.exit_info)
self.exit_flag = True
def command_clear_screen(self, args):
if self.platform == 'Windows':
os.system('cls')
elif self.platform == 'Linux':
os.system('clear')
return False
def command_help(self, args, prefix = '', indent=' ', depth=0):
command, args = self.parse_command(args)
if command is not "":
if self.command_exist(command):
self.show_help_info(command, prefix, indent, depth)
else:
print('Unknown command \"{0}\"'.format(command))
else:
for command in self.commands:
self.show_help_info(command, prefix, indent, depth)
def command_alias(self, args):
alias_parse = args.split('=')
if len(alias_parse) == 2:
alias = utils.remove_blank_in_endpoint(alias_parse[0])
command = utils.remove_blank_in_endpoint(alias_parse[1])
if command is not '':
self.alias[alias]=command
else:
del self.alias[alias]
elif args == '':
for alias in self.alias:
print('{0}={1}'.format(alias, self.alias[alias]))
elif len(alias_parse) == 1:
if args in self.alias:
print('{0}={1}'.format(args, self.alias[args]))
equal_alias = self.find_equal_command(args)
if equal_alias != '':
print('Hint: {0} are all equivalent.'.format(equal_alias))
elif args in self.commands:
als = self.get_alias(args, ret_type=str)
if als == '':
print('command {0} has no alias.'.format(args))
else:
print('command {0} is aliased as {1}'.format(args, als))
equal_alias = self.find_equal_command(args)
if equal_alias != '' and equal_alias != args:
print('Hint: {0} are all equivalent.'.format(equal_alias))
else:
print('No alias \"{0}\" found.'.format(args))
else:
print('Syntax error, command not understood.')
def command_os(self, args):
if args == '':
print('please specify os command')
else:
os.system(args)
def execute(self, string):
command, args = self.parse(string)
if command is not "":
self.handle_command(command, args)
def __call__(self, args):
if args != '':
self.execute(args)
else:
self.exit_flag=False
self.interactive()
def interactive(self):
while not self.exit_flag:
try:
input_str = input(self.get_hint())
self.execute(input_str)
except(KeyboardInterrupt):
print('')
if __name__ == '__main__':
    # con = console()
    # con_sub = console()
    # con_sub_sub = console()
    # con_sub.regist('test_subsubcommand', con_sub_sub, alias='tss', help_info='A sub command.')
    # con.regist('test_subcommand', con_sub, alias='ts', help_info='A sub command.')
    # con.interactive()
    # Demo: run a yes/no selector until the user picks one of the options.
    # NOTE(review): SelectInterface is not defined in this chunk - presumably
    # declared earlier in the file; verify before running standalone.
    selector = SelectInterface({"Y":[], "N":[]})
    value = selector.interactive()
print("You selected {0}".format(value)) | [
"traceback.format_exc",
"ToolBox.utils.list2csv",
"ToolBox.utils.remove_blank_in_endpoint",
"ToolBox.utils.is_blank",
"ToolBox.utils.detect_platform",
"os.system"
] | [((2457, 2480), 'ToolBox.utils.detect_platform', 'utils.detect_platform', ([], {}), '()\n', (2478, 2480), False, 'from ToolBox import utils\n'), ((7691, 7730), 'ToolBox.utils.remove_blank_in_endpoint', 'utils.remove_blank_in_endpoint', (['command'], {}), '(command)\n', (7721, 7730), False, 'from ToolBox import utils\n'), ((7747, 7799), 'ToolBox.utils.remove_blank_in_endpoint', 'utils.remove_blank_in_endpoint', (['string[command_end:]'], {}), '(string[command_end:])\n', (7777, 7799), False, 'from ToolBox import utils\n'), ((4808, 4832), 'ToolBox.utils.list2csv', 'utils.list2csv', (['finished'], {}), '(finished)\n', (4822, 4832), False, 'from ToolBox import utils\n'), ((5093, 5114), 'ToolBox.utils.list2csv', 'utils.list2csv', (['alias'], {}), '(alias)\n', (5107, 5114), False, 'from ToolBox import utils\n'), ((7448, 7473), 'ToolBox.utils.is_blank', 'utils.is_blank', (['string[i]'], {}), '(string[i])\n', (7462, 7473), False, 'from ToolBox import utils\n'), ((9392, 9408), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (9401, 9408), False, 'import os\n'), ((10117, 10163), 'ToolBox.utils.remove_blank_in_endpoint', 'utils.remove_blank_in_endpoint', (['alias_parse[0]'], {}), '(alias_parse[0])\n', (10147, 10163), False, 'from ToolBox import utils\n'), ((10187, 10233), 'ToolBox.utils.remove_blank_in_endpoint', 'utils.remove_blank_in_endpoint', (['alias_parse[1]'], {}), '(alias_parse[1])\n', (10217, 10233), False, 'from ToolBox import utils\n'), ((11615, 11630), 'os.system', 'os.system', (['args'], {}), '(args)\n', (11624, 11630), False, 'import os\n'), ((9462, 9480), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (9471, 9480), False, 'import os\n'), ((7108, 7130), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (7128, 7130), False, 'import traceback\n')] |
#################################################################################################
# #
# MULTI-ARMED BANDITS ---- 10-ARM TESTBED SOFTMAX METHOD #
# #
# Author: <NAME> #
# #
# References: #
# 1) Sutton, R.S. and Barto, A.G., 2018. Reinforcement learning: An introduction. MIT press #
# 2) GitHub: #
# i) Sahana Ramnath - https://github.com/SahanaRamnath/MultiArmedBandit_RL #
# ii) <NAME> - https://github.com/jettdlee/10_armed_bandit #
# #
#################################################################################################
"""10-arm bandit testbed comparing softmax (Boltzmann) action selection
across several temperatures (Sutton & Barto, ch. 2)."""
import numpy as np
import matplotlib.pyplot as plt
import random
import time

# Begin time counter
start_time = time.time()

# Testbed dimensions.
n = 2000                # Number of bandit problems
k = 10                  # Number of Arms
p = 1000                # Number of plays
T = [0.01, 0.2, 1, 10]  # Set of Temperature values

# True expected reward for each (problem, arm): q(a) = E[R | A = a].
q_t = np.random.normal(0, 1, (n, k))
# Index of the optimal arm per problem: A_t = argmax_a q(a).
A_t = np.argmax(q_t, 1)

# Initialize plots: f1 tracks average reward, f2 tracks % optimal action.
f1 = plt.figure().add_subplot(111)
f2 = plt.figure().add_subplot(111)
f1.title.set_text(r'$Temperature$-greedy : Average Reward Vs Steps for 10 arms')
f1.set_ylabel('Average Reward')
f1.set_xlabel('Steps')
f2.title.set_text(r'$temperature$ : $\%$ Optimal Action Vs Steps for 10 arms')
f2.set_ylabel(r'$\%$ Optimal Action')
f2.set_xlabel('Steps')
f2.set_ylim(0, 100)

Legend_Entries1 = []
Legend_Text1 = []
Legend_Entries2 = []
Legend_Text2 = []

for temp in range(len(T)):
    print('Start trials for temperature = ', T[temp])
    time_e = time.time()
    # Estimated reward per arm, and pull counts (each arm seeded with 1 pull).
    Q = np.zeros((n, k))
    N = np.ones((n, k))
    # Initial pull of every arm on every problem.
    Q_i = np.random.normal(q_t, 1)
    R_t = [0]                 # average reward per step (step 0 = 0)
    R_t.append(np.mean(Q_i))
    R_t_opt = []              # % optimal-arm pulls per step
    for pull in range(2, p + 1):
        R_p = []              # rewards collected on this step
        count_opt_arm_pulls = 0
        for i in range(n):
            # Boltzmann/softmax action selection at temperature T[temp].
            Q_ex = np.exp(Q[i] / T[temp])
            Q_softmax = Q_ex / np.sum(Q_ex)
            j = np.random.choice(range(k), 1, p=Q_softmax)
            temp_R = np.random.normal(q_t[i][j], 1)
            R_p.append(temp_R)
            if j == A_t[i]:
                count_opt_arm_pulls = count_opt_arm_pulls + 1
            # Incremental sample-average update of the estimate.
            N[i][j] = N[i][j] + 1
            Q[i][j] = Q[i][j] + (temp_R - Q[i][j]) / N[i][j]
        R_p_avg = np.mean(R_p)
        R_t.append(R_p_avg)
        R_t_opt.append(float(count_opt_arm_pulls) * 100 / n)
    # Bug fix: each curve used to be plotted twice (an anonymous
    # f1.plot/f2.plot followed by the p1/p2 assignments), which drew duplicate
    # lines and made the legend labels fall out of step with the line colours.
    p1 = f1.plot(range(0, p + 1), R_t)
    p2 = f2.plot(range(2, p + 1), R_t_opt)
    Legend_Entries1.append(p1)
    Legend_Entries2.append(p2)
    if T[temp] == 0:
        print("Temperature = 0")
        Legend_Text1.append(r"$T = $" + str(T[temp]) + " (greedy) ")
        Legend_Text2.append(r"$T = $" + str(T[temp]) + " (greedy) ")
    else:
        Legend_Text1.append(r"$T = $" + str(T[temp]))
        Legend_Text2.append(r"$T = $" + str(T[temp]))
    print('Trials done for temperature = ', T[temp])
    print("Execution Time for temperature " + str(T[temp]) + " = %s" % (time.time() - time_e))

print("Total Execution time: %s seconds" % (time.time() - start_time))
f1.legend(Legend_Text1, loc='best')
f2.legend(Legend_Text2, loc='best')
plt.show()
| [
"numpy.random.normal",
"numpy.mean",
"numpy.ones",
"numpy.argmax",
"numpy.exp",
"numpy.sum",
"numpy.zeros",
"matplotlib.pyplot.figure",
"time.time",
"matplotlib.pyplot.show"
] | [((843, 854), 'time.time', 'time.time', ([], {}), '()\n', (852, 854), False, 'import time\n'), ((1058, 1088), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)', '(n, k)'], {}), '(0, 1, (n, k))\n', (1074, 1088), True, 'import numpy as np\n'), ((1135, 1152), 'numpy.argmax', 'np.argmax', (['q_t', '(1)'], {}), '(q_t, 1)\n', (1144, 1152), True, 'import numpy as np\n'), ((3619, 3629), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3627, 3629), True, 'import matplotlib.pyplot as plt\n'), ((1736, 1747), 'time.time', 'time.time', ([], {}), '()\n', (1745, 1747), False, 'import time\n'), ((1778, 1794), 'numpy.zeros', 'np.zeros', (['(n, k)'], {}), '((n, k))\n', (1786, 1794), True, 'import numpy as np\n'), ((1821, 1836), 'numpy.ones', 'np.ones', (['(n, k)'], {}), '((n, k))\n', (1828, 1836), True, 'import numpy as np\n'), ((1992, 2016), 'numpy.random.normal', 'np.random.normal', (['q_t', '(1)'], {}), '(q_t, 1)\n', (2008, 2016), True, 'import numpy as np\n'), ((1207, 1219), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1217, 1219), True, 'import matplotlib.pyplot as plt\n'), ((1242, 1254), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1252, 1254), True, 'import matplotlib.pyplot as plt\n'), ((2100, 2112), 'numpy.mean', 'np.mean', (['Q_i'], {}), '(Q_i)\n', (2107, 2112), True, 'import numpy as np\n'), ((2764, 2776), 'numpy.mean', 'np.mean', (['R_p'], {}), '(R_p)\n', (2771, 2776), True, 'import numpy as np\n'), ((2434, 2456), 'numpy.exp', 'np.exp', (['(Q[i] / T[temp])'], {}), '(Q[i] / T[temp])\n', (2440, 2456), True, 'import numpy as np\n'), ((2550, 2580), 'numpy.random.normal', 'np.random.normal', (['q_t[i][j]', '(1)'], {}), '(q_t[i][j], 1)\n', (2566, 2580), True, 'import numpy as np\n'), ((3518, 3529), 'time.time', 'time.time', ([], {}), '()\n', (3527, 3529), False, 'import time\n'), ((2475, 2487), 'numpy.sum', 'np.sum', (['Q_ex'], {}), '(Q_ex)\n', (2481, 2487), True, 'import numpy as np\n'), ((3449, 3460), 'time.time', 
'time.time', ([], {}), '()\n', (3458, 3460), False, 'import time\n')] |
#!/usr/local/bin/python3
"""Validate /config/config.ini for the Plex poster scripts, then run them.

Checks, in order:
  * PLEX_URL parses to a numeric host address and TOKEN is filled in,
  * the Plex server is reachable (prints its friendly name),
  * every boolean option is the literal string 'true' or 'false',
  * every schedule used by an enabled feature looks like HH:MM,
then launches run_all.py.
"""
import os
import subprocess
from subprocess import Popen, PIPE, STDOUT
from configparser import ConfigParser
import plexapi
import schedule
import time
from datetime import datetime
import re
from colorama import Fore, Back, Style
import socket
from urllib import parse
from plexapi.server import PlexServer

config_object = ConfigParser()
config_object.read("/config/config.ini")
server = config_object["PLEXSERVER"]
schedules = config_object["SCHEDULES"]
options = config_object["OPTIONS"]

# Normalise every option to lower case once; all checks below compare
# against the literal strings 'true' / 'false'.
hdr_4k_posters = str.lower((options["4k_hdr_posters"]))
poster_3d = str.lower((options["3D_posters"]))
Disney = str.lower((options["Disney"]))
Pixar = (str.lower(options["Pixar"]))
hide_4k = str.lower((options["hide_4k"]))
pbak = str.lower((options["POSTER_BU"]))
HDR_BANNER = str.lower((options["HDR_BANNER"]))
optimise = str.lower((options["transcode"]))
mini_4k = str.lower((options["mini_4k"]))
mini_3d = str.lower((options["mini_3D"]))

t1 = (schedules["4k_poster_schedule"])
t2 = (schedules["disney_schedule"])
t3 = (schedules["pixar_schedule"])
t4 = (schedules["hide_poster_schedule"])
t5 = (schedules["3d_poster_schedule"])

# The URL must contain a dotted-quad host (inet_aton rejects hostnames).
# The original computed ``url`` twice; once is enough.
url = parse.urlparse(server["PLEX_URL"]).hostname
try:
    socket.inet_aton(url)
except socket.error:
    raise Exception("Uh-Oh, it looks like your PLEX_URL is not correct in the config file \n Make sure you enter it as 'http://ip-address:plex-port'")

if server["TOKEN"] == '<token>':
    raise Exception("You must add your Plex Token to the config file.")

try:
    print("Your Server's Friendly name is ", PlexServer((server["PLEX_URL"]), (server["TOKEN"])).friendlyName)
except Exception:
    # Narrowed from a bare ``except:`` so SystemExit is not swallowed.
    print('Cannot access your Plex account, please make sure that your Plex URL and Token are correct')
    exit()

# Every feature toggle must be exactly 'true' or 'false'.  One loop replaces
# nine copy-pasted if/elif/else blocks (same error message, same check order).
# NOTE(review): mini_3d was never validated in the original either; kept as-is.
for _flag in (pbak, HDR_BANNER, mini_4k, hdr_4k_posters, poster_3d,
              Disney, Pixar, hide_4k, optimise):
    if _flag not in ('true', 'false'):
        raise ValueError('SYNTAX ERROR: Please enter either "true" or "false" to set the script behaviour.')

# Schedules only matter for features that are switched on.
a = re.compile("^[0-9]{2}:[0-9]{2}$")
for _sched, _enabled in ((t1, hdr_4k_posters), (t5, poster_3d),
                         (t2, Disney), (t3, Pixar), (t4, hide_4k)):
    if _enabled == 'true' and not a.match(_sched):
        raise ValueError('Please make sure that your scheduled times are written in the format HH:MM')

print('Config check passed')
p = Popen('python -u ./run_all.py', shell=True)
output = p.communicate()
print(output[0])
| [
"plexapi.server.PlexServer",
"urllib.parse.urlparse",
"configparser.ConfigParser",
"re.compile",
"subprocess.Popen",
"socket.inet_aton"
] | [((386, 400), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (398, 400), False, 'from configparser import ConfigParser\n'), ((3570, 3603), 're.compile', 're.compile', (['"""^[0-9]{2}:[0-9]{2}$"""'], {}), "('^[0-9]{2}:[0-9]{2}$')\n", (3580, 3603), False, 'import re\n'), ((4619, 4662), 'subprocess.Popen', 'Popen', (['"""python -u ./run_all.py"""'], {'shell': '(True)'}), "('python -u ./run_all.py', shell=True)\n", (4624, 4662), False, 'from subprocess import Popen, PIPE, STDOUT\n'), ((1218, 1252), 'urllib.parse.urlparse', 'parse.urlparse', (["server['PLEX_URL']"], {}), "(server['PLEX_URL'])\n", (1232, 1252), False, 'from urllib import parse\n'), ((1329, 1350), 'socket.inet_aton', 'socket.inet_aton', (['url'], {}), '(url)\n', (1345, 1350), False, 'import socket\n'), ((1280, 1314), 'urllib.parse.urlparse', 'parse.urlparse', (["server['PLEX_URL']"], {}), "(server['PLEX_URL'])\n", (1294, 1314), False, 'from urllib import parse\n'), ((1684, 1731), 'plexapi.server.PlexServer', 'PlexServer', (["server['PLEX_URL']", "server['TOKEN']"], {}), "(server['PLEX_URL'], server['TOKEN'])\n", (1694, 1731), False, 'from plexapi.server import PlexServer\n')] |
from django.db import models
from account.models import User
from django.utils import timezone
from django.utils.text import slugify
class Post(models.Model):
    """A user-authored feed post with an auto-generated, date-scoped slug."""
    # SET_NULL keeps the post alive when its author's account is deleted.
    author = models.ForeignKey(to=User,
                            on_delete=models.SET_NULL,
                            related_name='feed_posts',
                            null=True
                            )
    title = models.CharField(max_length=140,blank=False,null=False)
    body = models.TextField(max_length=250,blank=False,null=False)
    # Slug uniqueness is scoped to the creation date; filled by save() below.
    slug = models.SlugField(max_length=250,unique_for_date='created')
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated = models.DateTimeField(auto_now=True)  # refreshed on every save
    class Meta:
        # Newest (then most recently updated) posts first.
        ordering = ('-created','-updated')
    def __str__(self):
        return self.title
    def save(self, *args, **kwargs):
        """Derive the slug from the title on first save, then delegate."""
        if not self.slug:
            self.slug = slugify(self.title)
        super().save(*args, **kwargs)
class Comment(models.Model):
    """A user comment attached to a :class:`Post`."""
    post = models.ForeignKey(Post, related_name='comments', on_delete=models.CASCADE)
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    body = models.TextField()
    # Bug fix: ``auto_now_add`` expects a boolean flag, but the original code
    # passed ``timezone.now()`` - a datetime evaluated once at import time.
    # Being truthy it *behaved* like True, but it was misleading and froze a
    # stale timestamp object inside the field definition.
    created = models.DateTimeField(auto_now_add=True)
    class Meta:
        # Newest comments first.
        ordering = ('-created',)
    def __str__(self):
        return 'Comment by {} on {}'.format(self.author,self.post)
"django.utils.text.slugify",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.utils.timezone.now",
"django.db.models.SlugField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((175, 271), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'User', 'on_delete': 'models.SET_NULL', 'related_name': '"""feed_posts"""', 'null': '(True)'}), "(to=User, on_delete=models.SET_NULL, related_name=\n 'feed_posts', null=True)\n", (192, 271), False, 'from django.db import models\n'), ((404, 461), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(140)', 'blank': '(False)', 'null': '(False)'}), '(max_length=140, blank=False, null=False)\n', (420, 461), False, 'from django.db import models\n'), ((471, 528), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(250)', 'blank': '(False)', 'null': '(False)'}), '(max_length=250, blank=False, null=False)\n', (487, 528), False, 'from django.db import models\n'), ((538, 597), 'django.db.models.SlugField', 'models.SlugField', ([], {'max_length': '(250)', 'unique_for_date': '"""created"""'}), "(max_length=250, unique_for_date='created')\n", (554, 597), False, 'from django.db import models\n'), ((611, 650), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (631, 650), False, 'from django.db import models\n'), ((665, 700), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (685, 700), False, 'from django.db import models\n'), ((999, 1073), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Post'], {'related_name': '"""comments"""', 'on_delete': 'models.CASCADE'}), "(Post, related_name='comments', on_delete=models.CASCADE)\n", (1016, 1073), False, 'from django.db import models\n'), ((1087, 1136), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (1104, 1136), False, 'from django.db import models\n'), ((1148, 1166), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1164, 1166), False, 'from django.db import models\n'), ((899, 918), 
'django.utils.text.slugify', 'slugify', (['self.title'], {}), '(self.title)\n', (906, 918), False, 'from django.utils.text import slugify\n'), ((1215, 1229), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1227, 1229), False, 'from django.utils import timezone\n')] |
#!/usr/bin/env python
from setuptools import setup, find_packages
# Packaging metadata for the cellspatialite distribution.
# NOTE(review): ``find_packages`` is imported but the package list is spelled
# out by hand, and ``pysqlite`` is a Python-2-era dependency - verify both
# before releasing on Python 3.
setup(
    name='cellspatialite',
    version='1.0.0',
    packages=['cellspatialite', 'cellspatialite.test'],
    author='Mathieu',
    description='cellspatialite',
    install_requires= ['pysqlite', 'pandas', 'docopt'],
    license='MIT',
    # Console entry point: the ``cellspatialite`` command runs main().
    entry_points = {
        'console_scripts': [
            'cellspatialite = cellspatialite.cellspatialite:main',
        ],
    },
)
) | [
"setuptools.setup"
] | [((68, 387), 'setuptools.setup', 'setup', ([], {'name': '"""cellspatialite"""', 'version': '"""1.0.0"""', 'packages': "['cellspatialite', 'cellspatialite.test']", 'author': '"""Mathieu"""', 'description': '"""cellspatialite"""', 'install_requires': "['pysqlite', 'pandas', 'docopt']", 'license': '"""MIT"""', 'entry_points': "{'console_scripts': ['cellspatialite = cellspatialite.cellspatialite:main']}"}), "(name='cellspatialite', version='1.0.0', packages=['cellspatialite',\n 'cellspatialite.test'], author='Mathieu', description='cellspatialite',\n install_requires=['pysqlite', 'pandas', 'docopt'], license='MIT',\n entry_points={'console_scripts': [\n 'cellspatialite = cellspatialite.cellspatialite:main']})\n", (73, 387), False, 'from setuptools import setup, find_packages\n')] |
# Trains a small CNN binary classifier on 72x40 RGB images streamed from
# load_data generators, logging to TensorBoard and saving 'keras.model'.
import os
import sys
import glob
import tensorflow as tf
import keras
from keras.datasets import mnist
from keras.layers import Dense, Flatten, Dropout
from keras.layers import Conv2D, MaxPooling2D
from keras.models import Sequential
from keras.models import Model
from keras.layers import Dense, GlobalAveragePooling2D, Dropout
from keras import backend as K
from keras.optimizers import SGD
import matplotlib.pylab as plt
import load_data as ld
from keras.callbacks import TensorBoard
tensorboard = TensorBoard(log_dir='./logs', histogram_freq=0,
                          write_graph=True, write_images=False)
batch_size = 200
epochs = 50
# Input image size (width x height, 3 colour channels).
width = 72
height = 40
# NOTE(review): assumes load_data yields (images, labels) batches suitable
# for fit_generator - confirm against load_data.py.
train_generator = ld.load_data(width, height, 'train', batch_size)
test_generator = ld.load_data(width, height, 'test', batch_size)
input_shape = (width, height, 3)
# Cap TensorFlow at 75% of GPU memory so other processes can share the card.
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.75)
class AccuracyHistory(keras.callbacks.Callback):
    # Records training accuracy after every epoch (read for plotting below).
    def on_train_begin(self, logs={}):
        self.acc = []
    def on_epoch_end(self, batch, logs={}):
        self.acc.append(logs.get('acc'))
with tf.Session(config = tf.ConfigProto(gpu_options=gpu_options)) as sess:
    K.set_session(sess)
    # Four conv layers (64/128/128/128 filters, 5x5 kernels) with two
    # max-pools, then two dense-800 layers and a 2-way softmax head.
    model = Sequential()
    model.add(Conv2D(64, kernel_size=(5, 5), strides=(1, 1),
                     activation='relu',
                     input_shape=input_shape))
    model.add(Conv2D(128, kernel_size=(5, 5), strides=(1, 1),
                     activation='relu',
                     input_shape=input_shape))
    model.add(MaxPooling2D())
    model.add(Conv2D(128, kernel_size=(5, 5), strides=(1, 1),
                     activation='relu',
                     input_shape=input_shape))
    model.add(Conv2D(128, kernel_size=(5, 5), strides=(1, 1),
                     activation='relu',
                     input_shape=input_shape))
    model.add(MaxPooling2D())
    model.add(Flatten())
    model.add(Dense(800, activation='relu'))
    model.add(Dense(800, activation='relu'))
    model.add(Dropout(0.25))
    model.add(Dense(2, activation='softmax'))
    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=keras.optimizers.Adam(),
                  metrics=['accuracy'])
    history = AccuracyHistory()
    model.fit_generator(train_generator,
                        epochs = epochs,
                        validation_data=test_generator,
                        class_weight='auto',
                        verbose=1,
                        callbacks=[history, tensorboard])
    model.save('keras.model')
    score = model.evaluate_generator(test_generator)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])
    # Plot the per-epoch training accuracy (assumes the full 50 epochs ran).
    plt.plot(range(1, 51), history.acc)
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
plt.show() | [
"keras.optimizers.Adam",
"keras.layers.Conv2D",
"keras.layers.Flatten",
"keras.layers.MaxPooling2D",
"keras.backend.set_session",
"matplotlib.pylab.xlabel",
"keras.callbacks.TensorBoard",
"keras.models.Sequential",
"keras.layers.Dropout",
"load_data.load_data",
"matplotlib.pylab.show",
"keras.... | [((502, 591), 'keras.callbacks.TensorBoard', 'TensorBoard', ([], {'log_dir': '"""./logs"""', 'histogram_freq': '(0)', 'write_graph': '(True)', 'write_images': '(False)'}), "(log_dir='./logs', histogram_freq=0, write_graph=True,\n write_images=False)\n", (513, 591), False, 'from keras.callbacks import TensorBoard\n'), ((686, 734), 'load_data.load_data', 'ld.load_data', (['width', 'height', '"""train"""', 'batch_size'], {}), "(width, height, 'train', batch_size)\n", (698, 734), True, 'import load_data as ld\n'), ((752, 799), 'load_data.load_data', 'ld.load_data', (['width', 'height', '"""test"""', 'batch_size'], {}), "(width, height, 'test', batch_size)\n", (764, 799), True, 'import load_data as ld\n'), ((848, 899), 'tensorflow.GPUOptions', 'tf.GPUOptions', ([], {'per_process_gpu_memory_fraction': '(0.75)'}), '(per_process_gpu_memory_fraction=0.75)\n', (861, 899), True, 'import tensorflow as tf\n'), ((2660, 2680), 'matplotlib.pylab.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (2670, 2680), True, 'import matplotlib.pylab as plt\n'), ((2681, 2703), 'matplotlib.pylab.ylabel', 'plt.ylabel', (['"""Accuracy"""'], {}), "('Accuracy')\n", (2691, 2703), True, 'import matplotlib.pylab as plt\n'), ((2704, 2714), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (2712, 2714), True, 'import matplotlib.pylab as plt\n'), ((1177, 1196), 'keras.backend.set_session', 'K.set_session', (['sess'], {}), '(sess)\n', (1190, 1196), True, 'from keras import backend as K\n'), ((1210, 1222), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1220, 1222), False, 'from keras.models import Sequential\n'), ((1237, 1331), 'keras.layers.Conv2D', 'Conv2D', (['(64)'], {'kernel_size': '(5, 5)', 'strides': '(1, 1)', 'activation': '"""relu"""', 'input_shape': 'input_shape'}), "(64, kernel_size=(5, 5), strides=(1, 1), activation='relu',\n input_shape=input_shape)\n", (1243, 1331), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1383, 1478), 
'keras.layers.Conv2D', 'Conv2D', (['(128)'], {'kernel_size': '(5, 5)', 'strides': '(1, 1)', 'activation': '"""relu"""', 'input_shape': 'input_shape'}), "(128, kernel_size=(5, 5), strides=(1, 1), activation='relu',\n input_shape=input_shape)\n", (1389, 1478), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1530, 1544), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {}), '()\n', (1542, 1544), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1561, 1656), 'keras.layers.Conv2D', 'Conv2D', (['(128)'], {'kernel_size': '(5, 5)', 'strides': '(1, 1)', 'activation': '"""relu"""', 'input_shape': 'input_shape'}), "(128, kernel_size=(5, 5), strides=(1, 1), activation='relu',\n input_shape=input_shape)\n", (1567, 1656), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1708, 1803), 'keras.layers.Conv2D', 'Conv2D', (['(128)'], {'kernel_size': '(5, 5)', 'strides': '(1, 1)', 'activation': '"""relu"""', 'input_shape': 'input_shape'}), "(128, kernel_size=(5, 5), strides=(1, 1), activation='relu',\n input_shape=input_shape)\n", (1714, 1803), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1855, 1869), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {}), '()\n', (1867, 1869), False, 'from keras.layers import Conv2D, MaxPooling2D\n'), ((1894, 1903), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1901, 1903), False, 'from keras.layers import Dense, Flatten, Dropout\n'), ((1919, 1948), 'keras.layers.Dense', 'Dense', (['(800)'], {'activation': '"""relu"""'}), "(800, activation='relu')\n", (1924, 1948), False, 'from keras.layers import Dense, GlobalAveragePooling2D, Dropout\n'), ((1964, 1993), 'keras.layers.Dense', 'Dense', (['(800)'], {'activation': '"""relu"""'}), "(800, activation='relu')\n", (1969, 1993), False, 'from keras.layers import Dense, GlobalAveragePooling2D, Dropout\n'), ((2009, 2022), 'keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (2016, 2022), False, 'from keras.layers import Dense, 
GlobalAveragePooling2D, Dropout\n'), ((2038, 2068), 'keras.layers.Dense', 'Dense', (['(2)'], {'activation': '"""softmax"""'}), "(2, activation='softmax')\n", (2043, 2068), False, 'from keras.layers import Dense, GlobalAveragePooling2D, Dropout\n'), ((1123, 1162), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'gpu_options': 'gpu_options'}), '(gpu_options=gpu_options)\n', (1137, 1162), True, 'import tensorflow as tf\n'), ((2159, 2182), 'keras.optimizers.Adam', 'keras.optimizers.Adam', ([], {}), '()\n', (2180, 2182), False, 'import keras\n')] |
#!/usr/bin/env python3
#encoding=utf-8
#------------------------------------------------------
# Usage: python3 use_module2.py
# Description: module basic - inspect a module's attributes and __dict__
#------------------------------------------------------
import module2

# A module's top-level assignments (and imports) become its attributes.
print(module2.sys)
print(module2.name)
print(module2.klass)

# vars(module2) is the very same object as module2.__dict__.
namespace = vars(module2)
print('The dict of module2 is: ')
print(list(namespace))
print('The dict of module2 without __xxx__ is: ')
print([attr for attr in namespace if not attr.startswith('__')])
| [
"module2.__dict__.keys"
] | [((338, 361), 'module2.__dict__.keys', 'module2.__dict__.keys', ([], {}), '()\n', (359, 361), False, 'import module2\n'), ((433, 456), 'module2.__dict__.keys', 'module2.__dict__.keys', ([], {}), '()\n', (454, 456), False, 'import module2\n')] |
#!/usr/bin/env python
# CGI gate for the CloudSim console: authenticates the caller (Basic auth or
# OpenID), maps the OpenID session cookie to an email, checks that email
# against the user database, then redirects to the console page.
# NOTE(review): Python 2 code (``Cookie``/``cgi`` modules, old-style print).
from __future__ import with_statement
from __future__ import print_function
import sys
import os
import Cookie
import cgi
import common
import cgitb
cgitb.enable()
# Form field OpenID providers use to return the verified email address.
EMAIL_VARNAME = 'openid.ext1.value.email'
# Are we using basic auth?
auth_type, email = common.web.get_auth_type()
if auth_type == 'OpenID':
    # Get form and cookie data
    form = cgi.FieldStorage()
    email = form.getfirst(EMAIL_VARNAME)
    in_cookies = Cookie.Cookie()
    in_cookies.load(os.environ[common.HTTP_COOKIE])
    openid_session = in_cookies[common.OPENID_SESSION_COOKIE_NAME].value
    sdb = common.SessionDatabase()
    sdb.load()
    if not email:
        # No email in the form - fall back to the stored session mapping.
        if openid_session in sdb.db:
            email = sdb.db[openid_session]
# Check email
udb = common.UserDatabase()
users = udb.get_users()
# Force email to lower case for comparison to users list, which we
# lower-cased when loading.
if email:
    email = email.lower()
if email not in users:
    # print ("openid_session %s" % openid_session)
    if email:
        # Known identity, but not on the allow-list: deny and stop.
        common.print_http_header()
        print("Access Denied ... '%s' not in users<br>" % (email))
        sys.exit(0)
    elif auth_type == 'OpenID':
        # Unknown session: clear the session cookie and send the user back
        # to the login page.
        out_cookies = Cookie.SmartCookie()
        out_cookies[common.OPENID_SESSION_COOKIE_NAME] = ''
        out_cookies[common.OPENID_SESSION_COOKIE_NAME]['path'] = '/cloudsim/inside/cgi-bin/'
        print(out_cookies)
        common.print_http_header()
        print("""
Your open session ID is not associated with a user. Please login again<br>
<a href="/cloudsim/index.html">login</a>
        """)
        sys.exit(0)
# Save session ID and email to our own database
if auth_type == 'OpenID':
    sdb.db[openid_session] = email
    sdb.save()
# redirect to the console now
common.print_http_header()
print ('<meta http-equiv="refresh" content="0; url=/cloudsim/inside/cgi-bin/console">')
| [
"cgi.FieldStorage",
"sys.exit",
"Cookie.Cookie",
"Cookie.SmartCookie",
"common.web.get_auth_type",
"common.UserDatabase",
"common.SessionDatabase",
"cgitb.enable",
"common.print_http_header"
] | [((172, 186), 'cgitb.enable', 'cgitb.enable', ([], {}), '()\n', (184, 186), False, 'import cgitb\n'), ((277, 303), 'common.web.get_auth_type', 'common.web.get_auth_type', ([], {}), '()\n', (301, 303), False, 'import common\n'), ((762, 783), 'common.UserDatabase', 'common.UserDatabase', ([], {}), '()\n', (781, 783), False, 'import common\n'), ((1781, 1807), 'common.print_http_header', 'common.print_http_header', ([], {}), '()\n', (1805, 1807), False, 'import common\n'), ((373, 391), 'cgi.FieldStorage', 'cgi.FieldStorage', ([], {}), '()\n', (389, 391), False, 'import cgi\n'), ((450, 465), 'Cookie.Cookie', 'Cookie.Cookie', ([], {}), '()\n', (463, 465), False, 'import Cookie\n'), ((601, 625), 'common.SessionDatabase', 'common.SessionDatabase', ([], {}), '()\n', (623, 625), False, 'import common\n'), ((1038, 1064), 'common.print_http_header', 'common.print_http_header', ([], {}), '()\n', (1062, 1064), False, 'import common\n'), ((1140, 1151), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1148, 1151), False, 'import sys\n'), ((1206, 1226), 'Cookie.SmartCookie', 'Cookie.SmartCookie', ([], {}), '()\n', (1224, 1226), False, 'import Cookie\n'), ((1415, 1441), 'common.print_http_header', 'common.print_http_header', ([], {}), '()\n', (1439, 1441), False, 'import common\n'), ((1613, 1624), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1621, 1624), False, 'import sys\n')] |
"""utilities for watching worker pools"""
from collections.abc import Callable, Mapping
import logging
import time
from types import MappingProxyType
from typing import Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from multiprocessing import Pool
from pathos.multiprocessing import ProcessPool
class ChangeReporter:
    """Track a mapping and report which entries changed between snapshots.

    ``state`` holds a read-only view of the most recent snapshot, while
    ``reference`` keeps the original (possibly live) mapping so ``query``
    can diff against its current contents.
    """

    def __init__(self, mapping: Mapping):
        """Record *mapping* as both the baseline snapshot and the live reference."""
        self.state = MappingProxyType(mapping)
        self.reference = mapping

    def check(self, new_state: Mapping):
        """Return the entries of *new_state* whose values differ from the baseline."""
        changed = {}
        for key, value in new_state.items():
            if self.state.get(key) != value:
                changed[key] = value
        return changed

    def update(self, new_state: Mapping):
        """Advance the baseline to *new_state* and return what changed."""
        delta = self.check(new_state)
        self.state = MappingProxyType(new_state)
        return delta

    def query(self):
        """Diff the live reference mapping against the baseline and advance it."""
        return self.update(self.reference)
def watch_pool(
    result_map, interval: float = 1, callback: Optional[Callable] = None
):
    """Poll *result_map* (task name -> AsyncResult) until every result is ready.

    Each polling pass feeds the readiness changes to *callback* (when one is
    given) and then sleeps *interval* seconds.  Returns *result_map* once all
    results report ready.
    """
    def snapshot():
        # One boolean readiness flag per task.
        return {name: res.ready() for name, res in result_map.items()}

    readiness = snapshot()
    reporter = ChangeReporter(readiness)
    while not all(readiness.values()):
        readiness = snapshot()
        changes = reporter.update(readiness)
        if callback is not None:
            callback(changes)
        time.sleep(interval)
    return result_map
def simple_log_callback(
    logger: Optional[logging.Logger] = None, prefix: str = ""
):
    """Build a ``watch_pool``-style callback that logs each changed key.

    Falls back to this module's logger when *logger* is None; each key of
    the report dict is logged at INFO level, prefixed with *prefix*.
    """
    log = logger if logger is not None else logging.getLogger(__name__)

    def log_changed_keys(report):
        # The keys identify the tasks whose readiness flipped.
        for changed_key in report:
            log.info(prefix + changed_key)

    return log_changed_keys
def wait_for_it(
    pool: Union["ProcessPool", "Pool"],
    results: Mapping,
    log: logging.Logger = None,
    message: str = None,
    callback: Optional[Callable] = None,
    interval: float = 0.1,
    as_dict: bool = False,
) -> Union[dict, list]:
    """Close *pool*, watch *results* until complete, join, and collect outputs.

    When no explicit *callback* is supplied but *log* is, progress is reported
    through ``simple_log_callback(log, message)``.  Returns a ``{task: value}``
    dict when *as_dict* is true, otherwise a list of values in ``results``
    iteration order.
    """
    if callback is None and log is not None:
        callback = simple_log_callback(log, message)
    # No further work may be submitted once we start waiting.
    pool.close()
    if results is not None:
        watch_pool(results, interval, callback)
    pool.join()
    if as_dict:
        gathered = {name: res.get() for name, res in results.items()}
    else:
        gathered = [res.get() for res in results.values()]
    return gathered
| [
"types.MappingProxyType",
"logging.getLogger",
"time.sleep"
] | [((394, 419), 'types.MappingProxyType', 'MappingProxyType', (['mapping'], {}), '(mapping)\n', (410, 419), False, 'from types import MappingProxyType\n'), ((754, 781), 'types.MappingProxyType', 'MappingProxyType', (['new_state'], {}), '(new_state)\n', (770, 781), False, 'from types import MappingProxyType\n'), ((1381, 1401), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (1391, 1401), False, 'import time\n'), ((1556, 1583), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1573, 1583), False, 'import logging\n')] |
#!/usr/bin/env python3
"""Defines a :class:`Request`."""
import enum
import numbers
from schedsi.cpu import context
class Type(enum.Enum):
    """Kinds of requests a thread can make to the CPU."""
    current_time = enum.auto()
    resume_chain = enum.auto()
    idle = enum.auto()
    execute = enum.auto()
    timer = enum.auto()


class Request:
    """A request to the CPU."""

    def __init__(self, rtype, arg):
        """Create a :class:`Request`.

        The accepted *arg* depends on *rtype*:

        * ``current_time`` -- must be ``None``
        * ``resume_chain`` -- a :class:`context.Chain`
        * ``idle``/``execute``/``timer`` -- ``None`` or a positive rational
        """
        if rtype == Type.current_time:
            assert arg is None
        elif rtype == Type.resume_chain:
            assert isinstance(arg, context.Chain)
        else:
            assert rtype in (Type.idle, Type.execute, Type.timer), 'Unknown Type'
            assert arg is None or isinstance(arg, numbers.Rational) and arg > 0
        self.rtype = rtype
        self.arg = arg

    @classmethod
    def current_time(cls):
        """Create a :class:`Request` asking for the current time.

        The CPU will not spend any virtual time doing this.
        """
        return cls(Type.current_time, None)

    @classmethod
    def resume_chain(cls, chain):
        """Create a :class:`Request` to resume a :class:`context.Chain <schedsi.context.Chain>`."""
        return cls(Type.resume_chain, chain)

    @classmethod
    def idle(cls):
        """Create a :class:`Request` to idle."""
        return cls(Type.idle, None)

    @classmethod
    def execute(cls, amount):
        """Create a :class:`Request` to spend some time executing."""
        return cls(Type.execute, amount)

    @classmethod
    def timer(cls, time):
        """Create a :class:`Request` to set a timer for the current context."""
        return cls(Type.timer, time)
| [
"enum.Enum"
] | [((125, 204), 'enum.Enum', 'enum.Enum', (['"""Type"""', "['current_time', 'resume_chain', 'idle', 'execute', 'timer']"], {}), "('Type', ['current_time', 'resume_chain', 'idle', 'execute', 'timer'])\n", (134, 204), False, 'import enum\n')] |
import os
import sys
import tempfile
import subprocess
import cv2
import pymesh
import numpy as np
import torch
import triangle as tr
from tridepth import BaseMesh
from tridepth.extractor import calculate_canny_edges
from tridepth.extractor import SVGReader
from tridepth.extractor import resolve_self_intersection, cleanup
from tridepth.extractor import add_frame
class Mesh2DExtractor:
    """Extract a 2-D triangulated mesh from an RGB image.

    Pipeline: Canny edge detection -> centerline vectorization with
    autotrace -> polygon cleanup (pymesh wire network) -> constrained
    triangulation (Triangle) -> :class:`BaseMesh`.
    """

    def __init__(self, canny_params=None, at_params=None):
        """Store Canny options and build the autotrace command line.

        Args:
            canny_params: dict of Canny options; only "denoise" is read
                (default: {"denoise": False}).
            at_params: autotrace options "filter_itr" and "error_thresh"
                (default: {"filter_itr": 4, "error_thresh": 0.01}).
        """
        # BUGFIX: the defaults used to be mutable dict literals shared
        # between instances; use None sentinels instead.
        if canny_params is None:
            canny_params = {"denoise": False}
        if at_params is None:
            at_params = {"filter_itr": 4, "error_thresh": 0.01}
        self.canny_params = canny_params  # TODO
        self.autotrace_cmd = ['autotrace',
                              '--centerline',
                              '--remove-adjacent-corners',
                              '--filter-iterations', str(at_params["filter_itr"]),
                              '--error-threshold', str(at_params["error_thresh"]),
                              '--input-format=bmp',
                              '--output-format=svg']

    def _execute_autotrace(self, filename, debug=False):
        """Execute autotrace with input (bmp-file)
        - https://github.com/autotrace/autotrace

        Args:
            filename: path to the temporary bmp edge image.
            debug: if True, keep the bmp file around and exit for inspection.

        Returns:
            svg_string: string starting from '<svg/>'
        """
        # Execute autotrace; its svg output arrives on stdout.
        p = subprocess.Popen(self.autotrace_cmd + [filename], stdout=subprocess.PIPE)

        # Read the converted svg contents
        svg_string = p.communicate()[0]
        if not len(svg_string):
            print("autotrace_cmd: " + ' '.join(self.autotrace_cmd + [filename]), file=sys.stderr)
            print("ERROR: returned nothing, leaving tmp bmp file around for you to debug", file=sys.stderr)
            sys.exit(1)
        else:
            if debug:
                print(filename)
                # Was `exit(1)` would be clearer, but keep sys.exit for
                # robustness: `exit` is injected by the site module and is
                # not guaranteed to exist in all environments.
                sys.exit(1)
            else:
                os.unlink(filename)  # Remove the temporary file
        return svg_string

    def _read_polygon_from_svg(self, svg_string):
        """Parse *svg_string* into a cleaned-up pymesh wire network.

        Returns:
            pymesh.wires.WireNetwork with self-intersections resolved and
            degenerate geometry removed (empty network when no vertices).
        """
        # Extract polygon information from svg-string
        # - https://github.com/guyc/scadtrace/blob/master/svg.py
        svg_reader = SVGReader(svg_string)
        verts_2d, edges = svg_reader.run()

        # Store polygons as wire-format (w/ cleaning)
        # - https://github.com/PyMesh/PyMesh/blob/master/scripts/svg_to_mesh.py
        if verts_2d.shape[0] == 0:
            wires = pymesh.wires.WireNetwork.create_empty()
        else:
            wires = pymesh.wires.WireNetwork.create_from_data(verts_2d, edges)
            wires = resolve_self_intersection(wires, min_edge_size=1.5)
            wires = cleanup(wires)
        return wires

    def _triangulation(self, np_edge, wires, output_size, debug=False):
        """Triangulate the wire network with the Triangle library.

        Args:
            np_edge: Canny edge image (kept in the returned dict).
            wires: pymesh wire network (vertices + segments).
            output_size: (height, width) used to normalize vertices to [0, 1].
            debug: if True, plot the triangulation and exit.

        Returns:
            dict from triangle.triangulate with "vertices" normalized to
            [0, 1] and the edge map added under "edgemap".
        """
        height, width = output_size

        # We use cython wrapper of Triangle,
        # since other implementations (Pymesh) can't output edges :(
        # - https://github.com/drufat/triangle
        input_dic = {}
        input_dic["vertices"] = wires.vertices.copy()
        input_dic["segments"] = wires.edges.copy()

        # [Options]
        # p: Triangulates a Planar Straight Line Graph.
        # q: no angles smaller than 20 degrees
        try:
            t = tr.triangulate(input_dic, 'pq')
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.  Dump the edge map under a
            # unique name for post-mortem debugging, then abort non-zero.
            import uuid
            unique_filename = str(uuid.uuid4()) + ".png"
            print(wires.vertices.shape, wires.edges.shape)
            cv2.imwrite(unique_filename, np_edge)
            sys.exit(1)

        if debug:
            import matplotlib.pyplot as plt
            plt.gca().invert_yaxis()
            # plt.imshow(np_edge)
            for edge in wires.edges:
                v1x, v1y = wires.vertices[edge[0]]
                v2x, v2y = wires.vertices[edge[1]]
                plt.plot([v1x, v2x], [v1y, v2y], 'k-', color='r', linewidth=1.0)
            for tri in t['triangles']:
                v1x, v1y = t['vertices'][tri[0]]
                v2x, v2y = t['vertices'][tri[1]]
                v3x, v3y = t['vertices'][tri[2]]
                plt.plot([v1x, v2x], [v1y, v2y], 'k-', color='black', linewidth=1.0)
                plt.plot([v2x, v3x], [v2y, v3y], 'k-', color='black', linewidth=1.0)
                plt.plot([v3x, v1x], [v3y, v1y], 'k-', color='black', linewidth=1.0)
            plt.scatter(wires.vertices[:, 0], wires.vertices[:, 1], s=3.0, c="black")
            plt.show()
            print(t['vertices'].shape, t['triangles'].shape)
            sys.exit(1)

        # Normalize vertex coordinates to [0, 1]: x by width, y by height.
        vertices = t["vertices"]
        t["vertices"] = np.concatenate((vertices[:, :1] / width,
                                        vertices[:, 1:2] / height,
                                        vertices[:, 2:]), 1)
        t["edgemap"] = np_edge
        return t

    def __call__(self, np_scene):
        """Run the full extraction pipeline on one image.

        Args:
            np_scene: [H,W,3] (ndarray, uint8)

        Returns:
            BaseMesh built from the triangulated edge map.
        """
        height, width, _ = np_scene.shape

        # Calculate canny edge
        np_edge, _ = calculate_canny_edges(np_scene, denoise=self.canny_params["denoise"])

        # Save into temp file as bmp-format (autotrace only reads files)
        with tempfile.NamedTemporaryFile(suffix='.bmp', delete=False) as temp:
            cv2.imwrite(temp.name, np_edge)

        # Execute vectorization (by Autotrace)
        svg_string = self._execute_autotrace(temp.name)

        # Extract polygon information
        wires = self._read_polygon_from_svg(svg_string)

        # Triangulation (add the image-frame boundary first)
        wires = add_frame(wires, output_size=(height, width))
        mesh_dic = self._triangulation(np_edge, wires, output_size=(height, width))

        # Finally integrate all the information, and create disconnected mesh
        mesh = BaseMesh(mesh_dic)
        return mesh
| [
"pymesh.wires.WireNetwork.create_empty",
"pymesh.wires.WireNetwork.create_from_data",
"triangle.triangulate",
"tridepth.BaseMesh",
"sys.exit",
"subprocess.Popen",
"tridepth.extractor.calculate_canny_edges",
"matplotlib.pyplot.plot",
"os.unlink",
"numpy.concatenate",
"tempfile.NamedTemporaryFile"... | [((1252, 1325), 'subprocess.Popen', 'subprocess.Popen', (['(self.autotrace_cmd + [filename])'], {'stdout': 'subprocess.PIPE'}), '(self.autotrace_cmd + [filename], stdout=subprocess.PIPE)\n', (1268, 1325), False, 'import subprocess\n'), ((2094, 2115), 'tridepth.extractor.SVGReader', 'SVGReader', (['svg_string'], {}), '(svg_string)\n', (2103, 2115), False, 'from tridepth.extractor import SVGReader\n'), ((4529, 4621), 'numpy.concatenate', 'np.concatenate', (['(vertices[:, :1] / width, vertices[:, 1:2] / height, vertices[:, 2:])', '(1)'], {}), '((vertices[:, :1] / width, vertices[:, 1:2] / height,\n vertices[:, 2:]), 1)\n', (4543, 4621), True, 'import numpy as np\n'), ((4962, 5031), 'tridepth.extractor.calculate_canny_edges', 'calculate_canny_edges', (['np_scene'], {'denoise': "self.canny_params['denoise']"}), "(np_scene, denoise=self.canny_params['denoise'])\n", (4983, 5031), False, 'from tridepth.extractor import calculate_canny_edges\n'), ((5440, 5485), 'tridepth.extractor.add_frame', 'add_frame', (['wires'], {'output_size': '(height, width)'}), '(wires, output_size=(height, width))\n', (5449, 5485), False, 'from tridepth.extractor import add_frame\n'), ((5664, 5682), 'tridepth.BaseMesh', 'BaseMesh', (['mesh_dic'], {}), '(mesh_dic)\n', (5672, 5682), False, 'from tridepth import BaseMesh\n'), ((1659, 1670), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1667, 1670), False, 'import sys\n'), ((2351, 2390), 'pymesh.wires.WireNetwork.create_empty', 'pymesh.wires.WireNetwork.create_empty', ([], {}), '()\n', (2388, 2390), False, 'import pymesh\n'), ((2425, 2483), 'pymesh.wires.WireNetwork.create_from_data', 'pymesh.wires.WireNetwork.create_from_data', (['verts_2d', 'edges'], {}), '(verts_2d, edges)\n', (2466, 2483), False, 'import pymesh\n'), ((2504, 2555), 'tridepth.extractor.resolve_self_intersection', 'resolve_self_intersection', (['wires'], {'min_edge_size': '(1.5)'}), '(wires, min_edge_size=1.5)\n', (2529, 2555), False, 'from 
tridepth.extractor import resolve_self_intersection, cleanup\n'), ((2576, 2590), 'tridepth.extractor.cleanup', 'cleanup', (['wires'], {}), '(wires)\n', (2583, 2590), False, 'from tridepth.extractor import resolve_self_intersection, cleanup\n'), ((3194, 3225), 'triangle.triangulate', 'tr.triangulate', (['input_dic', '"""pq"""'], {}), "(input_dic, 'pq')\n", (3208, 3225), True, 'import triangle as tr\n'), ((4260, 4333), 'matplotlib.pyplot.scatter', 'plt.scatter', (['wires.vertices[:, 0]', 'wires.vertices[:, 1]'], {'s': '(3.0)', 'c': '"""black"""'}), "(wires.vertices[:, 0], wires.vertices[:, 1], s=3.0, c='black')\n", (4271, 4333), True, 'import matplotlib.pyplot as plt\n'), ((4346, 4356), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4354, 4356), True, 'import matplotlib.pyplot as plt\n'), ((5090, 5146), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': '""".bmp"""', 'delete': '(False)'}), "(suffix='.bmp', delete=False)\n", (5117, 5146), False, 'import tempfile\n'), ((5168, 5199), 'cv2.imwrite', 'cv2.imwrite', (['temp.name', 'np_edge'], {}), '(temp.name, np_edge)\n', (5179, 5199), False, 'import cv2\n'), ((1755, 1766), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1763, 1766), False, 'import sys\n'), ((1801, 1820), 'os.unlink', 'os.unlink', (['filename'], {}), '(filename)\n', (1810, 1820), False, 'import os\n'), ((3394, 3431), 'cv2.imwrite', 'cv2.imwrite', (['unique_filename', 'np_edge'], {}), '(unique_filename, np_edge)\n', (3405, 3431), False, 'import cv2\n'), ((3740, 3804), 'matplotlib.pyplot.plot', 'plt.plot', (['[v1x, v2x]', '[v1y, v2y]', '"""k-"""'], {'color': '"""r"""', 'linewidth': '(1.0)'}), "([v1x, v2x], [v1y, v2y], 'k-', color='r', linewidth=1.0)\n", (3748, 3804), True, 'import matplotlib.pyplot as plt\n'), ((4008, 4076), 'matplotlib.pyplot.plot', 'plt.plot', (['[v1x, v2x]', '[v1y, v2y]', '"""k-"""'], {'color': '"""black"""', 'linewidth': '(1.0)'}), "([v1x, v2x], [v1y, v2y], 'k-', color='black', linewidth=1.0)\n", 
(4016, 4076), True, 'import matplotlib.pyplot as plt\n'), ((4093, 4161), 'matplotlib.pyplot.plot', 'plt.plot', (['[v2x, v3x]', '[v2y, v3y]', '"""k-"""'], {'color': '"""black"""', 'linewidth': '(1.0)'}), "([v2x, v3x], [v2y, v3y], 'k-', color='black', linewidth=1.0)\n", (4101, 4161), True, 'import matplotlib.pyplot as plt\n'), ((4178, 4246), 'matplotlib.pyplot.plot', 'plt.plot', (['[v3x, v1x]', '[v3y, v1y]', '"""k-"""'], {'color': '"""black"""', 'linewidth': '(1.0)'}), "([v3x, v1x], [v3y, v1y], 'k-', color='black', linewidth=1.0)\n", (4186, 4246), True, 'import matplotlib.pyplot as plt\n'), ((3526, 3535), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3533, 3535), True, 'import matplotlib.pyplot as plt\n'), ((3300, 3312), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3310, 3312), False, 'import uuid\n')] |
"""initial_migration
Revision ID: 476b167aef80
Revises:
Create Date: 2019-01-03 17:03:37.684091
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# BUGFIX: `revision` held the placeholder '<KEY>'; it must match the
# revision ID stated in the module docstring ("476b167aef80") so Alembic
# can locate this migration in the history graph.
revision = '476b167aef80'
down_revision = None  # this is the first migration
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial ``user`` and ``page`` tables."""
    # Users: unique name/email, hashed password, registration timestamp.
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('password', sa.String(length=255), nullable=True),
        sa.Column('registration_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('name'),
    )
    # Pages: authored content rows pointing back at their owning user.
    op.create_table(
        'page',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('datetime', sa.DateTime(), nullable=True),
        sa.Column('source', sa.String(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id']),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade():
    """Drop the tables created by :func:`upgrade` (child table first)."""
    for table_name in ('page', 'user'):
        op.drop_table(table_name)
| [
"sqlalchemy.ForeignKeyConstraint",
"sqlalchemy.DateTime",
"alembic.op.drop_table",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.Integer",
"sqlalchemy.UniqueConstraint",
"sqlalchemy.String"
] | [((1135, 1156), 'alembic.op.drop_table', 'op.drop_table', (['"""page"""'], {}), "('page')\n", (1148, 1156), False, 'from alembic import op\n'), ((1161, 1182), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (1174, 1182), False, 'from alembic import op\n'), ((615, 644), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (638, 644), True, 'import sqlalchemy as sa\n'), ((650, 678), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""email"""'], {}), "('email')\n", (669, 678), True, 'import sqlalchemy as sa\n'), ((684, 711), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""'], {}), "('name')\n", (703, 711), True, 'import sqlalchemy as sa\n'), ((1019, 1068), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['user.id']"], {}), "(['user_id'], ['user.id'])\n", (1042, 1068), True, 'import sqlalchemy as sa\n'), ((1076, 1105), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1099, 1105), True, 'import sqlalchemy as sa\n'), ((337, 349), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (347, 349), True, 'import sqlalchemy as sa\n'), ((390, 401), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (399, 401), True, 'import sqlalchemy as sa\n'), ((443, 464), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (452, 464), True, 'import sqlalchemy as sa\n'), ((508, 529), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (517, 529), True, 'import sqlalchemy as sa\n'), ((580, 593), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (591, 593), True, 'import sqlalchemy as sa\n'), ((766, 778), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (776, 778), True, 'import sqlalchemy as sa\n'), ((820, 831), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (829, 831), True, 'import sqlalchemy as sa\n'), ((876, 889), 'sqlalchemy.DateTime', 
'sa.DateTime', ([], {}), '()\n', (887, 889), True, 'import sqlalchemy as sa\n'), ((931, 942), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (940, 942), True, 'import sqlalchemy as sa\n'), ((985, 997), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (995, 997), True, 'import sqlalchemy as sa\n')] |
"""
## Minería de textos
Universidad de Alicante, curso 2021-2022
Esta documentación forma parte de la práctica "[Lectura y documentación de un sistema de
extracción de entidades](https://jaspock.github.io/mtextos2122/bloque2_practica.html)" y se
basa en el código del curso [CS230](https://github.com/cs230-stanford/cs230-code-examples)
de la Universidad de Stanford.
**Autores de los comentarios:** <NAME> & <NAME>
Este módulo define la red neuronal, la función de pérdida y la métrica de aciertos
para la evaluación del modelo. Se hace uso de la libería torch.
"""
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class Net(nn.Module):
    """Recurrent network for named-entity tagging.

    Three stages: a word-embedding lookup, a single LSTM layer, and a
    fully-connected projection onto the tag space.  ``forward`` returns
    per-token log-probabilities.
    """

    def __init__(self, params):
        """Build the three layers from *params*.

        Args:
            params: object exposing ``vocab_size``, ``embedding_dim``,
                ``lstm_hidden_dim`` and ``number_of_tags``.
        """
        super(Net, self).__init__()
        # Token id -> dense vector of size embedding_dim.
        self.embedding = nn.Embedding(params.vocab_size, params.embedding_dim)
        # LSTM over the embedded sequence; the hidden size need not match
        # the embedding size.  batch_first=True lays tensors out as
        # (batch, seq, feature).
        self.lstm = nn.LSTM(params.embedding_dim,
                            params.lstm_hidden_dim, batch_first=True)
        # Projects each LSTM output onto one score per NER tag
        # (name, time, place, ...).
        self.fc = nn.Linear(params.lstm_hidden_dim, params.number_of_tags)

    def forward(self, s):
        """Compute log-probabilities over tags for every token.

        Args:
            s: LongTensor of token ids, shape (batch, max_seq_len); shorter
                sentences are padded.

        Returns:
            Tensor of shape (batch * max_seq_len, number_of_tags) holding
            log-softmax scores.
        """
        embedded = self.embedding(s)          # (batch, seq, embedding_dim)
        lstm_out, _ = self.lstm(embedded)     # (batch, seq, lstm_hidden_dim)
        # Flatten to one token per row; contiguous() makes an in-memory
        # copy so view() can reshape it.
        flat = lstm_out.contiguous().view(-1, lstm_out.shape[2])
        scores = self.fc(flat)                # (tokens, number_of_tags)
        # log(softmax) for numerical stability; values are negative and the
        # closer to zero, the higher the probability.
        return F.log_softmax(scores, dim=1)
def loss_fn(outputs, labels):
    """Masked negative log-likelihood over the non-padding tokens.

    Padding tokens carry the label -1 (inputs to the network must share a
    shape, so short sentences are padded); a mask excludes them from the
    loss computation.

    Args:
        outputs: log-probabilities, shape (batch * seq_len, num_tags),
            as produced by ``Net.forward``.
        labels: gold tag indices; padding positions are marked with -1.

    Returns:
        Scalar tensor: cross-entropy averaged over the real tokens only.
    """
    # One label per row of `outputs`.
    flat_labels = labels.view(-1)
    # 1.0 for real tokens, 0.0 for padding.
    real_token = (flat_labels >= 0).float()
    # Turn the -1 padding labels into valid indices; their contribution is
    # zeroed by the mask anyway.
    safe_labels = flat_labels % outputs.shape[1]
    num_tokens = int(torch.sum(real_token))
    # Pick each token's log-probability for its gold tag, drop the padded
    # ones, and average over the real tokens.
    picked = outputs[range(outputs.shape[0]), safe_labels]
    return -torch.sum(picked * real_token) / num_tokens
def accuracy(outputs, labels):
    """Fraction of non-padding tokens whose predicted tag is correct.

    Args:
        outputs: per-token tag scores, shape (num_tokens, num_tags).
        labels: gold tag indices (any shape); -1 marks padding.

    Returns:
        Accuracy over the real (non-padding) tokens.
    """
    flat = labels.ravel()
    # Padding tokens (label -1) must not count towards the denominator.
    real = (flat >= 0)
    # Predicted class = highest-scoring tag per token.  Predictions are
    # always >= 0, so padded positions can never match their -1 label.
    predicted = np.argmax(outputs, axis=1)
    return np.sum(predicted == flat)/float(np.sum(real))
# Registry of evaluation metrics, keyed by name; each entry maps
# (outputs, labels) -> score and is consumed by the evaluation loop.
metrics = {
    'accuracy': accuracy,
}
| [
"torch.nn.LSTM",
"numpy.argmax",
"numpy.sum",
"torch.sum",
"torch.nn.functional.log_softmax",
"torch.nn.Linear",
"torch.nn.Embedding"
] | [((6365, 6391), 'numpy.argmax', 'np.argmax', (['outputs'], {'axis': '(1)'}), '(outputs, axis=1)\n', (6374, 6391), True, 'import numpy as np\n'), ((1609, 1662), 'torch.nn.Embedding', 'nn.Embedding', (['params.vocab_size', 'params.embedding_dim'], {}), '(params.vocab_size, params.embedding_dim)\n', (1621, 1662), True, 'import torch.nn as nn\n'), ((1975, 2046), 'torch.nn.LSTM', 'nn.LSTM', (['params.embedding_dim', 'params.lstm_hidden_dim'], {'batch_first': '(True)'}), '(params.embedding_dim, params.lstm_hidden_dim, batch_first=True)\n', (1982, 2046), True, 'import torch.nn as nn\n'), ((2331, 2387), 'torch.nn.Linear', 'nn.Linear', (['params.lstm_hidden_dim', 'params.number_of_tags'], {}), '(params.lstm_hidden_dim, params.number_of_tags)\n', (2340, 2387), True, 'import torch.nn as nn\n'), ((4439, 4462), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['s'], {'dim': '(1)'}), '(s, dim=1)\n', (4452, 4462), True, 'import torch.nn.functional as F\n'), ((5401, 5416), 'torch.sum', 'torch.sum', (['mask'], {}), '(mask)\n', (5410, 5416), False, 'import torch\n'), ((6405, 6430), 'numpy.sum', 'np.sum', (['(outputs == labels)'], {}), '(outputs == labels)\n', (6411, 6430), True, 'import numpy as np\n'), ((6437, 6449), 'numpy.sum', 'np.sum', (['mask'], {}), '(mask)\n', (6443, 6449), True, 'import numpy as np\n')] |
import pytest
from flask import Flask
from todo import db as _db
@pytest.fixture
def test_app():
    """Yield a Flask app configured for testing with an in-memory database."""
    app = Flask(__name__)
    # An in-memory SQLite database needs no file cleanup after the tests.
    app.config.update(
        SQLALCHEMY_DATABASE_URI="sqlite:///:memory:",
        TESTING=True,
        SQLALCHEMY_TRACK_MODIFICATIONS=True,
    )
    _db.init_app(app)
    with app.app_context():
        _db.create_all()
        # Hand the app to the test; teardown resumes here afterwards.
        yield app
    # Dropping is redundant for an in-memory DB; kept to demonstrate
    # explicit post-test cleanup.
    _db.drop_all()
| [
"todo.db.drop_all",
"todo.db.init_app",
"todo.db.create_all",
"flask.Flask"
] | [((174, 189), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (179, 189), False, 'from flask import Flask\n'), ((473, 490), 'todo.db.init_app', '_db.init_app', (['app'], {}), '(app)\n', (485, 490), True, 'from todo import db as _db\n'), ((561, 577), 'todo.db.create_all', '_db.create_all', ([], {}), '()\n', (575, 577), True, 'from todo import db as _db\n'), ((751, 765), 'todo.db.drop_all', '_db.drop_all', ([], {}), '()\n', (763, 765), True, 'from todo import db as _db\n')] |
#!/usr/bin/python
from __future__ import print_function
import argparse
import json
import os
import sys
from cli.settings import Settings
import contextlib
@contextlib.contextmanager
def chdir(dirname):
    '''Context manager: cd into *dirname*, restoring the previous cwd on exit.'''
    previous = os.getcwd()
    try:
        os.chdir(dirname)
        yield
    finally:
        # Runs on both normal exit and exceptions raised in the body.
        os.chdir(previous)
def describe():
    """One-line help string for the `roger init` subcommand."""
    return 'creates an initial application template and a team config file.'
class RogerInit(object):
    """Implements `roger init`: writes a sample team config file and a
    sample Marathon application template for a new application."""

    def parse_args(self):
        """Build and return the argparse parser for the `roger init` CLI."""
        self.parser = argparse.ArgumentParser(
            prog='roger init', description=describe())
        self.parser.add_argument('app_name', metavar='app_name',
                                 help="application name unique within a project (or team). Examples: 'grafana', 'agora', 'crux:web'")
        self.parser.add_argument('project_name', metavar='project_name',
                                 help="project (or team) name. Examples: 'roger', 'content', 'kwe'")
        self.parser.add_argument('-f', '--framework',
                                 help="framework to deploy the application to. Defaults to marathon.'")
        return self.parser

    def writeJson(self, json, path, filename):
        """Write an already-serialized JSON string to ``path/filename``,
        creating the directory if needed.

        Note: the parameter is named `json` for backward compatibility with
        existing callers; it shadows the json module inside this method.
        """
        # exist_ok avoids a race between an exists() check and makedirs().
        os.makedirs(path, exist_ok=True)
        # BUGFIX: the file used to be opened in binary mode ('wb') while a
        # str was written, which raises TypeError on Python 3.
        with open("{0}/{1}".format(path, filename), 'w') as fh:
            fh.write(json)

    def createSlackTags(self):
        """Return the default Slack notification settings dict."""
        slack_dict = {}
        slack_dict['channel'] = "channel_id"
        slack_dict['method'] = "chat.postMessage"
        slack_dict['username'] = "project_name Deployment"
        slack_dict['emoji'] = ":rocket:"
        return slack_dict

    def createVariableTags(self):
        """Return an empty vars skeleton: a global section plus
        dev/stage/prod environment sections."""
        variables, global_dict, environment, dev_dict, stage_dict, prod_dict = [
            {} for dummy in range(6)]
        environment['dev'] = dev_dict
        environment['stage'] = stage_dict
        environment['prod'] = prod_dict
        variables['global'] = global_dict
        variables['environment'] = environment
        return variables

    def createAppTags(self, app_name, framework):
        """Return the per-application section of the team config.

        The 'framework' key is only written when *framework* is provided.
        """
        app_dict, details = {}, {}
        details['name'] = app_name
        details['template_path'] = "framework_template_path"
        details['path'] = "dockerfile_path"
        details['vars'] = self.createVariableTags()
        if framework is not None:
            details['framework'] = "{0}".format(framework)
        container_list = ['{0}'.format(app_name)]
        details['containers'] = container_list
        app_dict['{0}'.format(app_name)] = details
        return app_dict

    def createAppConfig(self, config_dir, filename, app_name, project_name, framework):
        """Assemble the team config JSON and write it to config_dir/filename."""
        json_dict, app_dict = {}, {}
        json_dict['name'] = project_name
        json_dict['notifications'] = self.createSlackTags()
        json_dict['repo'] = "repo_name"
        json_dict['vars'] = self.createVariableTags()
        app_dict = self.createAppTags(app_name, framework)
        json_dict['apps'] = app_dict
        json_output = json.dumps(json_dict, indent=2)
        self.writeJson(json_output, config_dir, filename)

    def createPortMappings(self):
        """Return a single default tcp port mapping for the Marathon config."""
        port_dict = {}
        port_dict['containerPort'] = 8125
        port_dict['hostPort'] = 0
        port_dict['servicePort'] = 0
        port_dict['protocol'] = "tcp"
        return port_dict

    def createContainerTags(self):
        """Return the Marathon 'container' section (Docker, bridge network)."""
        container_dict, docker_dict = {}, {}
        docker_dict['image'] = "{{ image }}"
        docker_dict['network'] = "BRIDGE"
        port_mappings = []
        port_mappings.append(self.createPortMappings())
        docker_dict['portMappings'] = port_mappings
        container_dict['type'] = "DOCKER"
        container_dict['docker'] = docker_dict
        return container_dict

    def createMarathonConfig(self, templ_dir, filename, app_id):
        """Assemble a sample Marathon app definition and write it to templ_dir."""
        json_dict, env = {}, {}
        json_dict['container'] = self.createContainerTags()
        env['ENV_VAR1'] = "value1"
        env['ENV_VAR2'] = "value2"
        json_dict['id'] = app_id
        json_dict['instances'] = 1
        json_dict['cpus'] = 0.2
        json_dict['mem'] = 1
        json_dict['env'] = env
        json_output = json.dumps(json_dict, indent=2)
        self.writeJson(json_output, templ_dir, filename)

    def main(self):
        """CLI entry point: create the team config and Marathon template,
        skipping each file that already exists.

        NOTE(review): relies on the module-global `settingObj` being set by
        the `__main__` guard before this is called.
        """
        self.parser = self.parse_args()
        args = self.parser.parse_args()
        config_dir = settingObj.getConfigDir()
        templ_dir = settingObj.getTemplatesDir()
        config_file = "{0}.json".format(args.project_name)
        if os.path.exists("{0}/{1}".format(config_dir, config_file)):
            print(
                "File {0} already exists in {1}/".format(config_file, config_dir))
        else:
            self.createAppConfig(
                config_dir, config_file, args.app_name, args.project_name, args.framework)
            print("Sample {0} application file {1} created under {2}/".format(
                args.app_name, config_file, config_dir))
        framework_filename = "{0}-{1}.json".format(
            args.project_name, args.app_name)
        if os.path.exists("{0}/{1}".format(templ_dir, framework_filename)):
            print("File {0} already exists in {1}".format(
                framework_filename, templ_dir))
        else:
            app_id = "{0}-{1}".format(args.project_name, args.app_name)
            self.createMarathonConfig(templ_dir, framework_filename, app_id)
            print("Sample Marathon file {0} created under {1}".format(
                framework_filename, templ_dir))
if __name__ == "__main__":
settingObj = Settings()
roger_init = RogerInit()
roger_init.main()
| [
"os.makedirs",
"json.dumps",
"os.getcwd",
"os.chdir",
"os.path.abspath",
"cli.settings.Settings"
] | [((279, 290), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (288, 290), False, 'import os\n'), ((5801, 5811), 'cli.settings.Settings', 'Settings', ([], {}), '()\n', (5809, 5811), False, 'from cli.settings import Settings\n'), ((308, 325), 'os.chdir', 'os.chdir', (['dirname'], {}), '(dirname)\n', (316, 325), False, 'import os\n'), ((361, 377), 'os.chdir', 'os.chdir', (['curdir'], {}), '(curdir)\n', (369, 377), False, 'import os\n'), ((3127, 3158), 'json.dumps', 'json.dumps', (['json_dict'], {'indent': '(2)'}), '(json_dict, indent=2)\n', (3137, 3158), False, 'import json\n'), ((4317, 4348), 'json.dumps', 'json.dumps', (['json_dict'], {'indent': '(2)'}), '(json_dict, indent=2)\n', (4327, 4348), False, 'import json\n'), ((1267, 1288), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (1282, 1288), False, 'import os\n'), ((1330, 1347), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (1341, 1347), False, 'import os\n')] |
import unittest
from records_mover.records.targets.spectrum import SpectrumRecordsTarget
from records_mover.records.existing_table_handling import ExistingTableHandling
from mock import Mock, patch, MagicMock
class TestSpectrum(unittest.TestCase):
    """Unit tests for SpectrumRecordsTarget using fully mocked collaborators.

    No database or S3 access occurs: the db driver, url resolver, and
    records format are all Mock objects wired up in setUp().
    """
    @patch('records_mover.records.targets.spectrum.ParquetRecordsFormat')
    def setUp(self,
              mock_ParquetRecordsFormat):
        """Build a SpectrumRecordsTarget against mocked driver/resolver/urls."""
        mock_schema_name = 'myschema'
        mock_table_name = 'mytable'
        mock_url_resolver = Mock(name='url_resolver')
        mock_db_driver = Mock(name='db_driver')
        mock_spectrum_base_url = Mock(name='spectrum_base_url')
        # NOTE(review): mock_db_driver is passed as a factory/callable below,
        # so the driver instance the target uses is its return_value.
        self.mock_driver = mock_db_driver.return_value
        self.mock_db = MagicMock(name='db')
        self.mock_driver.db_engine = self.mock_db
        # Mirrors the three nested directory_in_this_directory() calls the
        # target makes under spectrum_base_url to derive its output location.
        self.mock_output_loc = mock_url_resolver.directory_url.return_value.\
            directory_in_this_directory.return_value.\
            directory_in_this_directory.return_value.\
            directory_in_this_directory.return_value
        self.mock_output_loc.url = 's3://output-loc/'
        self.mock_output_loc.scheme = 's3'
        self.records_format = mock_ParquetRecordsFormat.return_value
        self.target =\
            SpectrumRecordsTarget(schema_name=mock_schema_name,
                                  table_name=mock_table_name,
                                  db_engine=self.mock_db,
                                  db_driver=mock_db_driver,
                                  url_resolver=mock_url_resolver,
                                  spectrum_base_url=mock_spectrum_base_url,
                                  spectrum_rdir_url=None,
                                  existing_table_handling=ExistingTableHandling.DROP_AND_RECREATE)
        # Constructor should resolve the base url into a directory url.
        mock_url_resolver.directory_url.assert_called_with(mock_spectrum_base_url)
    def test_init(self):
        """Constructor stores the records format and db engine it was given."""
        self.assertEqual(self.target.records_format, self.records_format)
        self.assertEqual(self.target.db, self.mock_db)
    @patch('records_mover.records.targets.spectrum.quote_schema_and_table')
    def test_pre_load_hook_preps_bucket_with_default_prep(self, mock_quote_schema_and_table):
        """pre_load_hook drops the existing external table and purges the S3 dir."""
        mock_schema_and_table = mock_quote_schema_and_table.return_value
        # Cursor obtained via the driver's engine connect() context manager.
        mock_cursor = self.target.driver.db_engine.connect.return_value.__enter__.return_value
        self.target.pre_load_hook()
        mock_quote_schema_and_table.assert_called_with(self.target.db,
                                                       self.target.schema_name,
                                                       self.target.table_name)
        # DDL must run outside a transaction, hence AUTOCOMMIT.
        mock_cursor.execution_options.assert_called_with(isolation_level='AUTOCOMMIT')
        mock_cursor.execute.assert_called_with(f"DROP TABLE IF EXISTS {mock_schema_and_table}")
        self.mock_output_loc.purge_directory.assert_called_with()
    @patch('records_mover.records.targets.spectrum.RecordsDirectory')
    def test_records_directory(self, mock_RecordsDirectory):
        """records_directory() wraps the computed output location."""
        out = self.target.records_directory()
        mock_RecordsDirectory.assert_called_with(self.mock_output_loc)
        self.assertEqual(out, mock_RecordsDirectory.return_value)
    @patch('records_mover.records.targets.spectrum.CreateTable')
    @patch('records_mover.records.targets.spectrum.Table')
    @patch('records_mover.records.targets.spectrum.MetaData')
    @patch('records_mover.records.targets.spectrum.RecordsDirectory')
    def test_post_load_hook_creates_table(self,
                                          mock_RecordsDirectory,
                                          mock_MetaData,
                                          mock_Table,
                                          mock_CreateTable):
        """post_load_hook builds the external table DDL from the records schema.

        The generated CREATE TABLE is extended with Spectrum-specific
        clauses: STORED AS PARQUET, the manifest LOCATION, and numRows.
        """
        mock_num_rows_loaded = 123
        mock_directory = mock_RecordsDirectory.return_value
        mock_records_schema = mock_directory.load_schema_json_obj.return_value
        mock_field = Mock(name='field')
        mock_records_schema.fields = [mock_field]
        mock_meta = mock_MetaData.return_value
        mock_columns = [mock_field.to_sqlalchemy_column.return_value]
        mock_table = mock_Table.return_value
        # Trailing space matters: the Spectrum clauses are appended directly.
        mock_CreateTable.return_value = "SOME GENERATED CREATE TABLES STATEMENT "
        mock_cursor = self.target.driver.db_engine.connect.return_value.__enter__.return_value
        self.target.post_load_hook(num_rows_loaded=mock_num_rows_loaded)
        mock_directory.load_schema_json_obj.assert_called_with()
        mock_directory.get_manifest.assert_called_with()
        mock_field.to_sqlalchemy_column.assert_called_with(self.mock_driver)
        # EXTERNAL prefix marks the table as a Redshift Spectrum table.
        mock_Table.assert_called_with('mytable', mock_meta,
                                      *mock_columns, prefixes=['EXTERNAL'], schema='myschema')
        mock_CreateTable.assert_called_with(mock_table, bind=self.mock_driver.db_engine)
        mock_cursor.execution_options.assert_called_with(isolation_level='AUTOCOMMIT')
        mock_cursor.execute.assert_called_with("SOME GENERATED CREATE TABLES STATEMENT "
                                               "STORED AS PARQUET\n"
                                               "LOCATION 's3://output-loc/_manifest'\n\n"
                                               "TABLE PROPERTIES ('numRows'='123')")
| [
"mock.Mock",
"mock.patch",
"mock.MagicMock",
"records_mover.records.targets.spectrum.SpectrumRecordsTarget"
] | [((255, 323), 'mock.patch', 'patch', (['"""records_mover.records.targets.spectrum.ParquetRecordsFormat"""'], {}), "('records_mover.records.targets.spectrum.ParquetRecordsFormat')\n", (260, 323), False, 'from mock import Mock, patch, MagicMock\n'), ((1992, 2062), 'mock.patch', 'patch', (['"""records_mover.records.targets.spectrum.quote_schema_and_table"""'], {}), "('records_mover.records.targets.spectrum.quote_schema_and_table')\n", (1997, 2062), False, 'from mock import Mock, patch, MagicMock\n'), ((2847, 2911), 'mock.patch', 'patch', (['"""records_mover.records.targets.spectrum.RecordsDirectory"""'], {}), "('records_mover.records.targets.spectrum.RecordsDirectory')\n", (2852, 2911), False, 'from mock import Mock, patch, MagicMock\n'), ((3162, 3221), 'mock.patch', 'patch', (['"""records_mover.records.targets.spectrum.CreateTable"""'], {}), "('records_mover.records.targets.spectrum.CreateTable')\n", (3167, 3221), False, 'from mock import Mock, patch, MagicMock\n'), ((3227, 3280), 'mock.patch', 'patch', (['"""records_mover.records.targets.spectrum.Table"""'], {}), "('records_mover.records.targets.spectrum.Table')\n", (3232, 3280), False, 'from mock import Mock, patch, MagicMock\n'), ((3286, 3342), 'mock.patch', 'patch', (['"""records_mover.records.targets.spectrum.MetaData"""'], {}), "('records_mover.records.targets.spectrum.MetaData')\n", (3291, 3342), False, 'from mock import Mock, patch, MagicMock\n'), ((3348, 3412), 'mock.patch', 'patch', (['"""records_mover.records.targets.spectrum.RecordsDirectory"""'], {}), "('records_mover.records.targets.spectrum.RecordsDirectory')\n", (3353, 3412), False, 'from mock import Mock, patch, MagicMock\n'), ((488, 513), 'mock.Mock', 'Mock', ([], {'name': '"""url_resolver"""'}), "(name='url_resolver')\n", (492, 513), False, 'from mock import Mock, patch, MagicMock\n'), ((539, 561), 'mock.Mock', 'Mock', ([], {'name': '"""db_driver"""'}), "(name='db_driver')\n", (543, 561), False, 'from mock import Mock, patch, MagicMock\n'), 
((595, 625), 'mock.Mock', 'Mock', ([], {'name': '"""spectrum_base_url"""'}), "(name='spectrum_base_url')\n", (599, 625), False, 'from mock import Mock, patch, MagicMock\n'), ((704, 724), 'mock.MagicMock', 'MagicMock', ([], {'name': '"""db"""'}), "(name='db')\n", (713, 724), False, 'from mock import Mock, patch, MagicMock\n'), ((1217, 1528), 'records_mover.records.targets.spectrum.SpectrumRecordsTarget', 'SpectrumRecordsTarget', ([], {'schema_name': 'mock_schema_name', 'table_name': 'mock_table_name', 'db_engine': 'self.mock_db', 'db_driver': 'mock_db_driver', 'url_resolver': 'mock_url_resolver', 'spectrum_base_url': 'mock_spectrum_base_url', 'spectrum_rdir_url': 'None', 'existing_table_handling': 'ExistingTableHandling.DROP_AND_RECREATE'}), '(schema_name=mock_schema_name, table_name=\n mock_table_name, db_engine=self.mock_db, db_driver=mock_db_driver,\n url_resolver=mock_url_resolver, spectrum_base_url=\n mock_spectrum_base_url, spectrum_rdir_url=None, existing_table_handling\n =ExistingTableHandling.DROP_AND_RECREATE)\n', (1238, 1528), False, 'from records_mover.records.targets.spectrum import SpectrumRecordsTarget\n'), ((3893, 3911), 'mock.Mock', 'Mock', ([], {'name': '"""field"""'}), "(name='field')\n", (3897, 3911), False, 'from mock import Mock, patch, MagicMock\n')] |
# Generated by Django 3.0.11 on 2020-12-09 06:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional profile picture image field to the Contacts model."""

    # Must run after the previous telephone_directory migration.
    dependencies = [('telephone_directory', '0002_auto_20201208_1801')]

    operations = [
        migrations.AddField(
            model_name='contacts',
            name='profile_pic',
            # Optional upload; files land under MEDIA_ROOT/profile_pic/.
            field=models.ImageField(
                blank=True,
                null=True,
                upload_to='profile_pic/',
            ),
        ),
    ]
| [
"django.db.models.ImageField"
] | [((354, 420), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""profile_pic/"""'}), "(blank=True, null=True, upload_to='profile_pic/')\n", (371, 420), False, 'from django.db import migrations, models\n')] |
"""Summarize Functional Class (FC) roadway mileage by MPO and by District.

Workflow:
  1. Merge frontage roads, centerlines, and routed sub-files into one layer.
  2. Erase MPO areas from the District boundaries (so the two summaries
     do not double-count).
  3. Intersect the merged roadways with MPO boundaries and with the
     MPO-free District boundaries.
  4. Dissolve each intersection by functional class (FUNCL_2008) plus the
     area label, then compute total length in miles (TTL_MILES).
"""
import os

import arcpy

# Script-tool parameters, in declared order.
FC_Frontage_Roads = arcpy.GetParameterAsText(0)
FC_Centerlines = arcpy.GetParameterAsText(1)
Routed_SubFiles = arcpy.GetParameterAsText(2)
District_Boundaries = arcpy.GetParameterAsText(3)
MPO_Boundaries = arcpy.GetParameterAsText(4)
outputFolder = arcpy.GetParameterAsText(5)

# Scratch workspace for intermediate shapefiles.
scratchSpace = outputFolder + os.sep + "scratchSpace" + os.sep
if not os.path.exists(scratchSpace):
    os.makedirs(scratchSpace)

# Intermediate and final dataset paths.
inputs_to_merge = [Routed_SubFiles, FC_Centerlines, FC_Frontage_Roads]
FC_roadways_merged = scratchSpace + "temp_FC_roadways_merged.shp"
District_Boundaries_w_o_MPO = scratchSpace + "temp_District_Boundaries_w_o_MPO.shp"
intersect_features1 = [FC_roadways_merged, MPO_Boundaries]
# BUG FIX: this variable was previously spelled three different ways
# (FC_roads_by_MP0 / FC_roads_by_MPO / FC_Roads_by_MPO), which raised a
# NameError at the Intersect step; one spelling is now used throughout.
FC_roads_by_MPO = scratchSpace + "temp_FC_roads_by_MPO.shp"
intersect_features2 = [FC_roadways_merged, District_Boundaries_w_o_MPO]
FC_roads_by_District = scratchSpace + "temp_FC_roads_by_District.shp"
Final_FC_Roads_by_MPO = outputFolder + os.sep + "FC_Roads_by_MPO.shp"
Final_FC_Roads_by_Districts = outputFolder + os.sep + "FC_Roads_by_Districts.shp"

# Step 1: merge all roadway inputs into a single layer.
arcpy.SetProgressor("step", "Merging... Please Wait (this can take a while)", 0, 9, 1)
arcpy.AddMessage("Merging Datasets:\n%s\n%s\n%s" % (inputs_to_merge[0], inputs_to_merge[1], inputs_to_merge[2]))
arcpy.Merge_management(inputs_to_merge, FC_roadways_merged)
arcpy.AddMessage("File Created: %s\n" % FC_roadways_merged)
arcpy.SetProgressorPosition()

# Step 2: remove MPO areas from District boundaries to avoid double-counting.
arcpy.SetProgressorLabel("Erase Analysis...")
arcpy.AddMessage("Creating Distrct Bounadries without MPOs")
arcpy.Erase_analysis(District_Boundaries, MPO_Boundaries, District_Boundaries_w_o_MPO)
arcpy.AddMessage("File Created: %s\n" % District_Boundaries_w_o_MPO)
arcpy.SetProgressorPosition()

# Step 3a: clip merged roadways to MPO boundaries.
arcpy.SetProgressorLabel("Intersect Analysis...")
arcpy.AddMessage("Intersecting... %s\nby... %s" % (intersect_features1[0], intersect_features1[1]))
arcpy.Intersect_analysis(intersect_features1, FC_roads_by_MPO, "ALL", "", "LINE")
arcpy.AddMessage("File Created: %s\n" % FC_roads_by_MPO)
arcpy.SetProgressorPosition()

# Step 3b: clip merged roadways to the MPO-free District boundaries.
arcpy.SetProgressorLabel("Intersect Analysis...")
arcpy.AddMessage("Intersecting... %s\nby... %s" % (intersect_features2[0], intersect_features2[1]))
arcpy.Intersect_analysis(intersect_features2, FC_roads_by_District, "ALL", "", "LINE")
arcpy.AddMessage("File Created: %s\n" % FC_roads_by_District)
arcpy.SetProgressorPosition()

# Step 4a: dissolve the MPO roadways by functional class and MPO label.
arcpy.SetProgressorLabel("Dissolving by MPO...")
arcpy.AddMessage("Dissolving... %s\nby... FUNCL_2008 and MPO_LBL" % FC_roads_by_MPO)
arcpy.Dissolve_management(FC_roads_by_MPO, Final_FC_Roads_by_MPO, "FUNCL_2008;MPO_LBL", "", "MULTI_PART", "DISSOLVE_LINES")
arcpy.AddMessage("File Created: %s\n" % Final_FC_Roads_by_MPO)
arcpy.SetProgressorPosition()

# Step 4b: dissolve the District roadways by functional class and district name.
arcpy.SetProgressorLabel("Dissolving by District...")
arcpy.AddMessage("Dissolving... %s\nby... FUNCL_2008 and DIST_NM" % FC_roads_by_District)
arcpy.Dissolve_management(FC_roads_by_District, Final_FC_Roads_by_Districts, "FUNCL_2008;DIST_NM", "", "MULTI_PART", "DISSOLVE_LINES")
arcpy.AddMessage("File Created: %s\n" % Final_FC_Roads_by_Districts)
arcpy.SetProgressorPosition()

# Step 5: add TTL_MILES fields to both outputs.
arcpy.SetProgressorLabel("Adding Field...")
arcpy.AddMessage("Adding Field TTL_MILES to %s..." % Final_FC_Roads_by_MPO)
arcpy.AddField_management(Final_FC_Roads_by_MPO, "TTL_MILES", "FLOAT", "9", "4", "", "", "NON_NULLABLE", "NON_REQUIRED")
arcpy.AddMessage("Field add complete\n")
arcpy.SetProgressorPosition()

arcpy.SetProgressorLabel("Adding Field...")
arcpy.AddMessage("Adding Field TTL_MILES to %s..." % Final_FC_Roads_by_Districts)
arcpy.AddField_management(Final_FC_Roads_by_Districts, "TTL_MILES", "FLOAT", "9", "4", "", "", "NON_NULLABLE", "NON_REQUIRED")
arcpy.AddMessage("Field add complete\n")
arcpy.SetProgressorPosition()

# Step 6: compute total length in miles via shape geometry.
arcpy.SetProgressorLabel("Calculating Field...")
arcpy.AddMessage("Calculating Total Length in Miles for field TTL_MILES in\n%s" % Final_FC_Roads_by_MPO)
arcpy.CalculateField_management(Final_FC_Roads_by_MPO, "TTL_MILES", "!shape.length@miles!", "PYTHON_9.3")
arcpy.AddMessage("%s - Done!\n" % Final_FC_Roads_by_MPO)
arcpy.SetProgressorPosition()

arcpy.SetProgressorLabel("Calculating Field...")
arcpy.AddMessage("Calculating Total Length in Miles for field TTL_MILES in\n%s" % Final_FC_Roads_by_Districts)
arcpy.CalculateField_management(Final_FC_Roads_by_Districts, "TTL_MILES", "!shape.length@miles!", "PYTHON_9.3")
arcpy.AddMessage("%s - Done!\n" % Final_FC_Roads_by_Districts)
arcpy.ResetProgressor() | [
"os.path.exists",
"arcpy.CalculateField_management",
"arcpy.Dissolve_management",
"arcpy.Merge_management",
"arcpy.Erase_analysis",
"arcpy.AddMessage",
"arcpy.AddField_management",
"os.makedirs",
"arcpy.SetProgressorPosition",
"arcpy.SetProgressorLabel",
"arcpy.Intersect_analysis",
"arcpy.GetP... | [((37, 64), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(0)'], {}), '(0)\n', (61, 64), False, 'import arcpy, os\n'), ((82, 109), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(1)'], {}), '(1)\n', (106, 109), False, 'import arcpy, os\n'), ((128, 155), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(2)'], {}), '(2)\n', (152, 155), False, 'import arcpy, os\n'), ((178, 205), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(3)'], {}), '(3)\n', (202, 205), False, 'import arcpy, os\n'), ((223, 250), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(4)'], {}), '(4)\n', (247, 250), False, 'import arcpy, os\n'), ((266, 293), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(5)'], {}), '(5)\n', (290, 293), False, 'import arcpy, os\n'), ((1062, 1152), 'arcpy.SetProgressor', 'arcpy.SetProgressor', (['"""step"""', '"""Merging... Please Wait (this can take a while)"""', '(0)', '(9)', '(1)'], {}), "('step',\n 'Merging... 
Please Wait (this can take a while)', 0, 9, 1)\n", (1081, 1152), False, 'import arcpy, os\n'), ((1147, 1264), 'arcpy.AddMessage', 'arcpy.AddMessage', (['("""Merging Datasets:\n%s\n%s\n%s""" % (inputs_to_merge[0], inputs_to_merge[1],\n inputs_to_merge[2]))'], {}), '("""Merging Datasets:\n%s\n%s\n%s""" % (inputs_to_merge[0],\n inputs_to_merge[1], inputs_to_merge[2]))\n', (1163, 1264), False, 'import arcpy, os\n'), ((1258, 1317), 'arcpy.Merge_management', 'arcpy.Merge_management', (['inputs_to_merge', 'FC_roadways_merged'], {}), '(inputs_to_merge, FC_roadways_merged)\n', (1280, 1317), False, 'import arcpy, os\n'), ((1318, 1377), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('File Created: %s\\n' % FC_roadways_merged)"], {}), "('File Created: %s\\n' % FC_roadways_merged)\n", (1334, 1377), False, 'import arcpy, os\n'), ((1378, 1407), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (1405, 1407), False, 'import arcpy, os\n'), ((1409, 1454), 'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', (['"""Erase Analysis..."""'], {}), "('Erase Analysis...')\n", (1433, 1454), False, 'import arcpy, os\n'), ((1456, 1516), 'arcpy.AddMessage', 'arcpy.AddMessage', (['"""Creating Distrct Bounadries without MPOs"""'], {}), "('Creating Distrct Bounadries without MPOs')\n", (1472, 1516), False, 'import arcpy, os\n'), ((1517, 1607), 'arcpy.Erase_analysis', 'arcpy.Erase_analysis', (['District_Boundaries', 'MPO_Boundaries', 'District_Boundaries_w_o_MPO'], {}), '(District_Boundaries, MPO_Boundaries,\n District_Boundaries_w_o_MPO)\n', (1537, 1607), False, 'import arcpy, os\n'), ((1605, 1673), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('File Created: %s\\n' % District_Boundaries_w_o_MPO)"], {}), "('File Created: %s\\n' % District_Boundaries_w_o_MPO)\n", (1621, 1673), False, 'import arcpy, os\n'), ((1675, 1704), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (1702, 1704), False, 'import arcpy, os\n'), ((1706, 1755), 
'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', (['"""Intersect Analysis..."""'], {}), "('Intersect Analysis...')\n", (1730, 1755), False, 'import arcpy, os\n'), ((1757, 1864), 'arcpy.AddMessage', 'arcpy.AddMessage', (['("""Intersecting... %s\nby... %s""" % (intersect_features1[0],\n intersect_features1[1]))'], {}), '("""Intersecting... %s\nby... %s""" % (intersect_features1[0\n ], intersect_features1[1]))\n', (1773, 1864), False, 'import arcpy, os\n'), ((1857, 1942), 'arcpy.Intersect_analysis', 'arcpy.Intersect_analysis', (['intersect_features1', 'FC_roads_by_MPO', '"""ALL"""', '""""""', '"""LINE"""'], {}), "(intersect_features1, FC_roads_by_MPO, 'ALL', '',\n 'LINE')\n", (1881, 1942), False, 'import arcpy, os\n'), ((1939, 1995), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('File Created: %s\\n' % FC_Roads_by_MPO)"], {}), "('File Created: %s\\n' % FC_Roads_by_MPO)\n", (1955, 1995), False, 'import arcpy, os\n'), ((1996, 2025), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (2023, 2025), False, 'import arcpy, os\n'), ((2027, 2076), 'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', (['"""Intersect Analysis..."""'], {}), "('Intersect Analysis...')\n", (2051, 2076), False, 'import arcpy, os\n'), ((2078, 2185), 'arcpy.AddMessage', 'arcpy.AddMessage', (['("""Intersecting... %s\nby... %s""" % (intersect_features2[0],\n intersect_features2[1]))'], {}), '("""Intersecting... %s\nby... 
%s""" % (intersect_features2[0\n ], intersect_features2[1]))\n', (2094, 2185), False, 'import arcpy, os\n'), ((2178, 2268), 'arcpy.Intersect_analysis', 'arcpy.Intersect_analysis', (['intersect_features2', 'FC_roads_by_District', '"""ALL"""', '""""""', '"""LINE"""'], {}), "(intersect_features2, FC_roads_by_District, 'ALL',\n '', 'LINE')\n", (2202, 2268), False, 'import arcpy, os\n'), ((2265, 2326), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('File Created: %s\\n' % FC_roads_by_District)"], {}), "('File Created: %s\\n' % FC_roads_by_District)\n", (2281, 2326), False, 'import arcpy, os\n'), ((2327, 2356), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (2354, 2356), False, 'import arcpy, os\n'), ((2358, 2406), 'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', (['"""Dissolving by MPO..."""'], {}), "('Dissolving by MPO...')\n", (2382, 2406), False, 'import arcpy, os\n'), ((2408, 2499), 'arcpy.AddMessage', 'arcpy.AddMessage', (['("""Dissolving... %s\nby... FUNCL_2008 and MPO_LBL""" % FC_roads_by_MP0)'], {}), '("""Dissolving... %s\nby... 
FUNCL_2008 and MPO_LBL""" %\n FC_roads_by_MP0)\n', (2424, 2499), False, 'import arcpy, os\n'), ((2493, 2620), 'arcpy.Dissolve_management', 'arcpy.Dissolve_management', (['FC_roads_by_MP0', 'Final_FC_Roads_by_MPO', '"""FUNCL_2008;MPO_LBL"""', '""""""', '"""MULTI_PART"""', '"""DISSOLVE_LINES"""'], {}), "(FC_roads_by_MP0, Final_FC_Roads_by_MPO,\n 'FUNCL_2008;MPO_LBL', '', 'MULTI_PART', 'DISSOLVE_LINES')\n", (2518, 2620), False, 'import arcpy, os\n'), ((2617, 2679), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('File Created: %s\\n' % Final_FC_Roads_by_MPO)"], {}), "('File Created: %s\\n' % Final_FC_Roads_by_MPO)\n", (2633, 2679), False, 'import arcpy, os\n'), ((2680, 2709), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (2707, 2709), False, 'import arcpy, os\n'), ((2711, 2764), 'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', (['"""Dissolving by District..."""'], {}), "('Dissolving by District...')\n", (2735, 2764), False, 'import arcpy, os\n'), ((2766, 2862), 'arcpy.AddMessage', 'arcpy.AddMessage', (['("""Dissolving... %s\nby... FUNCL_2008 and DIST_NM""" % FC_roads_by_District)'], {}), '("""Dissolving... %s\nby... 
FUNCL_2008 and DIST_NM""" %\n FC_roads_by_District)\n', (2782, 2862), False, 'import arcpy, os\n'), ((2856, 2994), 'arcpy.Dissolve_management', 'arcpy.Dissolve_management', (['FC_roads_by_District', 'Final_FC_Roads_by_Districts', '"""FUNCL_2008;DIST_NM"""', '""""""', '"""MULTI_PART"""', '"""DISSOLVE_LINES"""'], {}), "(FC_roads_by_District, Final_FC_Roads_by_Districts,\n 'FUNCL_2008;DIST_NM', '', 'MULTI_PART', 'DISSOLVE_LINES')\n", (2881, 2994), False, 'import arcpy, os\n'), ((2991, 3059), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('File Created: %s\\n' % Final_FC_Roads_by_Districts)"], {}), "('File Created: %s\\n' % Final_FC_Roads_by_Districts)\n", (3007, 3059), False, 'import arcpy, os\n'), ((3060, 3089), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (3087, 3089), False, 'import arcpy, os\n'), ((3091, 3134), 'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', (['"""Adding Field..."""'], {}), "('Adding Field...')\n", (3115, 3134), False, 'import arcpy, os\n'), ((3135, 3210), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('Adding Field TTL_MILES to %s...' % Final_FC_Roads_by_MPO)"], {}), "('Adding Field TTL_MILES to %s...' 
% Final_FC_Roads_by_MPO)\n", (3151, 3210), False, 'import arcpy, os\n'), ((3211, 3335), 'arcpy.AddField_management', 'arcpy.AddField_management', (['Final_FC_Roads_by_MPO', '"""TTL_MILES"""', '"""FLOAT"""', '"""9"""', '"""4"""', '""""""', '""""""', '"""NON_NULLABLE"""', '"""NON_REQUIRED"""'], {}), "(Final_FC_Roads_by_MPO, 'TTL_MILES', 'FLOAT', '9',\n '4', '', '', 'NON_NULLABLE', 'NON_REQUIRED')\n", (3236, 3335), False, 'import arcpy, os\n'), ((3332, 3372), 'arcpy.AddMessage', 'arcpy.AddMessage', (['"""Field add complete\n"""'], {}), "('Field add complete\\n')\n", (3348, 3372), False, 'import arcpy, os\n'), ((3373, 3402), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (3400, 3402), False, 'import arcpy, os\n'), ((3404, 3447), 'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', (['"""Adding Field..."""'], {}), "('Adding Field...')\n", (3428, 3447), False, 'import arcpy, os\n'), ((3448, 3533), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('Adding Field TTL_MILES to %s...' % Final_FC_Roads_by_Districts)"], {}), "('Adding Field TTL_MILES to %s...' 
%\n Final_FC_Roads_by_Districts)\n", (3464, 3533), False, 'import arcpy, os\n'), ((3530, 3660), 'arcpy.AddField_management', 'arcpy.AddField_management', (['Final_FC_Roads_by_Districts', '"""TTL_MILES"""', '"""FLOAT"""', '"""9"""', '"""4"""', '""""""', '""""""', '"""NON_NULLABLE"""', '"""NON_REQUIRED"""'], {}), "(Final_FC_Roads_by_Districts, 'TTL_MILES', 'FLOAT',\n '9', '4', '', '', 'NON_NULLABLE', 'NON_REQUIRED')\n", (3555, 3660), False, 'import arcpy, os\n'), ((3657, 3697), 'arcpy.AddMessage', 'arcpy.AddMessage', (['"""Field add complete\n"""'], {}), "('Field add complete\\n')\n", (3673, 3697), False, 'import arcpy, os\n'), ((3698, 3727), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (3725, 3727), False, 'import arcpy, os\n'), ((3729, 3777), 'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', (['"""Calculating Field..."""'], {}), "('Calculating Field...')\n", (3753, 3777), False, 'import arcpy, os\n'), ((3778, 3894), 'arcpy.AddMessage', 'arcpy.AddMessage', (['("""Calculating Total Length in Miles for field TTL_MILES in\n%s""" %\n Final_FC_Roads_by_MPO)'], {}), '(\n """Calculating Total Length in Miles for field TTL_MILES in\n%s""" %\n Final_FC_Roads_by_MPO)\n', (3794, 3894), False, 'import arcpy, os\n'), ((3883, 3992), 'arcpy.CalculateField_management', 'arcpy.CalculateField_management', (['Final_FC_Roads_by_MPO', '"""TTL_MILES"""', '"""!shape.length@miles!"""', '"""PYTHON_9.3"""'], {}), "(Final_FC_Roads_by_MPO, 'TTL_MILES',\n '!shape.length@miles!', 'PYTHON_9.3')\n", (3914, 3992), False, 'import arcpy, os\n'), ((3989, 4045), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('%s - Done!\\n' % Final_FC_Roads_by_MPO)"], {}), "('%s - Done!\\n' % Final_FC_Roads_by_MPO)\n", (4005, 4045), False, 'import arcpy, os\n'), ((4046, 4075), 'arcpy.SetProgressorPosition', 'arcpy.SetProgressorPosition', ([], {}), '()\n', (4073, 4075), False, 'import arcpy, os\n'), ((4077, 4125), 'arcpy.SetProgressorLabel', 'arcpy.SetProgressorLabel', 
(['"""Calculating Field..."""'], {}), "('Calculating Field...')\n", (4101, 4125), False, 'import arcpy, os\n'), ((4126, 4248), 'arcpy.AddMessage', 'arcpy.AddMessage', (['("""Calculating Total Length in Miles for field TTL_MILES in\n%s""" %\n Final_FC_Roads_by_Districts)'], {}), '(\n """Calculating Total Length in Miles for field TTL_MILES in\n%s""" %\n Final_FC_Roads_by_Districts)\n', (4142, 4248), False, 'import arcpy, os\n'), ((4237, 4352), 'arcpy.CalculateField_management', 'arcpy.CalculateField_management', (['Final_FC_Roads_by_Districts', '"""TTL_MILES"""', '"""!shape.length@miles!"""', '"""PYTHON_9.3"""'], {}), "(Final_FC_Roads_by_Districts, 'TTL_MILES',\n '!shape.length@miles!', 'PYTHON_9.3')\n", (4268, 4352), False, 'import arcpy, os\n'), ((4349, 4411), 'arcpy.AddMessage', 'arcpy.AddMessage', (["('%s - Done!\\n' % Final_FC_Roads_by_Districts)"], {}), "('%s - Done!\\n' % Final_FC_Roads_by_Districts)\n", (4365, 4411), False, 'import arcpy, os\n'), ((4412, 4435), 'arcpy.ResetProgressor', 'arcpy.ResetProgressor', ([], {}), '()\n', (4433, 4435), False, 'import arcpy, os\n'), ((365, 393), 'os.path.exists', 'os.path.exists', (['scracthSpace'], {}), '(scracthSpace)\n', (379, 393), False, 'import arcpy, os\n'), ((396, 421), 'os.makedirs', 'os.makedirs', (['scracthSpace'], {}), '(scracthSpace)\n', (407, 421), False, 'import arcpy, os\n')] |
"""pah_fm URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf import settings
from django.conf.urls.static import static
from rest_framework.documentation import include_docs_urls
from pah_fm.views import CustomObtainJSONWebToken
from fleet_management.api import (
CarListView,
CurrentUserRetrieveView,
DriveView,
PassengerListView,
ProjectView,
RefuelView,
)
# URL route table: admin, browsable API docs, JWT auth, and fleet endpoints.
urlpatterns = [
    # Django admin site.
    path("admin/", admin.site.urls),
    # Auto-generated, login-protected API documentation.
    path("api/docs/", include_docs_urls(title="PAH-FM", public=False)),
    # Exchange credentials for a JSON Web Token.
    path("api/api-token-auth/", CustomObtainJSONWebToken.as_view(), name="jwt"),
    # Fleet-management REST endpoints.
    path("api/users/me", CurrentUserRetrieveView.as_view(), name="me"),
    path("api/passengers", PassengerListView.as_view(), name="passengers"),
    path("api/cars", CarListView.as_view(), name="cars"),
    path("api/drives", DriveView.as_view(), name="drives"),
    path("api/projects", ProjectView.as_view(), name="projects"),
    path("api/refuels", RefuelView.as_view(), name="refuels"),
]
# Append static-file routes (the helper returns routes only when applicable).
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| [
"fleet_management.api.ProjectView.as_view",
"fleet_management.api.PassengerListView.as_view",
"fleet_management.api.RefuelView.as_view",
"fleet_management.api.DriveView.as_view",
"fleet_management.api.CurrentUserRetrieveView.as_view",
"django.conf.urls.static.static",
"rest_framework.documentation.inclu... | [((1640, 1703), 'django.conf.urls.static.static', 'static', (['settings.STATIC_URL'], {'document_root': 'settings.STATIC_ROOT'}), '(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n', (1646, 1703), False, 'from django.conf.urls.static import static\n'), ((1055, 1086), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (1059, 1086), False, 'from django.urls import path\n'), ((1110, 1157), 'rest_framework.documentation.include_docs_urls', 'include_docs_urls', ([], {'title': '"""PAH-FM"""', 'public': '(False)'}), "(title='PAH-FM', public=False)\n", (1127, 1157), False, 'from rest_framework.documentation import include_docs_urls\n'), ((1192, 1226), 'pah_fm.views.CustomObtainJSONWebToken.as_view', 'CustomObtainJSONWebToken.as_view', ([], {}), '()\n', (1224, 1226), False, 'from pah_fm.views import CustomObtainJSONWebToken\n'), ((1266, 1299), 'fleet_management.api.CurrentUserRetrieveView.as_view', 'CurrentUserRetrieveView.as_view', ([], {}), '()\n', (1297, 1299), False, 'from fleet_management.api import CarListView, CurrentUserRetrieveView, DriveView, PassengerListView, ProjectView, RefuelView\n'), ((1340, 1367), 'fleet_management.api.PassengerListView.as_view', 'PassengerListView.as_view', ([], {}), '()\n', (1365, 1367), False, 'from fleet_management.api import CarListView, CurrentUserRetrieveView, DriveView, PassengerListView, ProjectView, RefuelView\n'), ((1410, 1431), 'fleet_management.api.CarListView.as_view', 'CarListView.as_view', ([], {}), '()\n', (1429, 1431), False, 'from fleet_management.api import CarListView, CurrentUserRetrieveView, DriveView, PassengerListView, ProjectView, RefuelView\n'), ((1470, 1489), 'fleet_management.api.DriveView.as_view', 'DriveView.as_view', ([], {}), '()\n', (1487, 1489), False, 'from fleet_management.api import CarListView, CurrentUserRetrieveView, DriveView, PassengerListView, ProjectView, RefuelView\n'), ((1532, 1553), 
'fleet_management.api.ProjectView.as_view', 'ProjectView.as_view', ([], {}), '()\n', (1551, 1553), False, 'from fleet_management.api import CarListView, CurrentUserRetrieveView, DriveView, PassengerListView, ProjectView, RefuelView\n'), ((1597, 1617), 'fleet_management.api.RefuelView.as_view', 'RefuelView.as_view', ([], {}), '()\n', (1615, 1617), False, 'from fleet_management.api import CarListView, CurrentUserRetrieveView, DriveView, PassengerListView, ProjectView, RefuelView\n')] |