"""
This package defines classes that simplify bit-wise creation, manipulation and
interpretation of data.
Classes:
Bits -- An immutable container for binary data.
BitArray -- A mutable container for binary data.
ConstBitStream -- An immutable container with streaming methods.
BitStream -- A mutable container with streaming methods.
          Bits (base class)
         /                 \
        / + mutating        \ + streaming
       /   methods           \  methods
      /                       \
  BitArray               ConstBitStream
      \                       /
       \                     /
        \                   /
             BitStream
Functions:
pack -- Create a BitStream from a format string.
Exceptions:
Error -- Module exception base class.
CreationError -- Error during creation.
InterpretError -- Inappropriate interpretation of binary data.
ByteAlignError -- Whole byte position or length needed.
ReadError -- Reading or peeking past the end of a bitstring.
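A brief illustrative example of typical use:
>>> from bitstring import BitArray, pack
>>> a = BitArray('0xff01')
>>> a.bin
'1111111100000001'
>>> pack('uint:8, uint:8', 1, 2) == '0x0102'
True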
https://github.com/scott-griffiths/bitstring
"""
__licence__ = """
The MIT License
Copyright (c) 2006-2016 Scott Griffiths (dr.scottgriffiths@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__version__ = "3.1.5"
__author__ = "Scott Griffiths"
import numbers
import copy
import sys
import re
import binascii
import mmap
import os
import struct
import operator
import collections
import array
byteorder = sys.byteorder
bytealigned = False
"""Determines whether a number of methods default to working only on byte boundaries."""
# Maximum number of characters to use in __str__ and __repr__.
MAX_CHARS = 250
# Maximum size of caches used for speed optimisations.
CACHE_SIZE = 1000
class Error(Exception):
"""Base class for errors in the bitstring module."""
def __init__(self, *params):
self.msg = params[0] if params else ''
self.params = params[1:]
def __str__(self):
if self.params:
return self.msg.format(*self.params)
return self.msg
class ReadError(Error, IndexError):
"""Reading or peeking past the end of a bitstring."""
def __init__(self, *params):
Error.__init__(self, *params)
class InterpretError(Error, ValueError):
"""Inappropriate interpretation of binary data."""
def __init__(self, *params):
Error.__init__(self, *params)
class ByteAlignError(Error):
"""Whole-byte position or length needed."""
def __init__(self, *params):
Error.__init__(self, *params)
class CreationError(Error, ValueError):
"""Inappropriate argument during bitstring creation."""
def __init__(self, *params):
Error.__init__(self, *params)
class ConstByteStore(object):
"""Stores raw bytes together with a bit offset and length.
Used internally - not part of public interface.
"""
__slots__ = ('offset', '_rawarray', 'bitlength')
def __init__(self, data, bitlength=None, offset=None):
"""data is either a bytearray or a MmapByteArray"""
self._rawarray = data
if offset is None:
offset = 0
if bitlength is None:
bitlength = 8 * len(data) - offset
self.offset = offset
self.bitlength = bitlength
def getbit(self, pos):
assert 0 <= pos < self.bitlength
byte, bit = divmod(self.offset + pos, 8)
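# e.g. offset=3, pos=6: byte=1, bit=1, so we test mask 0x40 of the second raw byte.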
return bool(self._rawarray[byte] & (128 >> bit))
def getbyte(self, pos):
"""Direct access to byte data."""
return self._rawarray[pos]
def getbyteslice(self, start, end):
"""Direct access to byte data."""
c = self._rawarray[start:end]
return c
@property
def bytelength(self):
if not self.bitlength:
return 0
sb = self.offset // 8
eb = (self.offset + self.bitlength - 1) // 8
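# e.g. offset=3, bitlength=10: sb=0, eb=1, so the data spans 2 raw bytes.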
return eb - sb + 1
def __copy__(self):
return ByteStore(self._rawarray[:], self.bitlength, self.offset)
def _appendstore(self, store):
"""Join another store on to the end of this one."""
if not store.bitlength:
return
# Set new array offset to the number of bits in the final byte of current array.
store = offsetcopy(store, (self.offset + self.bitlength) % 8)
if store.offset:
# first do the byte with the join.
joinval = (self._rawarray.pop() & (255 ^ (255 >> store.offset)) |
(store.getbyte(0) & (255 >> store.offset)))
self._rawarray.append(joinval)
self._rawarray.extend(store._rawarray[1:])
else:
self._rawarray.extend(store._rawarray)
self.bitlength += store.bitlength
def _prependstore(self, store):
"""Join another store on to the start of this one."""
if not store.bitlength:
return
# Set the offset of the copy of store so that its final byte
# ends in a position that matches the offset of self,
# then join self on to the end of it.
store = offsetcopy(store, (self.offset - store.bitlength) % 8)
assert (store.offset + store.bitlength) % 8 == self.offset % 8
bit_offset = self.offset % 8
if bit_offset:
# first do the byte with the join.
store.setbyte(-1, (store.getbyte(-1) & (255 ^ (255 >> bit_offset)) | \
(self._rawarray[self.byteoffset] & (255 >> bit_offset))))
store._rawarray.extend(self._rawarray[self.byteoffset + 1: self.byteoffset + self.bytelength])
else:
store._rawarray.extend(self._rawarray[self.byteoffset: self.byteoffset + self.bytelength])
self._rawarray = store._rawarray
self.offset = store.offset
self.bitlength += store.bitlength
@property
def byteoffset(self):
return self.offset // 8
@property
def rawbytes(self):
return self._rawarray
class ByteStore(ConstByteStore):
"""Adding mutating methods to ConstByteStore
Used internally - not part of public interface.
"""
__slots__ = ()
def setbit(self, pos):
assert 0 <= pos < self.bitlength
byte, bit = divmod(self.offset + pos, 8)
self._rawarray[byte] |= (128 >> bit)
def unsetbit(self, pos):
assert 0 <= pos < self.bitlength
byte, bit = divmod(self.offset + pos, 8)
self._rawarray[byte] &= ~(128 >> bit)
def invertbit(self, pos):
assert 0 <= pos < self.bitlength
byte, bit = divmod(self.offset + pos, 8)
self._rawarray[byte] ^= (128 >> bit)
def setbyte(self, pos, value):
self._rawarray[pos] = value
def setbyteslice(self, start, end, value):
self._rawarray[start:end] = value
def offsetcopy(s, newoffset):
"""Return a copy of a ByteStore with the newoffset.
Not part of public interface.
"""
assert 0 <= newoffset < 8
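# Illustrative example: a store holding the bits '101' at offset 7 occupies
# two raw bytes; offsetcopy(s, 0) repacks the same three bits into a single
# byte with the pattern 0b10100000.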
if not s.bitlength:
return copy.copy(s)
else:
if newoffset == s.offset % 8:
return ByteStore(s.getbyteslice(s.byteoffset, s.byteoffset + s.bytelength), s.bitlength, newoffset)
newdata = []
d = s._rawarray
assert newoffset != s.offset % 8
if newoffset < s.offset % 8:
# We need to shift everything left
shiftleft = s.offset % 8 - newoffset
# First deal with everything except for the final byte
for x in range(s.byteoffset, s.byteoffset + s.bytelength - 1):
newdata.append(((d[x] << shiftleft) & 0xff) +\
(d[x + 1] >> (8 - shiftleft)))
bits_in_last_byte = (s.offset + s.bitlength) % 8
if not bits_in_last_byte:
bits_in_last_byte = 8
if bits_in_last_byte > shiftleft:
newdata.append((d[s.byteoffset + s.bytelength - 1] << shiftleft) & 0xff)
else: # newoffset > s.offset % 8
shiftright = newoffset - s.offset % 8
newdata.append(s.getbyte(0) >> shiftright)
for x in range(s.byteoffset + 1, s.byteoffset + s.bytelength):
newdata.append(((d[x - 1] << (8 - shiftright)) & 0xff) +\
(d[x] >> shiftright))
bits_in_last_byte = (s.offset + s.bitlength) % 8
if not bits_in_last_byte:
bits_in_last_byte = 8
if bits_in_last_byte + shiftright > 8:
newdata.append((d[s.byteoffset + s.bytelength - 1] << (8 - shiftright)) & 0xff)
new_s = ByteStore(bytearray(newdata), s.bitlength, newoffset)
assert new_s.offset == newoffset
return new_s
def equal(a, b):
"""Return True if ByteStores a == b.
Not part of public interface.
"""
# We want to return False for inequality as soon as possible, which
# means we get lots of special cases.
# First the easy one - compare lengths:
a_bitlength = a.bitlength
b_bitlength = b.bitlength
if a_bitlength != b_bitlength:
return False
if not a_bitlength:
assert b_bitlength == 0
return True
# Make 'a' the one with the smaller offset
if (a.offset % 8) > (b.offset % 8):
a, b = b, a
# and create some aliases
a_bitoff = a.offset % 8
b_bitoff = b.offset % 8
a_byteoffset = a.byteoffset
b_byteoffset = b.byteoffset
a_bytelength = a.bytelength
b_bytelength = b.bytelength
da = a._rawarray
db = b._rawarray
# If they are pointing to the same data, they must be equal
if da is db and a.offset == b.offset:
return True
if a_bitoff == b_bitoff:
bits_spare_in_last_byte = 8 - (a_bitoff + a_bitlength) % 8
if bits_spare_in_last_byte == 8:
bits_spare_in_last_byte = 0
# Special case for a, b contained in a single byte
if a_bytelength == 1:
a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
b_val = ((db[b_byteoffset] << b_bitoff) & 0xff) >> (8 - b_bitlength)
return a_val == b_val
# Otherwise check first byte
if da[a_byteoffset] & (0xff >> a_bitoff) != db[b_byteoffset] & (0xff >> b_bitoff):
return False
# then everything up to the last
b_a_offset = b_byteoffset - a_byteoffset
for x in range(1 + a_byteoffset, a_byteoffset + a_bytelength - 1):
if da[x] != db[b_a_offset + x]:
return False
# and finally the last byte
return (da[a_byteoffset + a_bytelength - 1] >> bits_spare_in_last_byte ==
db[b_byteoffset + b_bytelength - 1] >> bits_spare_in_last_byte)
assert a_bitoff != b_bitoff
# This is how much we need to shift a to the right to compare with b:
shift = b_bitoff - a_bitoff
# Special case for b only one byte long
if b_bytelength == 1:
assert a_bytelength == 1
a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
b_val = ((db[b_byteoffset] << b_bitoff) & 0xff) >> (8 - b_bitlength)
return a_val == b_val
# Special case for a only one byte long
if a_bytelength == 1:
assert b_bytelength == 2
a_val = ((da[a_byteoffset] << a_bitoff) & 0xff) >> (8 - a_bitlength)
b_val = ((db[b_byteoffset] << 8) + db[b_byteoffset + 1]) << b_bitoff
b_val &= 0xffff
b_val >>= 16 - b_bitlength
return a_val == b_val
# Compare first byte of b with bits from first byte of a
if (da[a_byteoffset] & (0xff >> a_bitoff)) >> shift != db[b_byteoffset] & (0xff >> b_bitoff):
return False
# Now compare every full byte of b with bits from 2 bytes of a
for x in range(1, b_bytelength - 1):
# Construct byte from 2 bytes in a to compare to byte in b
b_val = db[b_byteoffset + x]
a_val = ((da[a_byteoffset + x - 1] << 8) + da[a_byteoffset + x]) >> shift
a_val &= 0xff
if a_val != b_val:
return False
# Now check bits in final byte of b
final_b_bits = (b.offset + b_bitlength) % 8
if not final_b_bits:
final_b_bits = 8
b_val = db[b_byteoffset + b_bytelength - 1] >> (8 - final_b_bits)
final_a_bits = (a.offset + a_bitlength) % 8
if not final_a_bits:
final_a_bits = 8
if b.bytelength > a_bytelength:
assert b_bytelength == a_bytelength + 1
a_val = da[a_byteoffset + a_bytelength - 1] >> (8 - final_a_bits)
a_val &= 0xff >> (8 - final_b_bits)
return a_val == b_val
assert a_bytelength == b_bytelength
a_val = da[a_byteoffset + a_bytelength - 2] << 8
a_val += da[a_byteoffset + a_bytelength - 1]
a_val >>= (8 - final_a_bits)
a_val &= 0xff >> (8 - final_b_bits)
return a_val == b_val
class MmapByteArray(object):
"""Looks like a bytearray, but from an mmap.
Not part of public interface.
"""
__slots__ = ('filemap', 'filelength', 'source', 'byteoffset', 'bytelength')
def __init__(self, source, bytelength=None, byteoffset=None):
self.source = source
source.seek(0, os.SEEK_END)
self.filelength = source.tell()
if byteoffset is None:
byteoffset = 0
if bytelength is None:
bytelength = self.filelength - byteoffset
self.byteoffset = byteoffset
self.bytelength = bytelength
self.filemap = mmap.mmap(source.fileno(), 0, access=mmap.ACCESS_READ)
def __getitem__(self, key):
try:
start = key.start
stop = key.stop
except AttributeError:
try:
assert 0 <= key < self.bytelength
return ord(self.filemap[key + self.byteoffset])
except TypeError:
# for Python 3
return self.filemap[key + self.byteoffset]
else:
if start is None:
start = 0
if stop is None:
stop = self.bytelength
assert key.step is None
assert 0 <= start < self.bytelength
assert 0 <= stop <= self.bytelength
s = slice(start + self.byteoffset, stop + self.byteoffset)
return bytearray(self.filemap.__getitem__(s))
def __len__(self):
return self.bytelength
# This creates a dictionary mapping every possible byte value to
# that byte with its bits reversed.
BYTE_REVERSAL_DICT = dict()
# For Python 2.x/ 3.x coexistence
# Yes this is very very hacky.
try:
xrange
for i in range(256):
BYTE_REVERSAL_DICT[i] = chr(int("{0:08b}".format(i)[::-1], 2))
except NameError:
for i in range(256):
BYTE_REVERSAL_DICT[i] = bytes([int("{0:08b}".format(i)[::-1], 2)])
from io import IOBase as file
xrange = range
basestring = str
# Python 2.x octal literals start with '0'; in Python 3 they start with '0o'
LEADING_OCT_CHARS = len(oct(1)) - 1
def tidy_input_string(s):
"""Return string made lowercase and with all whitespace removed."""
s = ''.join(s.split()).lower()
return s
INIT_NAMES = ('uint', 'int', 'ue', 'se', 'sie', 'uie', 'hex', 'oct', 'bin', 'bits',
'uintbe', 'intbe', 'uintle', 'intle', 'uintne', 'intne',
'float', 'floatbe', 'floatle', 'floatne', 'bytes', 'bool', 'pad')
TOKEN_RE = re.compile(r'(?P<name>' + '|'.join(INIT_NAMES) +
r')((:(?P<len>[^=]+)))?(=(?P<value>.*))?$', re.IGNORECASE)
DEFAULT_UINT = re.compile(r'(?P<len>[^=]+)?(=(?P<value>.*))?$', re.IGNORECASE)
MULTIPLICATIVE_RE = re.compile(r'(?P<factor>.*)\*(?P<token>.+)')
# Hex, oct or binary literals
LITERAL_RE = re.compile(r'(?P<name>0(x|o|b))(?P<value>.+)', re.IGNORECASE)
# An endianness indicator followed by one or more struct.pack codes
STRUCT_PACK_RE = re.compile(r'(?P<endian><|>|@)?(?P<fmt>(?:\d*[bBhHlLqQfd])+)$')
# A number followed by a single character struct.pack code
STRUCT_SPLIT_RE = re.compile(r'\d*[bBhHlLqQfd]')
# These replicate the struct.pack codes
# Big-endian
REPLACEMENTS_BE = {'b': 'intbe:8', 'B': 'uintbe:8',
'h': 'intbe:16', 'H': 'uintbe:16',
'l': 'intbe:32', 'L': 'uintbe:32',
'q': 'intbe:64', 'Q': 'uintbe:64',
'f': 'floatbe:32', 'd': 'floatbe:64'}
# Little-endian
REPLACEMENTS_LE = {'b': 'intle:8', 'B': 'uintle:8',
'h': 'intle:16', 'H': 'uintle:16',
'l': 'intle:32', 'L': 'uintle:32',
'q': 'intle:64', 'Q': 'uintle:64',
'f': 'floatle:32', 'd': 'floatle:64'}
# Size in bytes of all the pack codes.
PACK_CODE_SIZE = {'b': 1, 'B': 1, 'h': 2, 'H': 2, 'l': 4, 'L': 4,
'q': 8, 'Q': 8, 'f': 4, 'd': 8}
_tokenname_to_initialiser = {'hex': 'hex', '0x': 'hex', '0X': 'hex', 'oct': 'oct',
'0o': 'oct', '0O': 'oct', 'bin': 'bin', '0b': 'bin',
'0B': 'bin', 'bits': 'auto', 'bytes': 'bytes', 'pad': 'pad'}
def structparser(token):
"""Parse struct-like format string token into sub-token list."""
m = STRUCT_PACK_RE.match(token)
if not m:
return [token]
else:
endian = m.group('endian')
if endian is None:
return [token]
# Split the format string into a list of 'q', '4h' etc.
formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt'))
# Now deal with multiplicative factors, 4h -> hhhh etc.
fmt = ''.join([f[-1] * int(f[:-1]) if len(f) != 1 else
f for f in formatlist])
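# e.g. '>2h4b' gives fmt == 'hhbbbb' here, which is then mapped below to
# ['intbe:16', 'intbe:16', 'intbe:8', 'intbe:8', 'intbe:8', 'intbe:8'].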
if endian == '@':
# Native endianness
if byteorder == 'little':
endian = '<'
else:
assert byteorder == 'big'
endian = '>'
if endian == '<':
tokens = [REPLACEMENTS_LE[c] for c in fmt]
else:
assert endian == '>'
tokens = [REPLACEMENTS_BE[c] for c in fmt]
return tokens
def tokenparser(fmt, keys=None, token_cache={}):
"""Divide the format string into tokens and parse them.
Return stretchy token and list of [initialiser, length, value]
initialiser is one of: hex, oct, bin, uint, int, se, ue, 0x, 0o, 0b etc.
length is None if not known, as is value.
If the token is in the keyword dictionary (keys) then it counts as a
special case and isn't messed with.
tokens must be of the form: [factor*][initialiser][:][length][=value]
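For example, tokenparser('uint:12, hex:8') returns
(False, [('uint', 12, None), ('hex', 8, None)]).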
"""
try:
return token_cache[(fmt, keys)]
except KeyError:
token_key = (fmt, keys)
# Very inefficient expanding of brackets.
fmt = expand_brackets(fmt)
# Split tokens by ',' and remove whitespace
# The meta_tokens can either be ordinary single tokens or multiple
# struct-format token strings.
meta_tokens = (''.join(f.split()) for f in fmt.split(','))
return_values = []
stretchy_token = False
for meta_token in meta_tokens:
# See if it has a multiplicative factor
m = MULTIPLICATIVE_RE.match(meta_token)
if not m:
factor = 1
else:
factor = int(m.group('factor'))
meta_token = m.group('token')
# See if it's a struct-like format
tokens = structparser(meta_token)
ret_vals = []
for token in tokens:
if keys and token in keys:
# Don't bother parsing it, it's a keyword argument
ret_vals.append([token, None, None])
continue
value = length = None
if token == '':
continue
# Match literal tokens of the form 0x... 0o... and 0b...
m = LITERAL_RE.match(token)
if m:
name = m.group('name')
value = m.group('value')
ret_vals.append([name, length, value])
continue
# Match everything else:
m1 = TOKEN_RE.match(token)
if not m1:
# and if you don't specify a 'name' then the default is 'uint':
m2 = DEFAULT_UINT.match(token)
if not m2:
raise ValueError("Don't understand token '{0}'.".format(token))
if m1:
name = m1.group('name')
length = m1.group('len')
if m1.group('value'):
value = m1.group('value')
else:
assert m2
name = 'uint'
length = m2.group('len')
if m2.group('value'):
value = m2.group('value')
if name == 'bool':
if length is not None:
raise ValueError("You can't specify a length with bool tokens - they are always one bit.")
length = 1
if length is None and name not in ('se', 'ue', 'sie', 'uie'):
stretchy_token = True
if length is not None:
# Try converting length to int, otherwise check it's a key.
try:
length = int(length)
if length < 0:
raise Error
# For the 'bytes' token convert length to bits.
if name == 'bytes':
length *= 8
except Error:
raise ValueError("Can't read a token with a negative length.")
except ValueError:
if not keys or length not in keys:
raise ValueError("Don't understand length '{0}' of token.".format(length))
ret_vals.append([name, length, value])
# This multiplies by the multiplicative factor, but this means that
# we can't allow keyword values as multipliers (e.g. n*uint:8).
# The only way to do this would be to return the factor in some fashion
# (we can't use the key's value here as it would mean that we couldn't
# sensibly continue to cache the function's results). (TODO)
return_values.extend(ret_vals * factor)
return_values = [tuple(x) for x in return_values]
if len(token_cache) < CACHE_SIZE:
token_cache[token_key] = stretchy_token, return_values
return stretchy_token, return_values
# Looks for first number*(
BRACKET_RE = re.compile(r'(?P<factor>\d+)\*\(')
def expand_brackets(s):
"""Remove whitespace and expand all brackets."""
s = ''.join(s.split())
while True:
start = s.find('(')
if start == -1:
break
count = 1 # Number of hanging open brackets
p = start + 1
while p < len(s):
if s[p] == '(':
count += 1
if s[p] == ')':
count -= 1
if not count:
break
p += 1
if count:
raise ValueError("Unbalanced parenthesis in '{0}'.".format(s))
if start == 0 or s[start - 1] != '*':
s = s[0:start] + s[start + 1:p] + s[p + 1:]
else:
m = BRACKET_RE.search(s)
if m:
factor = int(m.group('factor'))
matchstart = m.start('factor')
s = s[0:matchstart] + (factor - 1) * (s[start + 1:p] + ',') + s[start + 1:p] + s[p + 1:]
else:
raise ValueError("Failed to parse '{0}'.".format(s))
return s
# This converts a single octal digit to 3 bits.
OCT_TO_BITS = ['{0:03b}'.format(i) for i in xrange(8)]
# A dictionary giving the number of 1 bits in the binary representation of each byte value
BIT_COUNT = dict(zip(xrange(256), [bin(i).count('1') for i in xrange(256)]))
class Bits(object):
"""A container holding an immutable sequence of bits.
For a mutable container use the BitArray class instead.
Methods:
all() -- Check if all specified bits are set to 1 or 0.
any() -- Check if any of specified bits are set to 1 or 0.
count() -- Count the number of bits set to 1 or 0.
cut() -- Create generator of constant sized chunks.
endswith() -- Return whether the bitstring ends with a sub-string.
find() -- Find a sub-bitstring in the current bitstring.
findall() -- Find all occurrences of a sub-bitstring in the current bitstring.
join() -- Join bitstrings together using current bitstring.
rfind() -- Seek backwards to find a sub-bitstring.
split() -- Create generator of chunks split by a delimiter.
startswith() -- Return whether the bitstring starts with a sub-bitstring.
tobytes() -- Return bitstring as bytes, padding if needed.
tofile() -- Write bitstring to file, padding if needed.
unpack() -- Interpret bits using format string.
Special methods:
Also available are the operators [], ==, !=, +, *, ~, <<, >>, &, |, ^.
Properties:
bin -- The bitstring as a binary string.
bool -- For single bit bitstrings, interpret as True or False.
bytes -- The bitstring as a bytes object.
float -- Interpret as a floating point number.
floatbe -- Interpret as a big-endian floating point number.
floatle -- Interpret as a little-endian floating point number.
floatne -- Interpret as a native-endian floating point number.
hex -- The bitstring as a hexadecimal string.
int -- Interpret as a two's complement signed integer.
intbe -- Interpret as a big-endian signed integer.
intle -- Interpret as a little-endian signed integer.
intne -- Interpret as a native-endian signed integer.
len -- Length of the bitstring in bits.
oct -- The bitstring as an octal string.
se -- Interpret as a signed exponential-Golomb code.
ue -- Interpret as an unsigned exponential-Golomb code.
sie -- Interpret as a signed interleaved exponential-Golomb code.
uie -- Interpret as an unsigned interleaved exponential-Golomb code.
uint -- Interpret as a two's complement unsigned integer.
uintbe -- Interpret as a big-endian unsigned integer.
uintle -- Interpret as a little-endian unsigned integer.
uintne -- Interpret as a native-endian unsigned integer.
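A couple of brief examples of the interpretation properties:
>>> Bits('0x1f2e').hex
'1f2e'
>>> Bits(int=-1, length=12).bin
'111111111111'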
"""
__slots__ = ('_datastore')
def __init__(self, auto=None, length=None, offset=None, **kwargs):
"""Either specify an 'auto' initialiser:
auto -- a string of comma separated tokens, an integer, a file object,
a bytearray, a boolean iterable, an array or another bitstring.
Or initialise via **kwargs with one (and only one) of:
bytes -- raw data as a string, for example read from a binary file.
bin -- binary string representation, e.g. '0b001010'.
hex -- hexadecimal string representation, e.g. '0x2ef'
oct -- octal string representation, e.g. '0o777'.
uint -- an unsigned integer.
int -- a signed integer.
float -- a floating point number.
uintbe -- an unsigned big-endian whole byte integer.
intbe -- a signed big-endian whole byte integer.
floatbe - a big-endian floating point number.
uintle -- an unsigned little-endian whole byte integer.
intle -- a signed little-endian whole byte integer.
floatle -- a little-endian floating point number.
uintne -- an unsigned native-endian whole byte integer.
intne -- a signed native-endian whole byte integer.
floatne -- a native-endian floating point number.
se -- a signed exponential-Golomb code.
ue -- an unsigned exponential-Golomb code.
sie -- a signed interleaved exponential-Golomb code.
uie -- an unsigned interleaved exponential-Golomb code.
bool -- a boolean (True or False).
filename -- a file which will be opened in binary read-only mode.
Other keyword arguments:
length -- length of the bitstring in bits, if needed and appropriate.
It must be supplied for all integer and float initialisers.
offset -- bit offset to the data. These offset bits are
ignored and this is mainly intended for use when
initialising using 'bytes' or 'filename'.
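Some illustrative constructions (the filename used here is just an example):
Bits(bytes=b'\x12\xff'), Bits(hex='0x2ef'), Bits(uint=12, length=8),
Bits('0b110'), Bits(filename='data.bin', offset=16)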
"""
pass
def __new__(cls, auto=None, length=None, offset=None, _cache={}, **kwargs):
# For instances auto-initialised with a string we intern the
# instance for re-use.
try:
if isinstance(auto, basestring):
try:
return _cache[auto]
except KeyError:
x = object.__new__(Bits)
try:
_, tokens = tokenparser(auto)
except ValueError as e:
raise CreationError(*e.args)
x._datastore = ConstByteStore(bytearray(0), 0, 0)
for token in tokens:
x._datastore._appendstore(Bits._init_with_token(*token)._datastore)
assert x._assertsanity()
if len(_cache) < CACHE_SIZE:
_cache[auto] = x
return x
if type(auto) == Bits:
return auto
except TypeError:
pass
x = super(Bits, cls).__new__(cls)
x._initialise(auto, length, offset, **kwargs)
return x
def _initialise(self, auto, length, offset, **kwargs):
if length is not None and length < 0:
raise CreationError("bitstring length cannot be negative.")
if offset is not None and offset < 0:
raise CreationError("offset must be >= 0.")
if auto is not None:
self._initialise_from_auto(auto, length, offset)
return
if not kwargs:
# No initialisers, so initialise with nothing or zero bits
if length is not None and length != 0:
data = bytearray((length + 7) // 8)
self._setbytes_unsafe(data, length, 0)
return
self._setbytes_unsafe(bytearray(0), 0, 0)
return
k, v = kwargs.popitem()
try:
init_without_length_or_offset[k](self, v)
if length is not None or offset is not None:
raise CreationError("Cannot use length or offset with this initialiser.")
except KeyError:
try:
init_with_length_only[k](self, v, length)
if offset is not None:
raise CreationError("Cannot use offset with this initialiser.")
except KeyError:
if offset is None:
offset = 0
try:
init_with_length_and_offset[k](self, v, length, offset)
except KeyError:
raise CreationError("Unrecognised keyword '{0}' used to initialise.", k)
def _initialise_from_auto(self, auto, length, offset):
if offset is None:
offset = 0
self._setauto(auto, length, offset)
return
def __copy__(self):
"""Return a new copy of the Bits for the copy module."""
# Note that if you want a new copy (different ID), use _copy instead.
# The copy can return self as it's immutable.
return self
def __lt__(self, other):
raise TypeError("unorderable type: {0}".format(type(self).__name__))
def __gt__(self, other):
raise TypeError("unorderable type: {0}".format(type(self).__name__))
def __le__(self, other):
raise TypeError("unorderable type: {0}".format(type(self).__name__))
def __ge__(self, other):
raise TypeError("unorderable type: {0}".format(type(self).__name__))
def __add__(self, bs):
"""Concatenate bitstrings and return new bitstring.
bs -- the bitstring to append.
"""
bs = Bits(bs)
if bs.len <= self.len:
s = self._copy()
s._append(bs)
else:
s = bs._copy()
s = self.__class__(s)
s._prepend(self)
return s
def __radd__(self, bs):
"""Append current bitstring to bs and return new bitstring.
bs -- the string for the 'auto' initialiser that will be appended to.
"""
bs = self._converttobitstring(bs)
return bs.__add__(self)
def __getitem__(self, key):
"""Return a new bitstring representing a slice of the current bitstring.
Slicing with a step other than 1 selects every step-th bit, as with
ordinary sequence slicing (the slice is applied to the binary string).
>>> BitArray('0b00110')[1:4]
BitArray('0b011')
>>> BitArray('0b0011001100')[::2]
BitArray('0b01010')
"""
length = self.len
try:
step = key.step if key.step is not None else 1
except AttributeError:
# single element
if key < 0:
key += length
if not 0 <= key < length:
raise IndexError("Slice index out of range.")
# Single bit, return True or False
return self._datastore.getbit(key)
else:
if step != 1:
# convert to binary string and use string slicing
bs = self.__class__()
bs._setbin_unsafe(self._getbin().__getitem__(key))
return bs
start, stop = 0, length
if key.start is not None:
start = key.start
if key.start < 0:
start += stop
if key.stop is not None:
stop = key.stop
if key.stop < 0:
stop += length
start = max(start, 0)
stop = min(stop, length)
if start < stop:
return self._slice(start, stop)
else:
return self.__class__()
def __len__(self):
"""Return the length of the bitstring in bits."""
return self._getlength()
def __str__(self):
"""Return approximate string representation of bitstring for printing.
Short strings will be given wholly in hexadecimal or binary. Longer
strings may be part hexadecimal and part binary. Very long strings will
be truncated with '...'.
"""
length = self.len
if not length:
return ''
if length > MAX_CHARS * 4:
# Too long for hex. Truncate...
return ''.join(('0x', self._readhex(MAX_CHARS * 4, 0), '...'))
# If it's quite short and we can't do hex then use bin
if length < 32 and length % 4 != 0:
return '0b' + self.bin
# If we can use hex then do so
if not length % 4:
return '0x' + self.hex
# Otherwise first we do as much as we can in hex
# then add on 1, 2 or 3 bits on at the end
bits_at_end = length % 4
return ''.join(('0x', self._readhex(length - bits_at_end, 0),
', ', '0b',
self._readbin(bits_at_end, length - bits_at_end)))
def __repr__(self):
"""Return representation that could be used to recreate the bitstring.
If the returned string is too long it will be truncated. See __str__().
"""
length = self.len
if isinstance(self._datastore._rawarray, MmapByteArray):
offsetstring = ''
if self._datastore.byteoffset or self._offset:
offsetstring = ", offset=%d" % (self._datastore._rawarray.byteoffset * 8 + self._offset)
lengthstring = ", length=%d" % length
return "{0}(filename='{1}'{2}{3})".format(self.__class__.__name__,
self._datastore._rawarray.source.name, lengthstring, offsetstring)
else:
s = self.__str__()
lengthstring = ''
if s.endswith('...'):
lengthstring = " # length={0}".format(length)
return "{0}('{1}'){2}".format(self.__class__.__name__, s, lengthstring)
def __eq__(self, bs):
"""Return True if two bitstrings have the same binary representation.
>>> BitArray('0b1110') == '0xe'
True
"""
try:
bs = Bits(bs)
except TypeError:
return False
return equal(self._datastore, bs._datastore)
def __ne__(self, bs):
"""Return False if two bitstrings have the same binary representation.
>>> BitArray('0b111') != '0x7'
True
"""
return not self.__eq__(bs)
def __invert__(self):
"""Return bitstring with every bit inverted.
Raises Error if the bitstring is empty.
"""
if not self.len:
raise Error("Cannot invert empty bitstring.")
s = self._copy()
s._invert_all()
return s
def __lshift__(self, n):
"""Return bitstring with bits shifted by n to the left.
n -- the number of bits to shift. Must be >= 0.
"""
if n < 0:
raise ValueError("Cannot shift by a negative amount.")
if not self.len:
raise ValueError("Cannot shift an empty bitstring.")
n = min(n, self.len)
s = self._slice(n, self.len)
s._append(Bits(n))
return s
def __rshift__(self, n):
"""Return bitstring with bits shifted by n to the right.
n -- the number of bits to shift. Must be >= 0.
"""
if n < 0:
raise ValueError("Cannot shift by a negative amount.")
if not self.len:
raise ValueError("Cannot shift an empty bitstring.")
if not n:
return self._copy()
s = self.__class__(length=min(n, self.len))
s._append(self[:-n])
return s
def __mul__(self, n):
"""Return bitstring consisting of n concatenations of self.
Called for expression of the form 'a = b*3'.
n -- The number of concatenations. Must be >= 0.
"""
if n < 0:
raise ValueError("Cannot multiply by a negative integer.")
if not n:
return self.__class__()
s = self._copy()
s._imul(n)
return s
def __rmul__(self, n):
"""Return bitstring consisting of n concatenations of self.
Called for expressions of the form 'a = 3*b'.
n -- The number of concatenations. Must be >= 0.
"""
return self.__mul__(n)
def __and__(self, bs):
"""Bit-wise 'and' between two bitstrings. Returns new bitstring.
bs -- The bitstring to '&' with.
Raises ValueError if the two bitstrings have differing lengths.
"""
bs = Bits(bs)
if self.len != bs.len:
raise ValueError("Bitstrings must have the same length "
"for & operator.")
s = self._copy()
s._iand(bs)
return s
def __rand__(self, bs):
"""Bit-wise 'and' between two bitstrings. Returns new bitstring.
bs -- the bitstring to '&' with.
Raises ValueError if the two bitstrings have differing lengths.
"""
return self.__and__(bs)
def __or__(self, bs):
"""Bit-wise 'or' between two bitstrings. Returns new bitstring.
bs -- The bitstring to '|' with.
Raises ValueError if the two bitstrings have differing lengths.
"""
bs = Bits(bs)
if self.len != bs.len:
raise ValueError("Bitstrings must have the same length "
"for | operator.")
s = self._copy()
s._ior(bs)
return s
def __ror__(self, bs):
"""Bit-wise 'or' between two bitstrings. Returns new bitstring.
bs -- The bitstring to '|' with.
Raises ValueError if the two bitstrings have differing lengths.
"""
return self.__or__(bs)
def __xor__(self, bs):
"""Bit-wise 'xor' between two bitstrings. Returns new bitstring.
bs -- The bitstring to '^' with.
Raises ValueError if the two bitstrings have differing lengths.
"""
bs = Bits(bs)
if self.len != bs.len:
raise ValueError("Bitstrings must have the same length "
"for ^ operator.")
s = self._copy()
s._ixor(bs)
return s
def __rxor__(self, bs):
"""Bit-wise 'xor' between two bitstrings. Returns new bitstring.
bs -- The bitstring to '^' with.
Raises ValueError if the two bitstrings have differing lengths.
"""
return self.__xor__(bs)
def __contains__(self, bs):
"""Return whether bs is contained in the current bitstring.
bs -- The bitstring to search for.
"""
# Don't want to change pos
try:
pos = self._pos
except AttributeError:
pass
found = Bits.find(self, bs, bytealigned=False)
try:
self._pos = pos
except AttributeError:
pass
return bool(found)
def __hash__(self):
"""Return an integer hash of the object."""
# We can't in general hash the whole bitstring (it could take hours!)
# So instead take some bits from the start and end.
if self.len <= 160:
# Use the whole bitstring.
shorter = self
else:
# Take 10 bytes from start and end
shorter = self[:80] + self[-80:]
h = 0
for byte in shorter.tobytes():
try:
h = (h << 4) + ord(byte)
except TypeError:
# Python 3
h = (h << 4) + byte
g = h & 0xf0000000
if g & (1 << 31):
h ^= (g >> 24)
h ^= g
return h % 1442968193
# This is only used in Python 2.x...
def __nonzero__(self):
"""Return True if any bits are set to 1, otherwise return False."""
return self.any(True)
# ...whereas this is used in Python 3.x
__bool__ = __nonzero__
def _assertsanity(self):
"""Check internal self consistency as a debugging aid."""
assert self.len >= 0
assert 0 <= self._offset, "offset={0}".format(self._offset)
assert (self.len + self._offset + 7) // 8 == self._datastore.bytelength + self._datastore.byteoffset
return True
@classmethod
def _init_with_token(cls, name, token_length, value):
if token_length is not None:
token_length = int(token_length)
if token_length == 0:
return cls()
# For pad token just return the length in zero bits
if name == 'pad':
return cls(token_length)
if value is None:
if token_length is None:
error = "Token has no value ({0}=???).".format(name)
else:
error = "Token has no value ({0}:{1}=???).".format(name, token_length)
raise ValueError(error)
try:
b = cls(**{_tokenname_to_initialiser[name]: value})
except KeyError:
if name in ('se', 'ue', 'sie', 'uie'):
b = cls(**{name: int(value)})
elif name in ('uint', 'int', 'uintbe', 'intbe', 'uintle', 'intle', 'uintne', 'intne'):
b = cls(**{name: int(value), 'length': token_length})
elif name in ('float', 'floatbe', 'floatle', 'floatne'):
b = cls(**{name: float(value), 'length': token_length})
elif name == 'bool':
if value in (1, 'True', '1'):
b = cls(bool=True)
elif value in (0, 'False', '0'):
b = cls(bool=False)
else:
raise CreationError("bool token can only be 'True' or 'False'.")
else:
raise CreationError("Can't parse token name {0}.", name)
if token_length is not None and b.len != token_length:
msg = "Token with length {0} packed with value of length {1} ({2}:{3}={4})."
raise CreationError(msg, token_length, b.len, name, token_length, value)
return b
def _clear(self):
"""Reset the bitstring to an empty state."""
self._datastore = ByteStore(bytearray(0))
def _setauto(self, s, length, offset):
"""Set bitstring from a bitstring, file, bool, integer, array, iterable or string."""
# As s can be so many different things it's important to do the checks
# in the correct order, as some types are also other allowed types.
# So basestring must be checked before Iterable
# and bytes/bytearray before Iterable but after basestring!
if isinstance(s, Bits):
if length is None:
length = s.len - offset
self._setbytes_unsafe(s._datastore.rawbytes, length, s._offset + offset)
return
if isinstance(s, file):
if offset is None:
offset = 0
if length is None:
length = os.path.getsize(s.name) * 8 - offset
byteoffset, offset = divmod(offset, 8)
bytelength = (length + byteoffset * 8 + offset + 7) // 8 - byteoffset
m = MmapByteArray(s, bytelength, byteoffset)
if length + byteoffset * 8 + offset > m.filelength * 8:
raise CreationError("File is not long enough for specified "
"length and offset.")
self._datastore = ConstByteStore(m, length, offset)
return
if length is not None:
raise CreationError("The length keyword isn't applicable to this initialiser.")
if offset:
raise CreationError("The offset keyword isn't applicable to this initialiser.")
if isinstance(s, basestring):
bs = self._converttobitstring(s)
assert bs._offset == 0
self._setbytes_unsafe(bs._datastore.rawbytes, bs.length, 0)
return
if isinstance(s, (bytes, bytearray)):
self._setbytes_unsafe(bytearray(s), len(s) * 8, 0)
return
if isinstance(s, array.array):
b = s.tostring()
self._setbytes_unsafe(bytearray(b), len(b) * 8, 0)
return
if isinstance(s, numbers.Integral):
# Initialise with s zero bits.
if s < 0:
msg = "Can't create bitstring of negative length {0}."
raise CreationError(msg, s)
data = bytearray((s + 7) // 8)
self._datastore = ByteStore(data, s, 0)
return
if isinstance(s, collections.Iterable):
# Evaluate each item as True or False and set bits to 1 or 0.
self._setbin_unsafe(''.join(str(int(bool(x))) for x in s))
return
raise TypeError("Cannot initialise bitstring from {0}.".format(type(s)))
def _setfile(self, filename, length, offset):
"""Use file as source of bits."""
source = open(filename, 'rb')
if offset is None:
offset = 0
if length is None:
length = os.path.getsize(source.name) * 8 - offset
byteoffset, offset = divmod(offset, 8)
bytelength = (length + byteoffset * 8 + offset + 7) // 8 - byteoffset
m = MmapByteArray(source, bytelength, byteoffset)
if length + byteoffset * 8 + offset > m.filelength * 8:
raise CreationError("File is not long enough for specified "
"length and offset.")
self._datastore = ConstByteStore(m, length, offset)
def _setbytes_safe(self, data, length=None, offset=0):
"""Set the data from a string."""
data = bytearray(data)
if length is None:
# Use up to the end of the data
length = len(data)*8 - offset
self._datastore = ByteStore(data, length, offset)
else:
if length + offset > len(data) * 8:
msg = "Not enough data present. Need {0} bits, have {1}."
raise CreationError(msg, length + offset, len(data) * 8)
if length == 0:
self._datastore = ByteStore(bytearray(0))
else:
self._datastore = ByteStore(data, length, offset)
def _setbytes_unsafe(self, data, length, offset):
"""Unchecked version of _setbytes_safe."""
self._datastore = ByteStore(data[:], length, offset)
assert self._assertsanity()
def _readbytes(self, length, start):
"""Read bytes and return them. Note that length is in bits."""
assert length % 8 == 0
assert start + length <= self.len
if not (start + self._offset) % 8:
return bytes(self._datastore.getbyteslice((start + self._offset) // 8,
(start + self._offset + length) // 8))
return self._slice(start, start + length).tobytes()
def _getbytes(self):
"""Return the data as an ordinary string."""
if self.len % 8:
raise InterpretError("Cannot interpret as bytes unambiguously - "
"not multiple of 8 bits.")
return self._readbytes(self.len, 0)
def _setuint(self, uint, length=None):
"""Reset the bitstring to have given unsigned int interpretation."""
try:
if length is None:
# Use the whole length. Deliberately not using .len here.
length = self._datastore.bitlength
except AttributeError:
# bitstring doesn't have a _datastore as it hasn't been created!
pass
# TODO: All this checking code should be hoisted out of here!
if length is None or length == 0:
raise CreationError("A non-zero length must be specified with a "
"uint initialiser.")
if uint >= (1 << length):
msg = "{0} is too large an unsigned integer for a bitstring of length {1}. "\
"The allowed range is [0, {2}]."
raise CreationError(msg, uint, length, (1 << length) - 1)
if uint < 0:
raise CreationError("uint cannot be initialsed by a negative number.")
s = hex(uint)[2:]
s = s.rstrip('L')
if len(s) & 1:
s = '0' + s
try:
data = bytes.fromhex(s)
except AttributeError:
# the Python 2.x way
data = binascii.unhexlify(s)
# Now add bytes as needed to get the right length.
extrabytes = ((length + 7) // 8) - len(data)
if extrabytes > 0:
data = b'\x00' * extrabytes + data
offset = 8 - (length % 8)
if offset == 8:
offset = 0
self._setbytes_unsafe(bytearray(data), length, offset)
def _readuint(self, length, start):
"""Read bits and interpret as an unsigned int."""
if not length:
raise InterpretError("Cannot interpret a zero length bitstring "
"as an integer.")
offset = self._offset
startbyte = (start + offset) // 8
endbyte = (start + offset + length - 1) // 8
b = binascii.hexlify(bytes(self._datastore.getbyteslice(startbyte, endbyte + 1)))
assert b
i = int(b, 16)
final_bits = 8 - ((start + offset + length) % 8)
if final_bits != 8:
i >>= final_bits
i &= (1 << length) - 1
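# e.g. reading 12 bits at position 0 from the bytes 0xab 0xcd:
# b == b'abcd', i == 0xabcd, final_bits == 4, so i becomes 0xabc == 2748.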
return i
def _getuint(self):
"""Return data as an unsigned int."""
return self._readuint(self.len, 0)
def _setint(self, int_, length=None):
"""Reset the bitstring to have given signed int interpretation."""
# If no length given, and we've previously been given a length, use it.
if length is None and hasattr(self, 'len') and self.len != 0:
length = self.len
if length is None or length == 0:
raise CreationError("A non-zero length must be specified with an int initialiser.")
if int_ >= (1 << (length - 1)) or int_ < -(1 << (length - 1)):
raise CreationError("{0} is too large a signed integer for a bitstring of length {1}. "
"The allowed range is [{2}, {3}].", int_, length, -(1 << (length - 1)),
(1 << (length - 1)) - 1)
if int_ >= 0:
self._setuint(int_, length)
return
# TODO: We should decide whether to just use the _setuint, or to do the bit flipping,
# based upon which will be quicker. If the negative number is less than half the maximum
# possible then it's probably quicker to do the bit flipping...
# Do the 2's complement thing. Add one, set to minus number, then flip bits.
int_ += 1
self._setuint(-int_, length)
self._invert_all()
def _readint(self, length, start):
"""Read bits and interpret as a signed int"""
ui = self._readuint(length, start)
if not ui >> (length - 1):
# Top bit not set, number is positive
return ui
# Top bit is set, so number is negative
tmp = (~(ui - 1)) & ((1 << length) - 1)
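# e.g. length 4, ui == 0b1110: tmp == 2, so the result is -2.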
return -tmp
def _getint(self):
"""Return data as a two's complement signed int."""
return self._readint(self.len, 0)
def _setuintbe(self, uintbe, length=None):
"""Set the bitstring to a big-endian unsigned int interpretation."""
if length is not None and length % 8 != 0:
raise CreationError("Big-endian integers must be whole-byte. "
"Length = {0} bits.", length)
self._setuint(uintbe, length)
def _readuintbe(self, length, start):
"""Read bits and interpret as a big-endian unsigned int."""
if length % 8:
raise InterpretError("Big-endian integers must be whole-byte. "
"Length = {0} bits.", length)
return self._readuint(length, start)
def _getuintbe(self):
"""Return data as a big-endian two's complement unsigned int."""
return self._readuintbe(self.len, 0)
def _setintbe(self, intbe, length=None):
"""Set bitstring to a big-endian signed int interpretation."""
if length is not None and length % 8 != 0:
raise CreationError("Big-endian integers must be whole-byte. "
"Length = {0} bits.", length)
self._setint(intbe, length)
def _readintbe(self, length, start):
"""Read bits and interpret as a big-endian signed int."""
if length % 8:
raise InterpretError("Big-endian integers must be whole-byte. "
"Length = {0} bits.", length)
return self._readint(length, start)
def _getintbe(self):
"""Return data as a big-endian two's complement signed int."""
return self._readintbe(self.len, 0)
def _setuintle(self, uintle, length=None):
if length is not None and length % 8 != 0:
raise CreationError("Little-endian integers must be whole-byte. "
"Length = {0} bits.", length)
self._setuint(uintle, length)
self._reversebytes(0, self.len)
def _readuintle(self, length, start):
"""Read bits and interpret as a little-endian unsigned int."""
if length % 8:
raise InterpretError("Little-endian integers must be whole-byte. "
"Length = {0} bits.", length)
assert start + length <= self.len
absolute_pos = start + self._offset
startbyte, offset = divmod(absolute_pos, 8)
val = 0
if not offset:
endbyte = (absolute_pos + length - 1) // 8
chunksize = 4 # for 'L' format
while endbyte - chunksize + 1 >= startbyte:
val <<= 8 * chunksize
val += struct.unpack('<L', bytes(self._datastore.getbyteslice(endbyte + 1 - chunksize, endbyte + 1)))[0]
endbyte -= chunksize
for b in xrange(endbyte, startbyte - 1, -1):
val <<= 8
val += self._datastore.getbyte(b)
else:
data = self._slice(start, start + length)
assert data.len % 8 == 0
data._reversebytes(0, self.len)
for b in bytearray(data.bytes):
val <<= 8
val += b
return val
def _getuintle(self):
return self._readuintle(self.len, 0)
def _setintle(self, intle, length=None):
if length is not None and length % 8 != 0:
raise CreationError("Little-endian integers must be whole-byte. "
"Length = {0} bits.", length)
self._setint(intle, length)
self._reversebytes(0, self.len)
def _readintle(self, length, start):
"""Read bits and interpret as a little-endian signed int."""
ui = self._readuintle(length, start)
if not ui >> (length - 1):
# Top bit not set, number is positive
return ui
# Top bit is set, so number is negative
tmp = (~(ui - 1)) & ((1 << length) - 1)
return -tmp
def _getintle(self):
return self._readintle(self.len, 0)
def _setfloat(self, f, length=None):
# If no length given, and we've previously been given a length, use it.
if length is None and hasattr(self, 'len') and self.len != 0:
length = self.len
if length is None or length == 0:
raise CreationError("A non-zero length must be specified with a "
"float initialiser.")
if length == 32:
b = struct.pack('>f', f)
elif length == 64:
b = struct.pack('>d', f)
else:
raise CreationError("floats can only be 32 or 64 bits long, "
"not {0} bits", length)
self._setbytes_unsafe(bytearray(b), length, 0)
def _readfloat(self, length, start):
"""Read bits and interpret as a float."""
if not (start + self._offset) % 8:
startbyte = (start + self._offset) // 8
if length == 32:
f, = struct.unpack('>f', bytes(self._datastore.getbyteslice(startbyte, startbyte + 4)))
elif length == 64:
f, = struct.unpack('>d', bytes(self._datastore.getbyteslice(startbyte, startbyte + 8)))
else:
if length == 32:
f, = struct.unpack('>f', self._readbytes(32, start))
elif length == 64:
f, = struct.unpack('>d', self._readbytes(64, start))
try:
return f
except NameError:
raise InterpretError("floats can only be 32 or 64 bits long, not {0} bits", length)
def _getfloat(self):
"""Interpret the whole bitstring as a float."""
return self._readfloat(self.len, 0)
def _setfloatle(self, f, length=None):
# If no length given, and we've previously been given a length, use it.
if length is None and hasattr(self, 'len') and self.len != 0:
length = self.len
if length is None or length == 0:
raise CreationError("A non-zero length must be specified with a "
"float initialiser.")
if length == 32:
b = struct.pack('<f', f)
elif length == 64:
b = struct.pack('<d', f)
else:
raise CreationError("floats can only be 32 or 64 bits long, "
"not {0} bits", length)
self._setbytes_unsafe(bytearray(b), length, 0)
def _readfloatle(self, length, start):
"""Read bits and interpret as a little-endian float."""
startbyte, offset = divmod(start + self._offset, 8)
if not offset:
if length == 32:
f, = struct.unpack('<f', bytes(self._datastore.getbyteslice(startbyte, startbyte + 4)))
elif length == 64:
f, = struct.unpack('<d', bytes(self._datastore.getbyteslice(startbyte, startbyte + 8)))
else:
if length == 32:
f, = struct.unpack('<f', self._readbytes(32, start))
elif length == 64:
f, = struct.unpack('<d', self._readbytes(64, start))
try:
return f
except NameError:
raise InterpretError("floats can only be 32 or 64 bits long, "
"not {0} bits", length)
def _getfloatle(self):
"""Interpret the whole bitstring as a little-endian float."""
return self._readfloatle(self.len, 0)
def _setue(self, i):
"""Initialise bitstring with unsigned exponential-Golomb code for integer i.
Raises CreationError if i < 0.
"""
if i < 0:
raise CreationError("Cannot use negative initialiser for unsigned "
"exponential-Golomb.")
if not i:
self._setbin_unsafe('1')
return
tmp = i + 1
leadingzeros = -1
while tmp > 0:
tmp >>= 1
leadingzeros += 1
remainingpart = i + 1 - (1 << leadingzeros)
binstring = '0' * leadingzeros + '1' + Bits(uint=remainingpart,
length=leadingzeros).bin
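# e.g. i == 3: leadingzeros == 2, remainingpart == 0, binstring == '00100'.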
self._setbin_unsafe(binstring)
def _readue(self, pos):
"""Return interpretation of next bits as unsigned exponential-Golomb code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
"""
oldpos = pos
try:
while not self[pos]:
pos += 1
except IndexError:
raise ReadError("Read off end of bitstring trying to read code.")
leadingzeros = pos - oldpos
codenum = (1 << leadingzeros) - 1
if leadingzeros > 0:
if pos + leadingzeros + 1 > self.len:
raise ReadError("Read off end of bitstring trying to read code.")
codenum += self._readuint(leadingzeros, pos + 1)
pos += leadingzeros + 1
else:
assert codenum == 0
pos += 1
return codenum, pos
def _getue(self):
"""Return data as unsigned exponential-Golomb code.
Raises InterpretError if bitstring is not a single exponential-Golomb code.
"""
try:
value, newpos = self._readue(0)
if value is None or newpos != self.len:
raise ReadError
except ReadError:
raise InterpretError("Bitstring is not a single exponential-Golomb code.")
return value
def _setse(self, i):
"""Initialise bitstring with signed exponential-Golomb code for integer i."""
if i > 0:
u = (i * 2) - 1
else:
u = -2 * i
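# Mapping: 0 -> 0, 1 -> 1, -1 -> 2, 2 -> 3, -2 -> 4, ... which is then ue-encoded.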
self._setue(u)
def _getse(self):
"""Return data as signed exponential-Golomb code.
Raises InterpretError if bitstring is not a single exponential-Golomb code.
"""
try:
value, newpos = self._readse(0)
if value is None or newpos != self.len:
raise ReadError
except ReadError:
raise InterpretError("Bitstring is not a single exponential-Golomb code.")
return value
def _readse(self, pos):
"""Return interpretation of next bits as a signed exponential-Golomb code.
Advances position to after the read code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
"""
codenum, pos = self._readue(pos)
m = (codenum + 1) // 2
if not codenum % 2:
return -m, pos
else:
return m, pos
def _setuie(self, i):
"""Initialise bitstring with unsigned interleaved exponential-Golomb code for integer i.
Raises CreationError if i < 0.
"""
if i < 0:
raise CreationError("Cannot use negative initialiser for unsigned "
"interleaved exponential-Golomb.")
self._setbin_unsafe('1' if i == 0 else '0' + '0'.join(bin(i + 1)[3:]) + '1')
def _readuie(self, pos):
"""Return interpretation of next bits as unsigned interleaved exponential-Golomb code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
"""
try:
codenum = 1
while not self[pos]:
pos += 1
codenum <<= 1
codenum += self[pos]
pos += 1
pos += 1
except IndexError:
raise ReadError("Read off end of bitstring trying to read code.")
codenum -= 1
return codenum, pos
def _getuie(self):
"""Return data as unsigned interleaved exponential-Golomb code.
Raises InterpretError if bitstring is not a single exponential-Golomb code.
"""
try:
value, newpos = self._readuie(0)
if value is None or newpos != self.len:
raise ReadError
except ReadError:
raise InterpretError("Bitstring is not a single interleaved exponential-Golomb code.")
return value
def _setsie(self, i):
"""Initialise bitstring with signed interleaved exponential-Golomb code for integer i."""
if not i:
self._setbin_unsafe('1')
else:
self._setuie(abs(i))
self._append(Bits([i < 0]))
def _getsie(self):
"""Return data as signed interleaved exponential-Golomb code.
Raises InterpretError if bitstring is not a single exponential-Golomb code.
"""
try:
value, newpos = self._readsie(0)
if value is None or newpos != self.len:
raise ReadError
except ReadError:
raise InterpretError("Bitstring is not a single interleaved exponential-Golomb code.")
return value
def _readsie(self, pos):
"""Return interpretation of next bits as a signed interleaved exponential-Golomb code.
Advances position to after the read code.
Raises ReadError if the end of the bitstring is encountered while
reading the code.
"""
codenum, pos = self._readuie(pos)
if not codenum:
return 0, pos
try:
if self[pos]:
return -codenum, pos + 1
else:
return codenum, pos + 1
except IndexError:
raise ReadError("Read off end of bitstring trying to read code.")
def _setbool(self, value):
# We deliberately don't want to have implicit conversions to bool here.
# If we did then it would be difficult to deal with the 'False' string.
if value in (1, 'True'):
self._setbytes_unsafe(bytearray(b'\x80'), 1, 0)
elif value in (0, 'False'):
self._setbytes_unsafe(bytearray(b'\x00'), 1, 0)
else:
raise CreationError('Cannot initialise boolean with {0}.', value)
def _getbool(self):
if self.length != 1:
msg = "For a bool interpretation a bitstring must be 1 bit long, not {0} bits."
raise InterpretError(msg, self.length)
return self[0]
def _readbool(self, pos):
return self[pos], pos + 1
def _setbin_safe(self, binstring):
"""Reset the bitstring to the value given in binstring."""
binstring = tidy_input_string(binstring)
# remove any 0b if present
binstring = binstring.replace('0b', '')
self._setbin_unsafe(binstring)
def _setbin_unsafe(self, binstring):
"""Same as _setbin_safe, but input isn't sanity checked. binstring mustn't start with '0b'."""
length = len(binstring)
# pad with zeros up to byte boundary if needed
boundary = ((length + 7) // 8) * 8
padded_binstring = binstring + '0' * (boundary - length)\
if len(binstring) < boundary else binstring
try:
bytelist = [int(padded_binstring[x:x + 8], 2)
for x in xrange(0, len(padded_binstring), 8)]
except ValueError:
raise CreationError("Invalid character in bin initialiser {0}.", binstring)
self._setbytes_unsafe(bytearray(bytelist), length, 0)
def _readbin(self, length, start):
"""Read bits and interpret as a binary string."""
if not length:
return ''
# Get the byte slice containing our bit slice
startbyte, startoffset = divmod(start + self._offset, 8)
endbyte = (start + self._offset + length - 1) // 8
b = self._datastore.getbyteslice(startbyte, endbyte + 1)
# Convert to a string of '0' and '1's (via a hex string and an int!)
try:
c = "{:0{}b}".format(int(binascii.hexlify(b), 16), 8*len(b))
except TypeError:
# Hack to get Python 2.6 working
c = "{0:0{1}b}".format(int(binascii.hexlify(str(b)), 16), 8*len(b))
# Finally chop off any extra bits.
return c[startoffset:startoffset + length]
def _getbin(self):
"""Return interpretation as a binary string."""
return self._readbin(self.len, 0)
def _setoct(self, octstring):
"""Reset the bitstring to have the value given in octstring."""
octstring = tidy_input_string(octstring)
# remove any 0o if present
octstring = octstring.replace('0o', '')
binlist = []
for i in octstring:
try:
if not 0 <= int(i) < 8:
raise ValueError
binlist.append(OCT_TO_BITS[int(i)])
except ValueError:
raise CreationError("Invalid symbol '{0}' in oct initialiser.", i)
self._setbin_unsafe(''.join(binlist))
def _readoct(self, length, start):
"""Read bits and interpret as an octal string."""
if length % 3:
raise InterpretError("Cannot convert to octal unambiguously - "
"not multiple of 3 bits.")
if not length:
return ''
# Get main octal bit by converting from int.
# Strip starting 0 or 0o depending on Python version.
end = oct(self._readuint(length, start))[LEADING_OCT_CHARS:]
if end.endswith('L'):
end = end[:-1]
middle = '0' * (length // 3 - len(end))
return middle + end
def _getoct(self):
"""Return interpretation as an octal string."""
return self._readoct(self.len, 0)
def _sethex(self, hexstring):
"""Reset the bitstring to have the value given in hexstring."""
hexstring = tidy_input_string(hexstring)
# remove any 0x if present
hexstring = hexstring.replace('0x', '')
length = len(hexstring)
if length % 2:
hexstring += '0'
try:
try:
data = bytearray.fromhex(hexstring)
except TypeError:
# Python 2.6 needs a unicode string (a bug). 2.7 and 3.x work fine.
data = bytearray.fromhex(unicode(hexstring))
except ValueError:
raise CreationError("Invalid symbol in hex initialiser.")
self._setbytes_unsafe(data, length * 4, 0)
def _readhex(self, length, start):
"""Read bits and interpret as a hex string."""
if length % 4:
raise InterpretError("Cannot convert to hex unambiguously - "
"not multiple of 4 bits.")
if not length:
return ''
s = self._slice(start, start + length).tobytes()
try:
s = s.hex() # Available in Python 3.5
except AttributeError:
# This monstrosity is the only thing I could get to work for both 2.6 and 3.1.
# TODO: Is utf-8 really what we mean here?
s = str(binascii.hexlify(s).decode('utf-8'))
# If there's one nibble too many then cut it off
return s[:-1] if (length // 4) % 2 else s
def _gethex(self):
"""Return the hexadecimal representation as a string prefixed with '0x'.
Raises an InterpretError if the bitstring's length is not a multiple of 4.
"""
return self._readhex(self.len, 0)
def _getoffset(self):
return self._datastore.offset
def _getlength(self):
"""Return the length of the bitstring in bits."""
return self._datastore.bitlength
def _ensureinmemory(self):
"""Ensure the data is held in memory, not in a file."""
self._setbytes_unsafe(self._datastore.getbyteslice(0, self._datastore.bytelength),
self.len, self._offset)
@classmethod
def _converttobitstring(cls, bs, offset=0, cache={}):
"""Convert bs to a bitstring and return it.
offset gives the suggested bit offset of first significant
bit, to optimise append etc.
"""
if isinstance(bs, Bits):
return bs
try:
return cache[(bs, offset)]
except KeyError:
if isinstance(bs, basestring):
b = cls()
try:
_, tokens = tokenparser(bs)
except ValueError as e:
raise CreationError(*e.args)
if tokens:
b._append(Bits._init_with_token(*tokens[0]))
b._datastore = offsetcopy(b._datastore, offset)
for token in tokens[1:]:
b._append(Bits._init_with_token(*token))
assert b._assertsanity()
assert b.len == 0 or b._offset == offset
if len(cache) < CACHE_SIZE:
cache[(bs, offset)] = b
return b
except TypeError:
# Unhashable type
pass
return cls(bs)
def _copy(self):
"""Create and return a new copy of the Bits (always in memory)."""
s_copy = self.__class__()
s_copy._setbytes_unsafe(self._datastore.getbyteslice(0, self._datastore.bytelength),
self.len, self._offset)
return s_copy
def _slice(self, start, end):
"""Used internally to get a slice, without error checking."""
if end == start:
return self.__class__()
offset = self._offset
startbyte, newoffset = divmod(start + offset, 8)
endbyte = (end + offset - 1) // 8
bs = self.__class__()
bs._setbytes_unsafe(self._datastore.getbyteslice(startbyte, endbyte + 1), end - start, newoffset)
return bs
def _readtoken(self, name, pos, length):
"""Reads a token from the bitstring and returns the result."""
if length is not None and int(length) > self.length - pos:
raise ReadError("Reading off the end of the data. "
"Tried to read {0} bits when only {1} available.".format(int(length), self.length - pos))
try:
val = name_to_read[name](self, length, pos)
return val, pos + length
except KeyError:
if name == 'pad':
return None, pos + length
raise ValueError("Can't parse token {0}:{1}".format(name, length))
except TypeError:
# This is for the 'ue', 'se', 'uie', 'sie' and 'bool' tokens. They will also return the new pos.
return name_to_read[name](self, pos)
def _append(self, bs):
"""Append a bitstring to the current bitstring."""
self._datastore._appendstore(bs._datastore)
def _prepend(self, bs):
"""Prepend a bitstring to the current bitstring."""
self._datastore._prependstore(bs._datastore)
def _reverse(self):
"""Reverse all bits in-place."""
# Reverse the contents of each byte
n = [BYTE_REVERSAL_DICT[b] for b in self._datastore.rawbytes]
# Then reverse the order of the bytes
n.reverse()
# The new offset is the number of bits that were unused at the end.
newoffset = 8 - (self._offset + self.len) % 8
if newoffset == 8:
newoffset = 0
self._setbytes_unsafe(bytearray().join(n), self.length, newoffset)
def _truncatestart(self, bits):
"""Truncate bits from the start of the bitstring."""
assert 0 <= bits <= self.len
if not bits:
return
if bits == self.len:
self._clear()
return
bytepos, offset = divmod(self._offset + bits, 8)
self._setbytes_unsafe(self._datastore.getbyteslice(bytepos, self._datastore.bytelength), self.len - bits,
offset)
assert self._assertsanity()
def _truncateend(self, bits):
"""Truncate bits from the end of the bitstring."""
assert 0 <= bits <= self.len
if not bits:
return
if bits == self.len:
self._clear()
return
newlength_in_bytes = (self._offset + self.len - bits + 7) // 8
self._setbytes_unsafe(self._datastore.getbyteslice(0, newlength_in_bytes), self.len - bits,
self._offset)
assert self._assertsanity()
def _insert(self, bs, pos):
"""Insert bs at pos."""
assert 0 <= pos <= self.len
if pos > self.len // 2:
# Inserting nearer end, so cut off end.
end = self._slice(pos, self.len)
self._truncateend(self.len - pos)
self._append(bs)
self._append(end)
else:
# Inserting nearer start, so cut off start.
start = self._slice(0, pos)
self._truncatestart(pos)
self._prepend(bs)
self._prepend(start)
try:
self._pos = pos + bs.len
except AttributeError:
pass
assert self._assertsanity()
def _overwrite(self, bs, pos):
"""Overwrite with bs at pos."""
assert 0 <= pos < self.len
if bs is self:
# Just overwriting with self, so do nothing.
assert pos == 0
return
firstbytepos = (self._offset + pos) // 8
lastbytepos = (self._offset + pos + bs.len - 1) // 8
bytepos, bitoffset = divmod(self._offset + pos, 8)
if firstbytepos == lastbytepos:
mask = ((1 << bs.len) - 1) << (8 - bs.len - bitoffset)
self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) & (~mask))
d = offsetcopy(bs._datastore, bitoffset)
self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) | (d.getbyte(0) & mask))
else:
# Do first byte
mask = (1 << (8 - bitoffset)) - 1
self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) & (~mask))
d = offsetcopy(bs._datastore, bitoffset)
self._datastore.setbyte(bytepos, self._datastore.getbyte(bytepos) | (d.getbyte(0) & mask))
# Now do all the full bytes
self._datastore.setbyteslice(firstbytepos + 1, lastbytepos, d.getbyteslice(1, lastbytepos - firstbytepos))
# and finally the last byte
bitsleft = (self._offset + pos + bs.len) % 8
if not bitsleft:
bitsleft = 8
mask = (1 << (8 - bitsleft)) - 1
self._datastore.setbyte(lastbytepos, self._datastore.getbyte(lastbytepos) & mask)
self._datastore.setbyte(lastbytepos,
self._datastore.getbyte(lastbytepos) | (d.getbyte(d.bytelength - 1) & ~mask))
assert self._assertsanity()
def _delete(self, bits, pos):
"""Delete bits at pos."""
assert 0 <= pos <= self.len
assert pos + bits <= self.len
if not pos:
# Cutting bits off at the start.
self._truncatestart(bits)
return
if pos + bits == self.len:
# Cutting bits off at the end.
self._truncateend(bits)
return
if pos > self.len - pos - bits:
# More bits before cut point than after it, so do bit shifting
# on the final bits.
end = self._slice(pos + bits, self.len)
assert self.len - pos > 0
self._truncateend(self.len - pos)
self._append(end)
return
# More bits after the cut point than before it.
start = self._slice(0, pos)
self._truncatestart(pos + bits)
self._prepend(start)
return
def _reversebytes(self, start, end):
"""Reverse bytes in-place."""
# Make the start occur on a byte boundary
# TODO: We could be cleverer here to avoid changing the offset.
newoffset = 8 - (start % 8)
if newoffset == 8:
newoffset = 0
self._datastore = offsetcopy(self._datastore, newoffset)
# Now just reverse the byte data
toreverse = bytearray(self._datastore.getbyteslice((newoffset + start) // 8, (newoffset + end) // 8))
toreverse.reverse()
self._datastore.setbyteslice((newoffset + start) // 8, (newoffset + end) // 8, toreverse)
def _set(self, pos):
"""Set bit at pos to 1."""
assert 0 <= pos < self.len
self._datastore.setbit(pos)
def _unset(self, pos):
"""Set bit at pos to 0."""
assert 0 <= pos < self.len
self._datastore.unsetbit(pos)
def _invert(self, pos):
"""Flip bit at pos 1<->0."""
assert 0 <= pos < self.len
self._datastore.invertbit(pos)
def _invert_all(self):
"""Invert every bit."""
set = self._datastore.setbyte
get = self._datastore.getbyte
for p in xrange(self._datastore.byteoffset, self._datastore.byteoffset + self._datastore.bytelength):
set(p, 256 + ~get(p))
def _ilshift(self, n):
"""Shift bits by n to the left in place. Return self."""
assert 0 < n <= self.len
self._append(Bits(n))
self._truncatestart(n)
return self
def _irshift(self, n):
"""Shift bits by n to the right in place. Return self."""
assert 0 < n <= self.len
self._prepend(Bits(n))
self._truncateend(n)
return self
def _imul(self, n):
"""Concatenate n copies of self in place. Return self."""
assert n >= 0
if not n:
self._clear()
return self
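# Build the result by repeated doubling, then append the remaining
# (n - m) copies in one go by slicing what has already been built.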
m = 1
old_len = self.len
while m * 2 < n:
self._append(self)
m *= 2
self._append(self[0:(n - m) * old_len])
return self
def _inplace_logical_helper(self, bs, f):
"""Helper function containing most of the __ior__, __iand__, __ixor__ code."""
# Give the two bitstrings the same offset (modulo 8)
self_byteoffset, self_bitoffset = divmod(self._offset, 8)
bs_byteoffset, bs_bitoffset = divmod(bs._offset, 8)
if bs_bitoffset != self_bitoffset:
if not self_bitoffset:
bs._datastore = offsetcopy(bs._datastore, 0)
else:
self._datastore = offsetcopy(self._datastore, bs_bitoffset)
a = self._datastore.rawbytes
b = bs._datastore.rawbytes
for i in xrange(len(a)):
a[i] = f(a[i + self_byteoffset], b[i + bs_byteoffset])
return self
def _ior(self, bs):
return self._inplace_logical_helper(bs, operator.ior)
def _iand(self, bs):
return self._inplace_logical_helper(bs, operator.iand)
def _ixor(self, bs):
return self._inplace_logical_helper(bs, operator.xor)
def _readbits(self, length, start):
"""Read some bits from the bitstring and return newly constructed bitstring."""
return self._slice(start, start + length)
def _validate_slice(self, start, end):
"""Validate start and end and return them as positive bit positions."""
if start is None:
start = 0
elif start < 0:
start += self.len
if end is None:
end = self.len
elif end < 0:
end += self.len
if not 0 <= end <= self.len:
raise ValueError("end is not a valid position in the bitstring.")
if not 0 <= start <= self.len:
raise ValueError("start is not a valid position in the bitstring.")
if end < start:
raise ValueError("end must not be less than start.")
return start, end
def unpack(self, fmt, **kwargs):
"""Interpret the whole bitstring using fmt and return list.
fmt -- A single string or a list of strings with comma separated tokens
describing how to interpret the bits in the bitstring. Items
can also be integers, for reading new bitstring of the given length.
kwargs -- A dictionary or keyword-value pairs - the keywords used in the
format string will be replaced with their given value.
Raises ValueError if the format is not understood. If not enough bits
are available then all bits to the end of the bitstring will be used.
See the docstring for 'read' for token examples.
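For example:
>>> Bits('0x1f3e').unpack('uint:8, uint:8')
[31, 62]
>>> Bits('0x1f3e').unpack('bin:4, hex')
['0001', 'f3e']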
"""
return self._readlist(fmt, 0, **kwargs)[0]
def _readlist(self, fmt, pos, **kwargs):
tokens = []
stretchy_token = None
if isinstance(fmt, basestring):
fmt = [fmt]
# Not very optimal this, but replace integers with 'bits' tokens
# TODO: optimise
for i, f in enumerate(fmt):
if isinstance(f, numbers.Integral):
fmt[i] = "bits:{0}".format(f)
for f_item in fmt:
stretchy, tkns = tokenparser(f_item, tuple(sorted(kwargs.keys())))
if stretchy:
if stretchy_token:
raise Error("It's not possible to have more than one 'filler' token.")
stretchy_token = stretchy
tokens.extend(tkns)
if not stretchy_token:
lst = []
for name, length, _ in tokens:
if length in kwargs:
length = kwargs[length]
if name == 'bytes':
length *= 8
if name in kwargs and length is None:
# Using default 'uint' - the name is really the length.
value, pos = self._readtoken('uint', pos, kwargs[name])
lst.append(value)
continue
value, pos = self._readtoken(name, pos, length)
if value is not None: # Don't append pad tokens
lst.append(value)
return lst, pos
stretchy_token = False
bits_after_stretchy_token = 0
for token in tokens:
name, length, _ = token
if length in kwargs:
length = kwargs[length]
if name == 'bytes':
length *= 8
if name in kwargs and length is None:
# Default 'uint'.
length = kwargs[name]
if stretchy_token:
if name in ('se', 'ue', 'sie', 'uie'):
raise Error("It's not possible to parse a variable"
"length token after a 'filler' token.")
else:
if length is None:
raise Error("It's not possible to have more than "
"one 'filler' token.")
bits_after_stretchy_token += length
if length is None and name not in ('se', 'ue', 'sie', 'uie'):
assert not stretchy_token
stretchy_token = token
bits_left = self.len - pos
return_values = []
for token in tokens:
name, length, _ = token
if token is stretchy_token:
# Set length to the remaining bits
length = max(bits_left - bits_after_stretchy_token, 0)
if length in kwargs:
length = kwargs[length]
if name == 'bytes':
length *= 8
if name in kwargs and length is None:
# Default 'uint'
length = kwargs[name]
if length is not None:
bits_left -= length
value, pos = self._readtoken(name, pos, length)
if value is not None:
return_values.append(value)
return return_values, pos
def _findbytes(self, bytes_, start, end, bytealigned):
"""Quicker version of find when everything's whole byte
and byte aligned.
"""
assert self._datastore.offset == 0
assert bytealigned is True
# Extract data bytes from bitstring to be found.
bytepos = (start + 7) // 8
found = False
p = bytepos
finalpos = end // 8
increment = max(1024, len(bytes_) * 10)
buffersize = increment + len(bytes_)
while p < finalpos:
# Read in file or from memory in overlapping chunks and search the chunks.
buf = bytearray(self._datastore.getbyteslice(p, min(p + buffersize, finalpos)))
pos = buf.find(bytes_)
if pos != -1:
found = True
p += pos
break
p += increment
if not found:
return ()
return (p * 8,)
def _findregex(self, reg_ex, start, end, bytealigned):
"""Find first occurrence of a compiled regular expression.
Note that this doesn't support arbitrary regexes, in particular they
must match a known length.
"""
p = start
length = len(reg_ex.pattern)
# We grab overlapping chunks of the binary representation and
# do an ordinary string search within that.
increment = max(4096, length * 10)
buffersize = increment + length
while p < end:
buf = self._readbin(min(buffersize, end - p), p)
# Test using regular expressions...
m = reg_ex.search(buf)
if m:
pos = m.start()
# pos = buf.find(targetbin)
# if pos != -1:
# if bytealigned then we only accept byte aligned positions.
if not bytealigned or (p + pos) % 8 == 0:
return (p + pos,)
if bytealigned:
# Advance to just beyond the non-byte-aligned match and try again...
p += pos + 1
continue
p += increment
# Not found, return empty tuple
return ()
def find(self, bs, start=None, end=None, bytealigned=None):
"""Find first occurrence of substring bs.
Returns a single item tuple with the bit position if found, or an
empty tuple if not found. The bit position (pos property) will
also be set to the start of the substring if it is found.
bs -- The bitstring to find.
start -- The bit position to start the search. Defaults to 0.
end -- The bit position one past the last bit to search.
Defaults to self.len.
bytealigned -- If True the bitstring will only be
found on byte boundaries.
Raises ValueError if bs is empty, if start < 0, if end > self.len or
if end < start.
>>> BitArray('0xc3e').find('0b1111')
(6,)
"""
bs = Bits(bs)
if not bs.len:
raise ValueError("Cannot find an empty bitstring.")
start, end = self._validate_slice(start, end)
if bytealigned is None:
bytealigned = globals()['bytealigned']
if bytealigned and not bs.len % 8 and not self._datastore.offset:
p = self._findbytes(bs.bytes, start, end, bytealigned)
else:
p = self._findregex(re.compile(bs._getbin()), start, end, bytealigned)
# If called from a class that has a pos, set it
try:
self._pos = p[0]
except (AttributeError, IndexError):
pass
return p
def findall(self, bs, start=None, end=None, count=None, bytealigned=None):
"""Find all occurrences of bs. Return generator of bit positions.
bs -- The bitstring to find.
start -- The bit position to start the search. Defaults to 0.
end -- The bit position one past the last bit to search.
Defaults to self.len.
count -- The maximum number of occurrences to find.
bytealigned -- If True the bitstring will only be found on
byte boundaries.
Raises ValueError if bs is empty, if start < 0, if end > self.len or
if end < start.
Note that all occurrences of bs are found, even if they overlap.
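For example, overlapping occurrences are all found:
>>> list(Bits('0b10111011').findall('0b11'))
[2, 3, 6]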
"""
if count is not None and count < 0:
raise ValueError("In findall, count must be >= 0.")
bs = Bits(bs)
start, end = self._validate_slice(start, end)
if bytealigned is None:
bytealigned = globals()['bytealigned']
c = 0
if bytealigned and not bs.len % 8 and not self._datastore.offset:
# Use the quick find method
f = self._findbytes
x = bs._getbytes()
else:
f = self._findregex
x = re.compile(bs._getbin())
while True:
p = f(x, start, end, bytealigned)
if not p:
break
if count is not None and c >= count:
return
c += 1
try:
self._pos = p[0]
except AttributeError:
pass
yield p[0]
if bytealigned:
start = p[0] + 8
else:
start = p[0] + 1
if start >= end:
break
return
def rfind(self, bs, start=None, end=None, bytealigned=None):
"""Find final occurrence of substring bs.
Returns a single item tuple with the bit position if found, or an
empty tuple if not found. The bit position (pos property) will
also be set to the start of the substring if it is found.
bs -- The bitstring to find.
start -- The bit position to end the reverse search. Defaults to 0.
end -- The bit position one past the first bit to reverse search.
Defaults to self.len.
bytealigned -- If True the bitstring will only be found on byte
boundaries.
Raises ValueError if bs is empty, if start < 0, if end > self.len or
if end < start.
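For example:
>>> Bits('0xff01').rfind('0b1')
(15,)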
"""
bs = Bits(bs)
start, end = self._validate_slice(start, end)
if bytealigned is None:
bytealigned = globals()['bytealigned']
if not bs.len:
raise ValueError("Cannot find an empty bitstring.")
# Search chunks starting near the end and then moving back
# until we find bs.
increment = max(8192, bs.len * 80)
buffersize = min(increment + bs.len, end - start)
pos = max(start, end - buffersize)
while True:
found = list(self.findall(bs, start=pos, end=pos + buffersize,
bytealigned=bytealigned))
if not found:
if pos == start:
return ()
pos = max(start, pos - increment)
continue
return (found[-1],)
def cut(self, bits, start=None, end=None, count=None):
"""Return bitstring generator by cutting into bits sized chunks.
bits -- The size in bits of the bitstring chunks to generate.
start -- The bit position to start the first cut. Defaults to 0.
end -- The bit position one past the last bit to use in the cut.
Defaults to self.len.
count -- If specified then at most count items are generated.
Default is to cut as many times as possible.
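For example, cutting into byte-sized chunks:
>>> [b.hex for b in Bits('0x123456').cut(8)]
['12', '34', '56']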
"""
start, end = self._validate_slice(start, end)
if count is not None and count < 0:
raise ValueError("Cannot cut - count must be >= 0.")
if bits <= 0:
raise ValueError("Cannot cut - bits must be >= 0.")
c = 0
while count is None or c < count:
c += 1
nextchunk = self._slice(start, min(start + bits, end))
if nextchunk.len != bits:
return
assert nextchunk._assertsanity()
yield nextchunk
start += bits
return
def split(self, delimiter, start=None, end=None, count=None,
bytealigned=None):
"""Return bitstring generator by splittling using a delimiter.
The first item returned is the initial bitstring before the delimiter,
which may be an empty bitstring.
delimiter -- The bitstring used as the divider.
start -- The bit position to start the split. Defaults to 0.
end -- The bit position one past the last bit to use in the split.
Defaults to self.len.
count -- If specified then at most count items are generated.
Default is to split as many times as possible.
bytealigned -- If True splits will only occur on byte boundaries.
Raises ValueError if the delimiter is empty.
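For example, splitting on single set bits:
>>> [s.bin for s in Bits('0b00101001').split('0b1')]
['00', '10', '100', '1']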
"""
delimiter = Bits(delimiter)
if not delimiter.len:
raise ValueError("split delimiter cannot be empty.")
start, end = self._validate_slice(start, end)
if bytealigned is None:
bytealigned = globals()['bytealigned']
if count is not None and count < 0:
raise ValueError("Cannot split - count must be >= 0.")
if count == 0:
return
if bytealigned and not delimiter.len % 8 and not self._datastore.offset:
# Use the quick find method
f = self._findbytes
x = delimiter._getbytes()
else:
f = self._findregex
x = re.compile(delimiter._getbin())
found = f(x, start, end, bytealigned)
if not found:
# Initial bits are the whole bitstring being searched
yield self._slice(start, end)
return
# yield the bytes before the first occurrence of the delimiter, even if empty
yield self._slice(start, found[0])
startpos = pos = found[0]
c = 1
while count is None or c < count:
pos += delimiter.len
found = f(x, pos, end, bytealigned)
if not found:
# No more occurrences, so return the rest of the bitstring
yield self._slice(startpos, end)
return
c += 1
yield self._slice(startpos, found[0])
startpos = pos = found[0]
# Have generated count bitstrings, so time to quit.
return
def join(self, sequence):
"""Return concatenation of bitstrings joined by self.
sequence -- A sequence of bitstrings.
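For example:
>>> Bits('0b00').join(['0b1', '0b1', '0b1']).bin
'1001001'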
"""
s = self.__class__()
i = iter(sequence)
try:
s._append(Bits(next(i)))
while True:
n = next(i)
s._append(self)
s._append(Bits(n))
except StopIteration:
pass
return s
def tobytes(self):
"""Return the bitstring as bytes, padding with zero bits if needed.
Up to seven zero bits will be added at the end to byte align.
"""
d = offsetcopy(self._datastore, 0).rawbytes
# Need to ensure that unused bits at end are set to zero
unusedbits = 8 - self.len % 8
if unusedbits != 8:
d[-1] &= (0xff << unusedbits)
return bytes(d)
def tofile(self, f):
"""Write the bitstring to a file object, padding with zero bits if needed.
Up to seven zero bits will be added at the end to byte align.
"""
# If the bitstring is file based then we don't want to read it all
# in to memory.
chunksize = 1024 * 1024 # 1 MB chunks
if not self._offset:
a = 0
bytelen = self._datastore.bytelength
p = self._datastore.getbyteslice(a, min(a + chunksize, bytelen - 1))
while len(p) == chunksize:
f.write(p)
a += chunksize
p = self._datastore.getbyteslice(a, min(a + chunksize, bytelen - 1))
f.write(p)
# Now the final byte, ensuring that unused bits at end are set to 0.
bits_in_final_byte = self.len % 8
if not bits_in_final_byte:
bits_in_final_byte = 8
f.write(self[-bits_in_final_byte:].tobytes())
else:
# Really quite inefficient...
a = 0
b = a + chunksize * 8
while b <= self.len:
f.write(self._slice(a, b)._getbytes())
a += chunksize * 8
b += chunksize * 8
if a != self.len:
f.write(self._slice(a, self.len).tobytes())
def startswith(self, prefix, start=None, end=None):
"""Return whether the current bitstring starts with prefix.
prefix -- The bitstring to search for.
start -- The bit position to start from. Defaults to 0.
end -- The bit position to end at. Defaults to self.len.
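For example:
>>> Bits('0xef133').startswith('0xef')
True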
"""
prefix = Bits(prefix)
start, end = self._validate_slice(start, end)
if end < start + prefix.len:
return False
end = start + prefix.len
return self._slice(start, end) == prefix
def endswith(self, suffix, start=None, end=None):
"""Return whether the current bitstring ends with suffix.
suffix -- The bitstring to search for.
start -- The bit position to start from. Defaults to 0.
end -- The bit position to end at. Defaults to self.len.
"""
suffix = Bits(suffix)
start, end = self._validate_slice(start, end)
if start + suffix.len > end:
return False
start = end - suffix.len
return self._slice(start, end) == suffix
def all(self, value, pos=None):
"""Return True if one or many bits are all set to value.
value -- If value is True then checks for bits set to 1, otherwise
checks for bits set to 0.
pos -- An iterable of bit positions. Negative numbers are treated in
the same way as slice indices. Defaults to the whole bitstring.
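For example:
>>> Bits('0b1011').all(True)
False
>>> Bits('0b1011').all(True, [0, 2, 3])
True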
"""
value = bool(value)
length = self.len
if pos is None:
pos = xrange(self.len)
for p in pos:
if p < 0:
p += length
if not 0 <= p < length:
raise IndexError("Bit position {0} out of range.".format(p))
if self._datastore.getbit(p) is not value:
return False
return True
def any(self, value, pos=None):
"""Return True if any of one or many bits are set to value.
value -- If value is True then checks for bits set to 1, otherwise
checks for bits set to 0.
pos -- An iterable of bit positions. Negative numbers are treated in
the same way as slice indices. Defaults to the whole bitstring.
"""
value = bool(value)
length = self.len
if pos is None:
pos = xrange(self.len)
for p in pos:
if p < 0:
p += length
if not 0 <= p < length:
raise IndexError("Bit position {0} out of range.".format(p))
if self._datastore.getbit(p) is value:
return True
return False
def count(self, value):
"""Return count of total number of either zero or one bits.
value -- If True then bits set to 1 are counted, otherwise bits set
to 0 are counted.
>>> Bits('0xef').count(1)
7
"""
if not self.len:
return 0
# count the number of 1s (from which it's easy to work out the 0s).
# Don't count the final byte yet.
count = sum(BIT_COUNT[self._datastore.getbyte(i)] for i in xrange(self._datastore.bytelength - 1))
# adjust for bits at start that aren't part of the bitstring
if self._offset:
count -= BIT_COUNT[self._datastore.getbyte(0) >> (8 - self._offset)]
# and count the last 1 - 8 bits at the end.
endbits = self._datastore.bytelength * 8 - (self._offset + self.len)
count += BIT_COUNT[self._datastore.getbyte(self._datastore.bytelength - 1) >> endbits]
return count if value else self.len - count
# Create native-endian functions as aliases depending on the byteorder
if byteorder == 'little':
_setfloatne = _setfloatle
_readfloatne = _readfloatle
_getfloatne = _getfloatle
_setuintne = _setuintle
_readuintne = _readuintle
_getuintne = _getuintle
_setintne = _setintle
_readintne = _readintle
_getintne = _getintle
else:
_setfloatne = _setfloat
_readfloatne = _readfloat
_getfloatne = _getfloat
_setuintne = _setuintbe
_readuintne = _readuintbe
_getuintne = _getuintbe
_setintne = _setintbe
_readintne = _readintbe
_getintne = _getintbe
_offset = property(_getoffset)
len = property(_getlength,
doc="""The length of the bitstring in bits. Read only.
""")
length = property(_getlength,
doc="""The length of the bitstring in bits. Read only.
""")
bool = property(_getbool,
doc="""The bitstring as a bool (True or False). Read only.
""")
hex = property(_gethex,
doc="""The bitstring as a hexadecimal string. Read only.
""")
bin = property(_getbin,
doc="""The bitstring as a binary string. Read only.
""")
oct = property(_getoct,
doc="""The bitstring as an octal string. Read only.
""")
bytes = property(_getbytes,
doc="""The bitstring as a bytes object. Read only.
""")
int = property(_getint,
doc="""The bitstring as a two's complement signed int. Read only.
""")
uint = property(_getuint,
doc="""The bitstring as a two's complement unsigned int. Read only.
""")
float = property(_getfloat,
doc="""The bitstring as a floating point number. Read only.
""")
intbe = property(_getintbe,
doc="""The bitstring as a two's complement big-endian signed int. Read only.
""")
uintbe = property(_getuintbe,
doc="""The bitstring as a two's complement big-endian unsigned int. Read only.
""")
floatbe = property(_getfloat,
doc="""The bitstring as a big-endian floating point number. Read only.
""")
intle = property(_getintle,
doc="""The bitstring as a two's complement little-endian signed int. Read only.
""")
uintle = property(_getuintle,
doc="""The bitstring as a two's complement little-endian unsigned int. Read only.
""")
floatle = property(_getfloatle,
doc="""The bitstring as a little-endian floating point number. Read only.
""")
intne = property(_getintne,
doc="""The bitstring as a two's complement native-endian signed int. Read only.
""")
uintne = property(_getuintne,
doc="""The bitstring as a two's complement native-endian unsigned int. Read only.
""")
floatne = property(_getfloatne,
doc="""The bitstring as a native-endian floating point number. Read only.
""")
ue = property(_getue,
doc="""The bitstring as an unsigned exponential-Golomb code. Read only.
""")
se = property(_getse,
doc="""The bitstring as a signed exponential-Golomb code. Read only.
""")
uie = property(_getuie,
doc="""The bitstring as an unsigned interleaved exponential-Golomb code. Read only.
""")
sie = property(_getsie,
doc="""The bitstring as a signed interleaved exponential-Golomb code. Read only.
""")
# Dictionary that maps token names to the function that reads them.
name_to_read = {'uint': Bits._readuint,
'uintle': Bits._readuintle,
'uintbe': Bits._readuintbe,
'uintne': Bits._readuintne,
'int': Bits._readint,
'intle': Bits._readintle,
'intbe': Bits._readintbe,
'intne': Bits._readintne,
'float': Bits._readfloat,
'floatbe': Bits._readfloat, # floatbe is a synonym for float
'floatle': Bits._readfloatle,
'floatne': Bits._readfloatne,
'hex': Bits._readhex,
'oct': Bits._readoct,
'bin': Bits._readbin,
'bits': Bits._readbits,
'bytes': Bits._readbytes,
'ue': Bits._readue,
'se': Bits._readse,
'uie': Bits._readuie,
'sie': Bits._readsie,
'bool': Bits._readbool,
}
# Dictionaries for mapping init keywords with init functions.
init_with_length_and_offset = {'bytes': Bits._setbytes_safe,
'filename': Bits._setfile,
}
init_with_length_only = {'uint': Bits._setuint,
'int': Bits._setint,
'float': Bits._setfloat,
'uintbe': Bits._setuintbe,
'intbe': Bits._setintbe,
'floatbe': Bits._setfloat,
'uintle': Bits._setuintle,
'intle': Bits._setintle,
'floatle': Bits._setfloatle,
'uintne': Bits._setuintne,
'intne': Bits._setintne,
'floatne': Bits._setfloatne,
}
init_without_length_or_offset = {'bin': Bits._setbin_safe,
'hex': Bits._sethex,
'oct': Bits._setoct,
'ue': Bits._setue,
'se': Bits._setse,
'uie': Bits._setuie,
'sie': Bits._setsie,
'bool': Bits._setbool,
}
class BitArray(Bits):
"""A container holding a mutable sequence of bits.
Subclass of the immutable Bits class. Inherits all of its
methods (except __hash__) and adds mutating methods.
Mutating methods:
append() -- Append a bitstring.
byteswap() -- Change byte endianness in-place.
insert() -- Insert a bitstring.
invert() -- Flip bit(s) between one and zero.
overwrite() -- Overwrite a section with a new bitstring.
prepend() -- Prepend a bitstring.
replace() -- Replace occurrences of one bitstring with another.
reverse() -- Reverse bits in-place.
rol() -- Rotate bits to the left.
ror() -- Rotate bits to the right.
set() -- Set bit(s) to 1 or 0.
Methods inherited from Bits:
all() -- Check if all specified bits are set to 1 or 0.
any() -- Check if any of specified bits are set to 1 or 0.
count() -- Count the number of bits set to 1 or 0.
cut() -- Create generator of constant sized chunks.
endswith() -- Return whether the bitstring ends with a sub-string.
find() -- Find a sub-bitstring in the current bitstring.
findall() -- Find all occurrences of a sub-bitstring in the current bitstring.
join() -- Join bitstrings together using current bitstring.
rfind() -- Seek backwards to find a sub-bitstring.
split() -- Create generator of chunks split by a delimiter.
startswith() -- Return whether the bitstring starts with a sub-bitstring.
tobytes() -- Return bitstring as bytes, padding if needed.
tofile() -- Write bitstring to file, padding if needed.
unpack() -- Interpret bits using format string.
Special methods:
Mutating operators are available: [], <<=, >>=, +=, *=, &=, |= and ^=
in addition to the inherited [], ==, !=, +, *, ~, <<, >>, &, | and ^.
Properties:
bin -- The bitstring as a binary string.
bool -- For single bit bitstrings, interpret as True or False.
bytepos -- The current byte position in the bitstring.
bytes -- The bitstring as a bytes object.
float -- Interpret as a floating point number.
floatbe -- Interpret as a big-endian floating point number.
floatle -- Interpret as a little-endian floating point number.
floatne -- Interpret as a native-endian floating point number.
hex -- The bitstring as a hexadecimal string.
int -- Interpret as a two's complement signed integer.
intbe -- Interpret as a big-endian signed integer.
intle -- Interpret as a little-endian signed integer.
intne -- Interpret as a native-endian signed integer.
len -- Length of the bitstring in bits.
oct -- The bitstring as an octal string.
pos -- The current bit position in the bitstring.
se -- Interpret as a signed exponential-Golomb code.
ue -- Interpret as an unsigned exponential-Golomb code.
sie -- Interpret as a signed interleaved exponential-Golomb code.
uie -- Interpret as an unsigned interleaved exponential-Golomb code.
uint -- Interpret as a two's complement unsigned integer.
uintbe -- Interpret as a big-endian unsigned integer.
uintle -- Interpret as a little-endian unsigned integer.
uintne -- Interpret as a native-endian unsigned integer.
"""
__slots__ = ()
# As BitArray objects are mutable, we shouldn't allow them to be hashed.
__hash__ = None
def __init__(self, auto=None, length=None, offset=None, **kwargs):
"""Either specify an 'auto' initialiser:
auto -- a string of comma separated tokens, an integer, a file object,
a bytearray, a boolean iterable or another bitstring.
Or initialise via **kwargs with one (and only one) of:
bytes -- raw data as a string, for example read from a binary file.
bin -- binary string representation, e.g. '0b001010'.
hex -- hexadecimal string representation, e.g. '0x2ef'
oct -- octal string representation, e.g. '0o777'.
uint -- an unsigned integer.
int -- a signed integer.
float -- a floating point number.
uintbe -- an unsigned big-endian whole byte integer.
intbe -- a signed big-endian whole byte integer.
floatbe - a big-endian floating point number.
uintle -- an unsigned little-endian whole byte integer.
intle -- a signed little-endian whole byte integer.
floatle -- a little-endian floating point number.
uintne -- an unsigned native-endian whole byte integer.
intne -- a signed native-endian whole byte integer.
floatne -- a native-endian floating point number.
se -- a signed exponential-Golomb code.
ue -- an unsigned exponential-Golomb code.
sie -- a signed interleaved exponential-Golomb code.
uie -- an unsigned interleaved exponential-Golomb code.
bool -- a boolean (True or False).
filename -- a file which will be opened in binary read-only mode.
Other keyword arguments:
length -- length of the bitstring in bits, if needed and appropriate.
It must be supplied for all integer and float initialisers.
offset -- bit offset to the data. These offset bits are
ignored and this is intended for use when
initialising using 'bytes' or 'filename'.
"""
# For mutable BitArrays we always read in files to memory:
if not isinstance(self._datastore, ByteStore):
self._ensureinmemory()
def __new__(cls, auto=None, length=None, offset=None, **kwargs):
x = super(BitArray, cls).__new__(cls)
y = Bits.__new__(BitArray, auto, length, offset, **kwargs)
x._datastore = y._datastore
return x
def __iadd__(self, bs):
"""Append bs to current bitstring. Return self.
bs -- the bitstring to append.
"""
self.append(bs)
return self
def __copy__(self):
"""Return a new copy of the BitArray."""
s_copy = BitArray()
if not isinstance(self._datastore, ByteStore):
# Let them both point to the same (invariant) array.
# If either gets modified then at that point they'll be read into memory.
s_copy._datastore = self._datastore
else:
s_copy._datastore = copy.copy(self._datastore)
return s_copy
def __setitem__(self, key, value):
"""Set item or range to new value.
Indices and slices are in bits. A slice with a step other than 1
assigns to every step-th bit of the sliced range.
If the length of the bitstring is changed then pos will be moved
to after the inserted section, otherwise it will remain unchanged.
>>> s = BitArray('0xff')
>>> s[0:4] = '0xe'
>>> print s
0xef
>>> s[4:4] = '0x00'
>>> print s
0xe00f
"""
try:
# A slice
start, step = 0, 1
if key.step is not None:
step = key.step
except AttributeError:
# single element
if key < 0:
key += self.len
if not 0 <= key < self.len:
raise IndexError("Slice index out of range.")
if isinstance(value, numbers.Integral):
if not value:
self._unset(key)
return
if value in (1, -1):
self._set(key)
return
raise ValueError("Cannot set a single bit with integer {0}.".format(value))
value = Bits(value)
if value.len == 1:
# TODO: this can't be optimal
if value[0]:
self._set(key)
else:
self._unset(key)
else:
self._delete(1, key)
self._insert(value, key)
return
else:
if step != 1:
# convert to binary string and use string slicing
# TODO: Horribly inefficient
temp = list(self._getbin())
v = list(Bits(value)._getbin())
temp.__setitem__(key, v)
self._setbin_unsafe(''.join(temp))
return
# If value is an integer then we want to set the slice to that
# value rather than initialise a new bitstring of that length.
if not isinstance(value, numbers.Integral):
try:
# TODO: Better way than calling constructor here?
value = Bits(value)
except TypeError:
raise TypeError("Bitstring, integer or string expected. "
"Got {0}.".format(type(value)))
if key.start is not None:
start = key.start
if key.start < 0:
start += self.len
if start < 0:
start = 0
stop = self.len
if key.stop is not None:
stop = key.stop
if key.stop < 0:
stop += self.len
if start > stop:
# The standard behaviour for lists is to just insert at the
# start position if stop < start and step == 1.
stop = start
if isinstance(value, numbers.Integral):
if value >= 0:
value = self.__class__(uint=value, length=stop - start)
else:
value = self.__class__(int=value, length=stop - start)
stop = min(stop, self.len)
start = max(start, 0)
start = min(start, stop)
if (stop - start) == value.len:
if not value.len:
return
if step >= 0:
self._overwrite(value, start)
else:
self._overwrite(value.__getitem__(slice(None, None, 1)), start)
else:
# TODO: A delete then insert is wasteful - it could do unneeded shifts.
# Could be either overwrite + insert or overwrite + delete.
self._delete(stop - start, start)
if step >= 0:
self._insert(value, start)
else:
self._insert(value.__getitem__(slice(None, None, 1)), start)
# pos is now after the inserted piece.
return
def __delitem__(self, key):
"""Delete item or range.
Indices and slices are in bits. A slice with a step other than 1
deletes every step-th bit of the sliced range.
>>> a = BitArray('0x001122')
>>> del a[8:16]
>>> print a
0x0022
"""
try:
# A slice
start = 0
step = key.step if key.step is not None else 1
except AttributeError:
# single element
if key < 0:
key += self.len
if not 0 <= key < self.len:
raise IndexError("Slice index out of range.")
self._delete(1, key)
return
else:
if step != 1:
# convert to binary string and use string slicing
# TODO: Horribly inefficient
temp = list(self._getbin())
temp.__delitem__(key)
self._setbin_unsafe(''.join(temp))
return
stop = key.stop
if key.start is not None:
start = key.start
if key.start < 0 and stop is None:
start += self.len
if start < 0:
start = 0
if stop is None:
stop = self.len
if start > stop:
return
stop = min(stop, self.len)
start = max(start, 0)
start = min(start, stop)
self._delete(stop - start, start)
return
def __ilshift__(self, n):
"""Shift bits by n to the left in place. Return self.
n -- the number of bits to shift. Must be >= 0.
"""
if n < 0:
raise ValueError("Cannot shift by a negative amount.")
if not self.len:
raise ValueError("Cannot shift an empty bitstring.")
if not n:
return self
n = min(n, self.len)
return self._ilshift(n)
def __irshift__(self, n):
"""Shift bits by n to the right in place. Return self.
n -- the number of bits to shift. Must be >= 0.
"""
if n < 0:
raise ValueError("Cannot shift by a negative amount.")
if not self.len:
raise ValueError("Cannot shift an empty bitstring.")
if not n:
return self
n = min(n, self.len)
return self._irshift(n)
def __imul__(self, n):
"""Concatenate n copies of self in place. Return self.
Called for expressions of the form 'a *= 3'.
n -- The number of concatenations. Must be >= 0.
"""
if n < 0:
raise ValueError("Cannot multiply by a negative integer.")
return self._imul(n)
def __ior__(self, bs):
bs = Bits(bs)
if self.len != bs.len:
raise ValueError("Bitstrings must have the same length "
"for |= operator.")
return self._ior(bs)
def __iand__(self, bs):
bs = Bits(bs)
if self.len != bs.len:
raise ValueError("Bitstrings must have the same length "
"for &= operator.")
return self._iand(bs)
def __ixor__(self, bs):
bs = Bits(bs)
if self.len != bs.len:
raise ValueError("Bitstrings must have the same length "
"for ^= operator.")
return self._ixor(bs)
def replace(self, old, new, start=None, end=None, count=None,
bytealigned=None):
"""Replace all occurrences of old with new in place.
Returns number of replacements made.
old -- The bitstring to replace.
new -- The replacement bitstring.
start -- Any occurrences that start before this will not be replaced.
Defaults to 0.
end -- Any occurrences that finish after this will not be replaced.
Defaults to self.len.
count -- The maximum number of replacements to make. Defaults to
replace all occurrences.
bytealigned -- If True replacements will only be made on byte
boundaries.
Raises ValueError if old is empty or if start or end are
out of range.
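For example:
>>> a = BitArray('0b0110')
>>> a.replace('0b1', '0b11')
2
>>> a.bin
'011110'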
"""
old = Bits(old)
new = Bits(new)
if not old.len:
raise ValueError("Empty bitstring cannot be replaced.")
start, end = self._validate_slice(start, end)
if bytealigned is None:
bytealigned = globals()['bytealigned']
# Adjust count for use in split()
if count is not None:
count += 1
sections = self.split(old, start, end, count, bytealigned)
lengths = [s.len for s in sections]
if len(lengths) == 1:
# Didn't find anything to replace.
return 0 # no replacements done
if new is self:
# Prevent self assignment woes
new = copy.copy(self)
positions = [lengths[0] + start]
for l in lengths[1:-1]:
# Next position is the previous one plus the length of the next section.
positions.append(positions[-1] + l)
# We have all the positions that need replacements. We do them
# in reverse order so that they won't move around as we replace.
positions.reverse()
try:
# Need to calculate new pos, if this is a bitstream
newpos = self._pos
for p in positions:
self[p:p + old.len] = new
if old.len != new.len:
diff = new.len - old.len
for p in positions:
if p >= newpos:
continue
if p + old.len <= newpos:
newpos += diff
else:
newpos = p
self._pos = newpos
except AttributeError:
for p in positions:
self[p:p + old.len] = new
assert self._assertsanity()
return len(lengths) - 1
def insert(self, bs, pos=None):
"""Insert bs at bit position pos.
bs -- The bitstring to insert.
pos -- The bit position to insert at.
Raises ValueError if pos < 0 or pos > self.len.
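For example:
>>> a = BitArray('0x0011')
>>> a.insert('0xff', 8)
>>> a.hex
'00ff11'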
"""
bs = Bits(bs)
if not bs.len:
return self
if bs is self:
bs = self.__copy__()
if pos is None:
try:
pos = self._pos
except AttributeError:
raise TypeError("insert require a bit position for this type.")
if pos < 0:
pos += self.len
if not 0 <= pos <= self.len:
raise ValueError("Invalid insert position.")
self._insert(bs, pos)
def overwrite(self, bs, pos=None):
"""Overwrite with bs at bit position pos.
bs -- The bitstring to overwrite with.
pos -- The bit position to begin overwriting from.
Raises ValueError if pos < 0 or pos + bs.len > self.len
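For example:
>>> a = BitArray('0x00112233')
>>> a.overwrite('0xff', 8)
>>> a.hex
'00ff2233'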
"""
bs = Bits(bs)
if not bs.len:
return
if pos is None:
try:
pos = self._pos
except AttributeError:
raise TypeError("overwrite require a bit position for this type.")
if pos < 0:
pos += self.len
if pos < 0 or pos + bs.len > self.len:
raise ValueError("Overwrite exceeds boundary of bitstring.")
self._overwrite(bs, pos)
try:
self._pos = pos + bs.len
except AttributeError:
pass
def append(self, bs):
"""Append a bitstring to the current bitstring.
bs -- The bitstring to append.
"""
# The offset is a hint to make bs easily appendable.
bs = self._converttobitstring(bs, offset=(self.len + self._offset) % 8)
self._append(bs)
def prepend(self, bs):
"""Prepend a bitstring to the current bitstring.
bs -- The bitstring to prepend.
"""
bs = Bits(bs)
self._prepend(bs)
def reverse(self, start=None, end=None):
"""Reverse bits in-place.
start -- Position of first bit to reverse. Defaults to 0.
end -- One past the position of the last bit to reverse.
Defaults to self.len.
Using on an empty bitstring will have no effect.
Raises ValueError if start < 0, end > self.len or end < start.
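For example:
>>> a = BitArray('0b0011')
>>> a.reverse()
>>> a.bin
'1100'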
"""
start, end = self._validate_slice(start, end)
if start == 0 and end == self.len:
self._reverse()
return
s = self._slice(start, end)
s._reverse()
self[start:end] = s
def set(self, value, pos=None):
"""Set one or many bits to 1 or 0.
value -- If True bits are set to 1, otherwise they are set to 0.
pos -- Either a single bit position or an iterable of bit positions.
Negative numbers are treated in the same way as slice indices.
Defaults to the entire bitstring.
Raises IndexError if pos < -self.len or pos >= self.len.
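For example:
>>> a = BitArray('0b0000')
>>> a.set(True, [0, 3])
>>> a.bin
'1001'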
"""
f = self._set if value else self._unset
if pos is None:
pos = xrange(self.len)
try:
length = self.len
for p in pos:
if p < 0:
p += length
if not 0 <= p < length:
raise IndexError("Bit position {0} out of range.".format(p))
f(p)
except TypeError:
# Single pos
if pos < 0:
pos += self.len
if not 0 <= pos < length:
raise IndexError("Bit position {0} out of range.".format(pos))
f(pos)
def invert(self, pos=None):
"""Invert one or many bits from 0 to 1 or vice versa.
pos -- Either a single bit position or an iterable of bit positions.
Negative numbers are treated in the same way as slice indices.
Raises IndexError if pos < -self.len or pos >= self.len.
"""
if pos is None:
self._invert_all()
return
if not isinstance(pos, collections.Iterable):
pos = (pos,)
length = self.len
for p in pos:
if p < 0:
p += length
if not 0 <= p < length:
raise IndexError("Bit position {0} out of range.".format(p))
self._invert(p)
def ror(self, bits, start=None, end=None):
"""Rotate bits to the right in-place.
bits -- The number of bits to rotate by.
start -- Start of slice to rotate. Defaults to 0.
end -- End of slice to rotate. Defaults to self.len.
Raises ValueError if bits < 0.
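For example:
>>> a = BitArray('0b1000')
>>> a.ror(1)
>>> a.bin
'0100'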
"""
if not self.len:
raise Error("Cannot rotate an empty bitstring.")
if bits < 0:
raise ValueError("Cannot rotate right by negative amount.")
start, end = self._validate_slice(start, end)
bits %= (end - start)
if not bits:
return
rhs = self._slice(end - bits, end)
self._delete(bits, end - bits)
self._insert(rhs, start)
def rol(self, bits, start=None, end=None):
"""Rotate bits to the left in-place.
bits -- The number of bits to rotate by.
start -- Start of slice to rotate. Defaults to 0.
end -- End of slice to rotate. Defaults to self.len.
Raises ValueError if bits < 0.
"""
if not self.len:
raise Error("Cannot rotate an empty bitstring.")
if bits < 0:
raise ValueError("Cannot rotate left by negative amount.")
start, end = self._validate_slice(start, end)
bits %= (end - start)
if not bits:
return
lhs = self._slice(start, start + bits)
self._delete(bits, start)
self._insert(lhs, end - bits)
def byteswap(self, fmt=None, start=None, end=None, repeat=True):
"""Change the endianness in-place. Return number of repeats of fmt done.
fmt -- A compact structure string, an integer number of bytes or
an iterable of integers. Defaults to 0, which byte reverses the
whole bitstring.
start -- Start bit position, defaults to 0.
end -- End bit position, defaults to self.len.
repeat -- If True (the default) the byte swapping pattern is repeated
as much as possible.
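For example, swapping 16-bit words:
>>> a = BitArray('0x00112233')
>>> a.byteswap(2)
2
>>> a.hex
'11003322'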
"""
start, end = self._validate_slice(start, end)
if fmt is None or fmt == 0:
# reverse all of the whole bytes.
bytesizes = [(end - start) // 8]
elif isinstance(fmt, numbers.Integral):
if fmt < 0:
raise ValueError("Improper byte length {0}.".format(fmt))
bytesizes = [fmt]
elif isinstance(fmt, basestring):
m = STRUCT_PACK_RE.match(fmt)
if not m:
raise ValueError("Cannot parse format string {0}.".format(fmt))
# Split the format string into a list of 'q', '4h' etc.
formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt'))
# Now deal with multiplicative factors, 4h -> hhhh etc.
bytesizes = []
for f in formatlist:
if len(f) == 1:
bytesizes.append(PACK_CODE_SIZE[f])
else:
bytesizes.extend([PACK_CODE_SIZE[f[-1]]] * int(f[:-1]))
elif isinstance(fmt, collections.Iterable):
bytesizes = fmt
for bytesize in bytesizes:
if not isinstance(bytesize, numbers.Integral) or bytesize < 0:
raise ValueError("Improper byte length {0}.".format(bytesize))
else:
raise TypeError("Format must be an integer, string or iterable.")
repeats = 0
totalbitsize = 8 * sum(bytesizes)
if not totalbitsize:
return 0
if repeat:
# Try to repeat up to the end of the bitstring.
finalbit = end
else:
# Just try one (set of) byteswap(s).
finalbit = start + totalbitsize
for patternend in xrange(start + totalbitsize, finalbit + 1, totalbitsize):
bytestart = patternend - totalbitsize
for bytesize in bytesizes:
byteend = bytestart + bytesize * 8
self._reversebytes(bytestart, byteend)
bytestart += bytesize * 8
repeats += 1
return repeats
def clear(self):
"""Remove all bits, reset to zero length."""
self._clear()
def copy(self):
"""Return a copy of the bitstring."""
return self._copy()
int = property(Bits._getint, Bits._setint,
doc="""The bitstring as a two's complement signed int. Read and write.
""")
uint = property(Bits._getuint, Bits._setuint,
doc="""The bitstring as a two's complement unsigned int. Read and write.
""")
float = property(Bits._getfloat, Bits._setfloat,
doc="""The bitstring as a floating point number. Read and write.
""")
intbe = property(Bits._getintbe, Bits._setintbe,
doc="""The bitstring as a two's complement big-endian signed int. Read and write.
""")
uintbe = property(Bits._getuintbe, Bits._setuintbe,
doc="""The bitstring as a two's complement big-endian unsigned int. Read and write.
""")
floatbe = property(Bits._getfloat, Bits._setfloat,
doc="""The bitstring as a big-endian floating point number. Read and write.
""")
intle = property(Bits._getintle, Bits._setintle,
doc="""The bitstring as a two's complement little-endian signed int. Read and write.
""")
uintle = property(Bits._getuintle, Bits._setuintle,
doc="""The bitstring as a two's complement little-endian unsigned int. Read and write.
""")
floatle = property(Bits._getfloatle, Bits._setfloatle,
doc="""The bitstring as a little-endian floating point number. Read and write.
""")
intne = property(Bits._getintne, Bits._setintne,
doc="""The bitstring as a two's complement native-endian signed int. Read and write.
""")
uintne = property(Bits._getuintne, Bits._setuintne,
doc="""The bitstring as a two's complement native-endian unsigned int. Read and write.
""")
floatne = property(Bits._getfloatne, Bits._setfloatne,
doc="""The bitstring as a native-endian floating point number. Read and write.
""")
ue = property(Bits._getue, Bits._setue,
doc="""The bitstring as an unsigned exponential-Golomb code. Read and write.
""")
se = property(Bits._getse, Bits._setse,
doc="""The bitstring as a signed exponential-Golomb code. Read and write.
""")
uie = property(Bits._getuie, Bits._setuie,
doc="""The bitstring as an unsigned interleaved exponential-Golomb code. Read and write.
""")
sie = property(Bits._getsie, Bits._setsie,
doc="""The bitstring as a signed interleaved exponential-Golomb code. Read and write.
""")
hex = property(Bits._gethex, Bits._sethex,
doc="""The bitstring as a hexadecimal string. Read and write.
""")
bin = property(Bits._getbin, Bits._setbin_safe,
doc="""The bitstring as a binary string. Read and write.
""")
oct = property(Bits._getoct, Bits._setoct,
doc="""The bitstring as an octal string. Read and write.
""")
bool = property(Bits._getbool, Bits._setbool,
doc="""The bitstring as a bool (True or False). Read and write.
""")
bytes = property(Bits._getbytes, Bits._setbytes_safe,
doc="""The bitstring as a ordinary string. Read and write.
""")
class ConstBitStream(Bits):
"""A container or stream holding an immutable sequence of bits.
For a mutable container use the BitStream class instead.
Methods inherited from Bits:
all() -- Check if all specified bits are set to 1 or 0.
any() -- Check if any of specified bits are set to 1 or 0.
count() -- Count the number of bits set to 1 or 0.
cut() -- Create generator of constant sized chunks.
endswith() -- Return whether the bitstring ends with a sub-string.
find() -- Find a sub-bitstring in the current bitstring.
findall() -- Find all occurrences of a sub-bitstring in the current bitstring.
join() -- Join bitstrings together using current bitstring.
rfind() -- Seek backwards to find a sub-bitstring.
split() -- Create generator of chunks split by a delimiter.
startswith() -- Return whether the bitstring starts with a sub-bitstring.
tobytes() -- Return bitstring as bytes, padding if needed.
tofile() -- Write bitstring to file, padding if needed.
unpack() -- Interpret bits using format string.
Other methods:
bytealign() -- Align to next byte boundary.
peek() -- Peek at and interpret next bits as a single item.
peeklist() -- Peek at and interpret next bits as a list of items.
read() -- Read and interpret next bits as a single item.
readlist() -- Read and interpret next bits as a list of items.
Special methods:
Also available are the operators [], ==, !=, +, *, ~, <<, >>, &, |, ^.
Properties:
bin -- The bitstring as a binary string.
bool -- For single bit bitstrings, interpret as True or False.
bytepos -- The current byte position in the bitstring.
bytes -- The bitstring as a bytes object.
float -- Interpret as a floating point number.
floatbe -- Interpret as a big-endian floating point number.
floatle -- Interpret as a little-endian floating point number.
floatne -- Interpret as a native-endian floating point number.
hex -- The bitstring as a hexadecimal string.
int -- Interpret as a two's complement signed integer.
intbe -- Interpret as a big-endian signed integer.
intle -- Interpret as a little-endian signed integer.
intne -- Interpret as a native-endian signed integer.
len -- Length of the bitstring in bits.
oct -- The bitstring as an octal string.
pos -- The current bit position in the bitstring.
se -- Interpret as a signed exponential-Golomb code.
ue -- Interpret as an unsigned exponential-Golomb code.
sie -- Interpret as a signed interleaved exponential-Golomb code.
uie -- Interpret as an unsigned interleaved exponential-Golomb code.
uint -- Interpret as a two's complement unsigned integer.
uintbe -- Interpret as a big-endian unsigned integer.
uintle -- Interpret as a little-endian unsigned integer.
uintne -- Interpret as a native-endian unsigned integer.
"""
__slots__ = ('_pos')
def __init__(self, auto=None, length=None, offset=None, **kwargs):
"""Either specify an 'auto' initialiser:
auto -- a string of comma separated tokens, an integer, a file object,
a bytearray, a boolean iterable or another bitstring.
Or initialise via **kwargs with one (and only one) of:
bytes -- raw data as a string, for example read from a binary file.
bin -- binary string representation, e.g. '0b001010'.
hex -- hexadecimal string representation, e.g. '0x2ef'
oct -- octal string representation, e.g. '0o777'.
uint -- an unsigned integer.
int -- a signed integer.
float -- a floating point number.
uintbe -- an unsigned big-endian whole byte integer.
intbe -- a signed big-endian whole byte integer.
floatbe - a big-endian floating point number.
uintle -- an unsigned little-endian whole byte integer.
intle -- a signed little-endian whole byte integer.
floatle -- a little-endian floating point number.
uintne -- an unsigned native-endian whole byte integer.
intne -- a signed native-endian whole byte integer.
floatne -- a native-endian floating point number.
se -- a signed exponential-Golomb code.
ue -- an unsigned exponential-Golomb code.
sie -- a signed interleaved exponential-Golomb code.
uie -- an unsigned interleaved exponential-Golomb code.
bool -- a boolean (True or False).
filename -- a file which will be opened in binary read-only mode.
Other keyword arguments:
length -- length of the bitstring in bits, if needed and appropriate.
It must be supplied for all integer and float initialisers.
offset -- bit offset to the data. These offset bits are
ignored and this is intended for use when
initialising using 'bytes' or 'filename'.
"""
self._pos = 0
def __new__(cls, auto=None, length=None, offset=None, **kwargs):
x = super(ConstBitStream, cls).__new__(cls)
x._initialise(auto, length, offset, **kwargs)
return x
def _setbytepos(self, bytepos):
"""Move to absolute byte-aligned position in stream."""
self._setbitpos(bytepos * 8)
def _getbytepos(self):
"""Return the current position in the stream in bytes. Must be byte aligned."""
if self._pos % 8:
raise ByteAlignError("Not byte aligned in _getbytepos().")
return self._pos // 8
def _setbitpos(self, pos):
"""Move to absolute postion bit in bitstream."""
if pos < 0:
raise ValueError("Bit position cannot be negative.")
if pos > self.len:
raise ValueError("Cannot seek past the end of the data.")
self._pos = pos
def _getbitpos(self):
"""Return the current position in the stream in bits."""
return self._pos
def _clear(self):
Bits._clear(self)
self._pos = 0
def __copy__(self):
"""Return a new copy of the ConstBitStream for the copy module."""
# Note that if you want a new copy (different ID), use _copy instead.
# The copy can use the same datastore as it's immutable.
s = ConstBitStream()
s._datastore = self._datastore
# Reset the bit position, don't copy it.
s._pos = 0
return s
def __add__(self, bs):
"""Concatenate bitstrings and return new bitstring.
bs -- the bitstring to append.
"""
s = Bits.__add__(self, bs)
s._pos = 0
return s
def read(self, fmt):
"""Interpret next bits according to the format string and return result.
fmt -- Token string describing how to interpret the next bits.
Token examples: 'int:12' : 12 bits as a signed integer
'uint:8' : 8 bits as an unsigned integer
'float:64' : 8 bytes as a big-endian float
'intbe:16' : 2 bytes as a big-endian signed integer
'uintbe:16' : 2 bytes as a big-endian unsigned integer
'intle:32' : 4 bytes as a little-endian signed integer
'uintle:32' : 4 bytes as a little-endian unsigned integer
'floatle:64': 8 bytes as a little-endian float
'intne:24' : 3 bytes as a native-endian signed integer
'uintne:24' : 3 bytes as a native-endian unsigned integer
'floatne:32': 4 bytes as a native-endian float
'hex:80' : 80 bits as a hex string
'oct:9' : 9 bits as an octal string
'bin:1' : single bit binary string
'ue' : next bits as unsigned exp-Golomb code
'se' : next bits as signed exp-Golomb code
'uie' : next bits as unsigned interleaved exp-Golomb code
'sie' : next bits as signed interleaved exp-Golomb code
'bits:5' : 5 bits as a bitstring
'bytes:10' : 10 bytes as a bytes object
'bool' : 1 bit as a bool
'pad:3' : 3 bits of padding to ignore - returns None
fmt may also be an integer, which will be treated like the 'bits' token.
The position in the bitstring is advanced to after the read items.
Raises ReadError if not enough bits are available.
Raises ValueError if the format is not understood.
"""
if isinstance(fmt, numbers.Integral):
if fmt < 0:
raise ValueError("Cannot read negative amount.")
if fmt > self.len - self._pos:
raise ReadError("Cannot read {0} bits, only {1} available.",
fmt, self.len - self._pos)
bs = self._slice(self._pos, self._pos + fmt)
self._pos += fmt
return bs
p = self._pos
_, token = tokenparser(fmt)
if len(token) != 1:
self._pos = p
raise ValueError("Format string should be a single token, not {0} "
"tokens - use readlist() instead.".format(len(token)))
name, length, _ = token[0]
if length is None:
length = self.len - self._pos
value, self._pos = self._readtoken(name, self._pos, length)
return value
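    # A short usage sketch for read(), written as comments with illustrative
    # values so that importing the module is unaffected:
    #   >>> s = ConstBitStream('0x1234ff')
    #   >>> s.read('uint:8')
    #   18
    #   >>> s.read('hex:8')
    #   '34'
    #   >>> s.read(8) == ConstBitStream('0xff')   # integer fmt: that many bits as a bitstring
    #   True
    #   >>> s.pos
    #   24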
def readlist(self, fmt, **kwargs):
"""Interpret next bits according to format string(s) and return list.
fmt -- A single string or list of strings with comma separated tokens
describing how to interpret the next bits in the bitstring. Items
can also be integers, for reading new bitstring of the given length.
kwargs -- A dictionary or keyword-value pairs - the keywords used in the
format string will be replaced with their given value.
The position in the bitstring is advanced to after the read items.
        Raises ReadError if not enough bits are available.
Raises ValueError if the format is not understood.
See the docstring for 'read' for token examples. 'pad' tokens are skipped
and not added to the returned list.
>>> h, b1, b2 = s.readlist('hex:20, bin:5, bin:3')
>>> i, bs1, bs2 = s.readlist(['uint:12', 10, 10])
"""
value, self._pos = self._readlist(fmt, self._pos, **kwargs)
return value
def readto(self, bs, bytealigned=None):
"""Read up to and including next occurrence of bs and return result.
bs -- The bitstring to find. An integer is not permitted.
bytealigned -- If True the bitstring will only be
found on byte boundaries.
Raises ValueError if bs is empty.
Raises ReadError if bs is not found.
"""
if isinstance(bs, numbers.Integral):
raise ValueError("Integers cannot be searched for")
bs = Bits(bs)
oldpos = self._pos
p = self.find(bs, self._pos, bytealigned=bytealigned)
if not p:
raise ReadError("Substring not found")
self._pos += bs.len
return self._slice(oldpos, self._pos)
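    # Usage sketch for readto(), as comments with illustrative values:
    #   >>> s = ConstBitStream('0x00ff01')
    #   >>> s.readto('0xff', bytealigned=True) == ConstBitStream('0x00ff')
    #   True
    #   >>> s.pos     # positioned just after the found substring
    #   16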
def peek(self, fmt):
"""Interpret next bits according to format string and return result.
fmt -- Token string describing how to interpret the next bits.
The position in the bitstring is not changed. If not enough bits are
available then all bits to the end of the bitstring will be used.
Raises ReadError if not enough bits are available.
Raises ValueError if the format is not understood.
See the docstring for 'read' for token examples.
"""
pos_before = self._pos
value = self.read(fmt)
self._pos = pos_before
return value
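    # Usage sketch contrasting peek() with read(), as comments with illustrative values:
    #   >>> s = ConstBitStream('0x4f')
    #   >>> s.peek('uint:8'), s.pos    # peek leaves the position unchanged
    #   (79, 0)
    #   >>> s.read('uint:8'), s.pos    # read advances it
    #   (79, 8)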
def peeklist(self, fmt, **kwargs):
"""Interpret next bits according to format string(s) and return list.
fmt -- One or more strings with comma separated tokens describing
how to interpret the next bits in the bitstring.
kwargs -- A dictionary or keyword-value pairs - the keywords used in the
format string will be replaced with their given value.
The position in the bitstring is not changed. If not enough bits are
available then all bits to the end of the bitstring will be used.
Raises ReadError if not enough bits are available.
Raises ValueError if the format is not understood.
See the docstring for 'read' for token examples.
"""
pos = self._pos
return_values = self.readlist(fmt, **kwargs)
self._pos = pos
return return_values
def bytealign(self):
"""Align to next byte and return number of skipped bits.
Raises ValueError if the end of the bitstring is reached before
aligning to the next byte.
"""
skipped = (8 - (self._pos % 8)) % 8
self.pos += self._offset + skipped
assert self._assertsanity()
return skipped
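    # Usage sketch for bytealign(), as comments with illustrative values:
    #   >>> s = ConstBitStream('0xabcdef')
    #   >>> s.read('bin:3')
    #   '101'
    #   >>> s.bytealign()    # skips the remaining 5 bits of the first byte
    #   5
    #   >>> s.pos, s.bytepos
    #   (8, 1)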
pos = property(_getbitpos, _setbitpos,
doc="""The position in the bitstring in bits. Read and write.
""")
bitpos = property(_getbitpos, _setbitpos,
doc="""The position in the bitstring in bits. Read and write.
""")
bytepos = property(_getbytepos, _setbytepos,
doc="""The position in the bitstring in bytes. Read and write.
""")
class BitStream(ConstBitStream, BitArray):
"""A container or stream holding a mutable sequence of bits
Subclass of the ConstBitStream and BitArray classes. Inherits all of
their methods.
Methods:
all() -- Check if all specified bits are set to 1 or 0.
any() -- Check if any of specified bits are set to 1 or 0.
append() -- Append a bitstring.
bytealign() -- Align to next byte boundary.
byteswap() -- Change byte endianness in-place.
count() -- Count the number of bits set to 1 or 0.
cut() -- Create generator of constant sized chunks.
endswith() -- Return whether the bitstring ends with a sub-string.
find() -- Find a sub-bitstring in the current bitstring.
findall() -- Find all occurrences of a sub-bitstring in the current bitstring.
insert() -- Insert a bitstring.
invert() -- Flip bit(s) between one and zero.
join() -- Join bitstrings together using current bitstring.
overwrite() -- Overwrite a section with a new bitstring.
peek() -- Peek at and interpret next bits as a single item.
peeklist() -- Peek at and interpret next bits as a list of items.
prepend() -- Prepend a bitstring.
read() -- Read and interpret next bits as a single item.
readlist() -- Read and interpret next bits as a list of items.
replace() -- Replace occurrences of one bitstring with another.
reverse() -- Reverse bits in-place.
rfind() -- Seek backwards to find a sub-bitstring.
rol() -- Rotate bits to the left.
ror() -- Rotate bits to the right.
set() -- Set bit(s) to 1 or 0.
split() -- Create generator of chunks split by a delimiter.
startswith() -- Return whether the bitstring starts with a sub-bitstring.
tobytes() -- Return bitstring as bytes, padding if needed.
tofile() -- Write bitstring to file, padding if needed.
unpack() -- Interpret bits using format string.
Special methods:
Mutating operators are available: [], <<=, >>=, +=, *=, &=, |= and ^=
in addition to [], ==, !=, +, *, ~, <<, >>, &, | and ^.
Properties:
bin -- The bitstring as a binary string.
bool -- For single bit bitstrings, interpret as True or False.
bytepos -- The current byte position in the bitstring.
bytes -- The bitstring as a bytes object.
float -- Interpret as a floating point number.
floatbe -- Interpret as a big-endian floating point number.
floatle -- Interpret as a little-endian floating point number.
floatne -- Interpret as a native-endian floating point number.
hex -- The bitstring as a hexadecimal string.
int -- Interpret as a two's complement signed integer.
intbe -- Interpret as a big-endian signed integer.
intle -- Interpret as a little-endian signed integer.
intne -- Interpret as a native-endian signed integer.
len -- Length of the bitstring in bits.
oct -- The bitstring as an octal string.
pos -- The current bit position in the bitstring.
se -- Interpret as a signed exponential-Golomb code.
ue -- Interpret as an unsigned exponential-Golomb code.
sie -- Interpret as a signed interleaved exponential-Golomb code.
uie -- Interpret as an unsigned interleaved exponential-Golomb code.
uint -- Interpret as a two's complement unsigned integer.
uintbe -- Interpret as a big-endian unsigned integer.
uintle -- Interpret as a little-endian unsigned integer.
uintne -- Interpret as a native-endian unsigned integer.
"""
__slots__ = ()
# As BitStream objects are mutable, we shouldn't allow them to be hashed.
__hash__ = None
def __init__(self, auto=None, length=None, offset=None, **kwargs):
"""Either specify an 'auto' initialiser:
auto -- a string of comma separated tokens, an integer, a file object,
a bytearray, a boolean iterable or another bitstring.
Or initialise via **kwargs with one (and only one) of:
bytes -- raw data as a string, for example read from a binary file.
bin -- binary string representation, e.g. '0b001010'.
hex -- hexadecimal string representation, e.g. '0x2ef'
oct -- octal string representation, e.g. '0o777'.
uint -- an unsigned integer.
int -- a signed integer.
float -- a floating point number.
uintbe -- an unsigned big-endian whole byte integer.
intbe -- a signed big-endian whole byte integer.
        floatbe -- a big-endian floating point number.
uintle -- an unsigned little-endian whole byte integer.
intle -- a signed little-endian whole byte integer.
floatle -- a little-endian floating point number.
uintne -- an unsigned native-endian whole byte integer.
intne -- a signed native-endian whole byte integer.
floatne -- a native-endian floating point number.
se -- a signed exponential-Golomb code.
ue -- an unsigned exponential-Golomb code.
sie -- a signed interleaved exponential-Golomb code.
uie -- an unsigned interleaved exponential-Golomb code.
bool -- a boolean (True or False).
filename -- a file which will be opened in binary read-only mode.
Other keyword arguments:
length -- length of the bitstring in bits, if needed and appropriate.
It must be supplied for all integer and float initialisers.
offset -- bit offset to the data. These offset bits are
ignored and this is intended for use when
initialising using 'bytes' or 'filename'.
"""
self._pos = 0
# For mutable BitStreams we always read in files to memory:
if not isinstance(self._datastore, ByteStore):
self._ensureinmemory()
def __new__(cls, auto=None, length=None, offset=None, **kwargs):
x = super(BitStream, cls).__new__(cls)
x._initialise(auto, length, offset, **kwargs)
return x
def __copy__(self):
"""Return a new copy of the BitStream."""
s_copy = BitStream()
s_copy._pos = 0
if not isinstance(self._datastore, ByteStore):
# Let them both point to the same (invariant) array.
# If either gets modified then at that point they'll be read into memory.
s_copy._datastore = self._datastore
else:
s_copy._datastore = ByteStore(self._datastore._rawarray[:],
self._datastore.bitlength,
self._datastore.offset)
return s_copy
def prepend(self, bs):
"""Prepend a bitstring to the current bitstring.
bs -- The bitstring to prepend.
"""
bs = self._converttobitstring(bs)
self._prepend(bs)
self._pos += bs.len
def pack(fmt, *values, **kwargs):
"""Pack the values according to the format string and return a new BitStream.
fmt -- A single string or a list of strings with comma separated tokens
describing how to create the BitStream.
values -- Zero or more values to pack according to the format.
kwargs -- A dictionary or keyword-value pairs - the keywords used in the
format string will be replaced with their given value.
Token examples: 'int:12' : 12 bits as a signed integer
'uint:8' : 8 bits as an unsigned integer
'float:64' : 8 bytes as a big-endian float
'intbe:16' : 2 bytes as a big-endian signed integer
'uintbe:16' : 2 bytes as a big-endian unsigned integer
'intle:32' : 4 bytes as a little-endian signed integer
'uintle:32' : 4 bytes as a little-endian unsigned integer
'floatle:64': 8 bytes as a little-endian float
'intne:24' : 3 bytes as a native-endian signed integer
'uintne:24' : 3 bytes as a native-endian unsigned integer
'floatne:32': 4 bytes as a native-endian float
'hex:80' : 80 bits as a hex string
'oct:9' : 9 bits as an octal string
'bin:1' : single bit binary string
'ue' / 'uie': next bits as unsigned exp-Golomb code
'se' / 'sie': next bits as signed exp-Golomb code
'bits:5' : 5 bits as a bitstring object
'bytes:10' : 10 bytes as a bytes object
'bool' : 1 bit as a bool
'pad:3' : 3 zero bits as padding
>>> s = pack('uint:12, bits', 100, '0xffe')
>>> t = pack(['bits', 'bin:3'], s, '111')
>>> u = pack('uint:8=a, uint:8=b, uint:55=a', a=6, b=44)
"""
tokens = []
if isinstance(fmt, basestring):
fmt = [fmt]
try:
for f_item in fmt:
_, tkns = tokenparser(f_item, tuple(sorted(kwargs.keys())))
tokens.extend(tkns)
except ValueError as e:
raise CreationError(*e.args)
value_iter = iter(values)
s = BitStream()
try:
for name, length, value in tokens:
# If the value is in the kwd dictionary then it takes precedence.
if value in kwargs:
value = kwargs[value]
# If the length is in the kwd dictionary then use that too.
if length in kwargs:
length = kwargs[length]
# Also if we just have a dictionary name then we want to use it
if name in kwargs and length is None and value is None:
s.append(kwargs[name])
continue
if length is not None:
length = int(length)
if value is None and name != 'pad':
# Take the next value from the ones provided
value = next(value_iter)
s._append(BitStream._init_with_token(name, length, value))
except StopIteration:
raise CreationError("Not enough parameters present to pack according to the "
"format. {0} values are needed.", len(tokens))
try:
next(value_iter)
except StopIteration:
# Good, we've used up all the *values.
return s
raise CreationError("Too many parameters present to pack according to the format.")
# Aliases for backward compatibility
ConstBitArray = Bits
BitString = BitStream
__all__ = ['ConstBitArray', 'ConstBitStream', 'BitStream', 'BitArray',
'Bits', 'BitString', 'pack', 'Error', 'ReadError',
'InterpretError', 'ByteAlignError', 'CreationError', 'bytealigned']
|
{
"content_hash": "27975531fc9773a65f065d88d7d52a94",
"timestamp": "",
"source": "github",
"line_count": 4241,
"max_line_length": 120,
"avg_line_length": 39.11624616835652,
"alnum_prop": 0.5535288018710969,
"repo_name": "iLambda/lsdj-wave-cruncher",
"id": "aa078d077bfc8be18e8e9264cdb3b5b4360bc538",
"size": "165914",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "lib/pylsdj/bread/bitstring/bitstring.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "9253"
},
{
"name": "Python",
"bytes": "488195"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import boto.redshift
from moto.core import BaseBackend, BaseModel
from moto.ec2 import ec2_backends
from .exceptions import (
ClusterNotFoundError,
ClusterParameterGroupNotFoundError,
ClusterSecurityGroupNotFoundError,
ClusterSubnetGroupNotFoundError,
InvalidSubnetError,
)
class Cluster(BaseModel):
def __init__(self, redshift_backend, cluster_identifier, node_type, master_username,
master_user_password, db_name, cluster_type, cluster_security_groups,
vpc_security_group_ids, cluster_subnet_group_name, availability_zone,
preferred_maintenance_window, cluster_parameter_group_name,
automated_snapshot_retention_period, port, cluster_version,
allow_version_upgrade, number_of_nodes, publicly_accessible,
encrypted, region):
self.redshift_backend = redshift_backend
self.cluster_identifier = cluster_identifier
self.node_type = node_type
self.master_username = master_username
self.master_user_password = master_user_password
self.db_name = db_name if db_name else "dev"
self.vpc_security_group_ids = vpc_security_group_ids
self.cluster_subnet_group_name = cluster_subnet_group_name
self.publicly_accessible = publicly_accessible
self.encrypted = encrypted
self.allow_version_upgrade = allow_version_upgrade if allow_version_upgrade is not None else True
self.cluster_version = cluster_version if cluster_version else "1.0"
self.port = int(port) if port else 5439
self.automated_snapshot_retention_period = int(
automated_snapshot_retention_period) if automated_snapshot_retention_period else 1
self.preferred_maintenance_window = preferred_maintenance_window if preferred_maintenance_window else "Mon:03:00-Mon:03:30"
if cluster_parameter_group_name:
self.cluster_parameter_group_name = [cluster_parameter_group_name]
else:
self.cluster_parameter_group_name = ['default.redshift-1.0']
if cluster_security_groups:
self.cluster_security_groups = cluster_security_groups
else:
self.cluster_security_groups = ["Default"]
self.region = region
if availability_zone:
self.availability_zone = availability_zone
else:
# This could probably be smarter, but there doesn't appear to be a
# way to pull AZs for a region in boto
self.availability_zone = region + "a"
if cluster_type == 'single-node':
self.number_of_nodes = 1
elif number_of_nodes:
self.number_of_nodes = int(number_of_nodes)
else:
self.number_of_nodes = 1
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
redshift_backend = redshift_backends[region_name]
properties = cloudformation_json['Properties']
if 'ClusterSubnetGroupName' in properties:
subnet_group_name = properties[
'ClusterSubnetGroupName'].cluster_subnet_group_name
else:
subnet_group_name = None
cluster = redshift_backend.create_cluster(
cluster_identifier=resource_name,
node_type=properties.get('NodeType'),
master_username=properties.get('MasterUsername'),
master_user_password=properties.get('MasterUserPassword'),
db_name=properties.get('DBName'),
cluster_type=properties.get('ClusterType'),
cluster_security_groups=properties.get(
'ClusterSecurityGroups', []),
vpc_security_group_ids=properties.get('VpcSecurityGroupIds', []),
cluster_subnet_group_name=subnet_group_name,
availability_zone=properties.get('AvailabilityZone'),
preferred_maintenance_window=properties.get(
'PreferredMaintenanceWindow'),
cluster_parameter_group_name=properties.get(
'ClusterParameterGroupName'),
automated_snapshot_retention_period=properties.get(
'AutomatedSnapshotRetentionPeriod'),
port=properties.get('Port'),
cluster_version=properties.get('ClusterVersion'),
allow_version_upgrade=properties.get('AllowVersionUpgrade'),
number_of_nodes=properties.get('NumberOfNodes'),
publicly_accessible=properties.get("PubliclyAccessible"),
encrypted=properties.get("Encrypted"),
region=region_name,
)
return cluster
def get_cfn_attribute(self, attribute_name):
from moto.cloudformation.exceptions import UnformattedGetAttTemplateException
if attribute_name == 'Endpoint.Address':
return self.endpoint
elif attribute_name == 'Endpoint.Port':
return self.port
raise UnformattedGetAttTemplateException()
@property
def endpoint(self):
return "{0}.cg034hpkmmjt.{1}.redshift.amazonaws.com".format(
self.cluster_identifier,
self.region,
)
@property
def security_groups(self):
return [
security_group for security_group
in self.redshift_backend.describe_cluster_security_groups()
if security_group.cluster_security_group_name in self.cluster_security_groups
]
@property
def vpc_security_groups(self):
return [
security_group for security_group
in self.redshift_backend.ec2_backend.describe_security_groups()
if security_group.id in self.vpc_security_group_ids
]
@property
def parameter_groups(self):
return [
parameter_group for parameter_group
in self.redshift_backend.describe_cluster_parameter_groups()
if parameter_group.cluster_parameter_group_name in self.cluster_parameter_group_name
]
def to_json(self):
return {
"MasterUsername": self.master_username,
"MasterUserPassword": "****",
"ClusterVersion": self.cluster_version,
"VpcSecurityGroups": [{
"Status": "active",
"VpcSecurityGroupId": group.id
} for group in self.vpc_security_groups],
"ClusterSubnetGroupName": self.cluster_subnet_group_name,
"AvailabilityZone": self.availability_zone,
"ClusterStatus": "creating",
"NumberOfNodes": self.number_of_nodes,
"AutomatedSnapshotRetentionPeriod": self.automated_snapshot_retention_period,
"PubliclyAccessible": self.publicly_accessible,
"Encrypted": self.encrypted,
"DBName": self.db_name,
"PreferredMaintenanceWindow": self.preferred_maintenance_window,
"ClusterParameterGroups": [{
"ParameterApplyStatus": "in-sync",
"ParameterGroupName": group.cluster_parameter_group_name,
} for group in self.parameter_groups],
"ClusterSecurityGroups": [{
"Status": "active",
"ClusterSecurityGroupName": group.cluster_security_group_name,
} for group in self.security_groups],
"Port": self.port,
"NodeType": self.node_type,
"ClusterIdentifier": self.cluster_identifier,
"AllowVersionUpgrade": self.allow_version_upgrade,
}
class SubnetGroup(BaseModel):
def __init__(self, ec2_backend, cluster_subnet_group_name, description, subnet_ids):
self.ec2_backend = ec2_backend
self.cluster_subnet_group_name = cluster_subnet_group_name
self.description = description
self.subnet_ids = subnet_ids
if not self.subnets:
raise InvalidSubnetError(subnet_ids)
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
redshift_backend = redshift_backends[region_name]
properties = cloudformation_json['Properties']
subnet_group = redshift_backend.create_cluster_subnet_group(
cluster_subnet_group_name=resource_name,
description=properties.get("Description"),
subnet_ids=properties.get("SubnetIds", []),
)
return subnet_group
@property
def subnets(self):
return self.ec2_backend.get_all_subnets(filters={'subnet-id': self.subnet_ids})
@property
def vpc_id(self):
return self.subnets[0].vpc_id
def to_json(self):
return {
"VpcId": self.vpc_id,
"Description": self.description,
"ClusterSubnetGroupName": self.cluster_subnet_group_name,
"SubnetGroupStatus": "Complete",
"Subnets": [{
"SubnetStatus": "Active",
"SubnetIdentifier": subnet.id,
"SubnetAvailabilityZone": {
"Name": subnet.availability_zone
},
} for subnet in self.subnets],
}
class SecurityGroup(BaseModel):
def __init__(self, cluster_security_group_name, description):
self.cluster_security_group_name = cluster_security_group_name
self.description = description
def to_json(self):
return {
"EC2SecurityGroups": [],
"IPRanges": [],
"Description": self.description,
"ClusterSecurityGroupName": self.cluster_security_group_name,
}
class ParameterGroup(BaseModel):
def __init__(self, cluster_parameter_group_name, group_family, description):
self.cluster_parameter_group_name = cluster_parameter_group_name
self.group_family = group_family
self.description = description
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
redshift_backend = redshift_backends[region_name]
properties = cloudformation_json['Properties']
parameter_group = redshift_backend.create_cluster_parameter_group(
cluster_parameter_group_name=resource_name,
description=properties.get("Description"),
group_family=properties.get("ParameterGroupFamily"),
)
return parameter_group
def to_json(self):
return {
"ParameterGroupFamily": self.group_family,
"Description": self.description,
"ParameterGroupName": self.cluster_parameter_group_name,
}
class RedshiftBackend(BaseBackend):
def __init__(self, ec2_backend):
self.clusters = {}
self.subnet_groups = {}
self.security_groups = {
"Default": SecurityGroup("Default", "Default Redshift Security Group")
}
self.parameter_groups = {
"default.redshift-1.0": ParameterGroup(
"default.redshift-1.0",
"redshift-1.0",
"Default Redshift parameter group",
)
}
self.ec2_backend = ec2_backend
def reset(self):
ec2_backend = self.ec2_backend
self.__dict__ = {}
self.__init__(ec2_backend)
def create_cluster(self, **cluster_kwargs):
cluster_identifier = cluster_kwargs['cluster_identifier']
cluster = Cluster(self, **cluster_kwargs)
self.clusters[cluster_identifier] = cluster
return cluster
def describe_clusters(self, cluster_identifier=None):
clusters = self.clusters.values()
if cluster_identifier:
if cluster_identifier in self.clusters:
return [self.clusters[cluster_identifier]]
else:
raise ClusterNotFoundError(cluster_identifier)
return clusters
def modify_cluster(self, **cluster_kwargs):
cluster_identifier = cluster_kwargs.pop('cluster_identifier')
new_cluster_identifier = cluster_kwargs.pop(
'new_cluster_identifier', None)
cluster = self.describe_clusters(cluster_identifier)[0]
for key, value in cluster_kwargs.items():
setattr(cluster, key, value)
if new_cluster_identifier:
self.delete_cluster(cluster_identifier)
cluster.cluster_identifier = new_cluster_identifier
self.clusters[new_cluster_identifier] = cluster
return cluster
def delete_cluster(self, cluster_identifier):
if cluster_identifier in self.clusters:
return self.clusters.pop(cluster_identifier)
raise ClusterNotFoundError(cluster_identifier)
def create_cluster_subnet_group(self, cluster_subnet_group_name, description, subnet_ids):
subnet_group = SubnetGroup(
self.ec2_backend, cluster_subnet_group_name, description, subnet_ids)
self.subnet_groups[cluster_subnet_group_name] = subnet_group
return subnet_group
def describe_cluster_subnet_groups(self, subnet_identifier=None):
subnet_groups = self.subnet_groups.values()
if subnet_identifier:
if subnet_identifier in self.subnet_groups:
return [self.subnet_groups[subnet_identifier]]
else:
raise ClusterSubnetGroupNotFoundError(subnet_identifier)
return subnet_groups
def delete_cluster_subnet_group(self, subnet_identifier):
if subnet_identifier in self.subnet_groups:
return self.subnet_groups.pop(subnet_identifier)
raise ClusterSubnetGroupNotFoundError(subnet_identifier)
def create_cluster_security_group(self, cluster_security_group_name, description):
security_group = SecurityGroup(
cluster_security_group_name, description)
self.security_groups[cluster_security_group_name] = security_group
return security_group
def describe_cluster_security_groups(self, security_group_name=None):
security_groups = self.security_groups.values()
if security_group_name:
if security_group_name in self.security_groups:
return [self.security_groups[security_group_name]]
else:
raise ClusterSecurityGroupNotFoundError(security_group_name)
return security_groups
def delete_cluster_security_group(self, security_group_identifier):
if security_group_identifier in self.security_groups:
return self.security_groups.pop(security_group_identifier)
raise ClusterSecurityGroupNotFoundError(security_group_identifier)
def create_cluster_parameter_group(self, cluster_parameter_group_name,
group_family, description):
parameter_group = ParameterGroup(
cluster_parameter_group_name, group_family, description)
self.parameter_groups[cluster_parameter_group_name] = parameter_group
return parameter_group
def describe_cluster_parameter_groups(self, parameter_group_name=None):
parameter_groups = self.parameter_groups.values()
if parameter_group_name:
if parameter_group_name in self.parameter_groups:
return [self.parameter_groups[parameter_group_name]]
else:
raise ClusterParameterGroupNotFoundError(parameter_group_name)
return parameter_groups
def delete_cluster_parameter_group(self, parameter_group_name):
if parameter_group_name in self.parameter_groups:
return self.parameter_groups.pop(parameter_group_name)
raise ClusterParameterGroupNotFoundError(parameter_group_name)
redshift_backends = {}
for region in boto.redshift.regions():
redshift_backends[region.name] = RedshiftBackend(ec2_backends[region.name])
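# Minimal sketch of driving the in-memory backend directly, with illustrative
# values, guarded so nothing runs on import. create_cluster forwards its kwargs
# to Cluster.__init__, so every parameter of that signature must be supplied.
if __name__ == "__main__":
    backend = redshift_backends["us-east-1"]
    demo = backend.create_cluster(
        cluster_identifier="demo-cluster",
        node_type="dc1.large",
        master_username="admin",
        master_user_password="Secret123",
        db_name=None,
        cluster_type="single-node",
        cluster_security_groups=None,
        vpc_security_group_ids=[],
        cluster_subnet_group_name=None,
        availability_zone=None,
        preferred_maintenance_window=None,
        cluster_parameter_group_name=None,
        automated_snapshot_retention_period=None,
        port=None,
        cluster_version=None,
        allow_version_upgrade=None,
        number_of_nodes=None,
        publicly_accessible=True,
        encrypted=False,
        region="us-east-1",
    )
    assert backend.describe_clusters("demo-cluster") == [demo]
    print(demo.endpoint)  # demo-cluster.cg034hpkmmjt.us-east-1.redshift.amazonaws.com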
|
{
"content_hash": "90ce52ae70edc1ccecfd579101265e8f",
"timestamp": "",
"source": "github",
"line_count": 389,
"max_line_length": 131,
"avg_line_length": 40.65809768637532,
"alnum_prop": 0.6420080930703086,
"repo_name": "heddle317/moto",
"id": "5e64f7a166efe1d998c848f61f65510079db2f65",
"size": "15816",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "moto/redshift/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "630"
},
{
"name": "Python",
"bytes": "2633276"
},
{
"name": "Ruby",
"bytes": "188"
}
],
"symlink_target": ""
}
|
import itertools
class Solution:
def combine(self, n, k):
return [list(elem) for elem in itertools.combinations(xrange(1, n + 1), k)]
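# Usage sketch (Python 2, matching the xrange/itertools usage above):
#   >>> Solution().combine(4, 2)
#   [[1, 2], [1, 3], [1, 4], [2, 3], [2, 4], [3, 4]]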
|
{
"content_hash": "29b5633fe1a2b94df85b5d8697c2fc24",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 83,
"avg_line_length": 44,
"alnum_prop": 0.6287878787878788,
"repo_name": "rahul-ramadas/leetcode",
"id": "ed8401ec1d5c8edd0900f03492d84a0063786f04",
"size": "132",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "combinations/Solution.6808610.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "121"
},
{
"name": "C++",
"bytes": "107572"
},
{
"name": "Python",
"bytes": "167196"
}
],
"symlink_target": ""
}
|
"""
PF: Magnetic: Inversion Linear
===============================
Create a synthetic block model and invert
with a compact norm
"""
import matplotlib.pyplot as plt
import numpy as np
from discretize import TensorMesh
from SimPEG.potential_fields import magnetics
from SimPEG import utils
from SimPEG import (
data,
data_misfit,
maps,
regularization,
optimization,
inverse_problem,
directives,
inversion,
)
def run(plotIt=True):
# Define the inducing field parameter
H0 = (50000, 90, 0)
# Create a mesh
dx = 5.0
hxind = [(dx, 5, -1.3), (dx, 10), (dx, 5, 1.3)]
hyind = [(dx, 5, -1.3), (dx, 10), (dx, 5, 1.3)]
hzind = [(dx, 5, -1.3), (dx, 10)]
mesh = TensorMesh([hxind, hyind, hzind], "CCC")
# Get index of the center
midx = int(mesh.shape_cells[0] / 2)
midy = int(mesh.shape_cells[1] / 2)
    # Let's create a simple Gaussian topo and set the active cells
[xx, yy] = np.meshgrid(mesh.nodes_x, mesh.nodes_y)
zz = -np.exp((xx ** 2 + yy ** 2) / 75 ** 2) + mesh.nodes_z[-1]
# We would usually load a topofile
topo = np.c_[utils.mkvc(xx), utils.mkvc(yy), utils.mkvc(zz)]
# Go from topo to array of indices of active cells
actv = utils.surface2ind_topo(mesh, topo, "N")
actv = np.where(actv)[0]
nC = len(actv)
    # Create an array of observation points
xr = np.linspace(-20.0, 20.0, 20)
yr = np.linspace(-20.0, 20.0, 20)
X, Y = np.meshgrid(xr, yr)
# Move the observation points 5m above the topo
Z = -np.exp((X ** 2 + Y ** 2) / 75 ** 2) + mesh.nodes_z[-1] + 5.0
# Create a MAGsurvey
rxLoc = np.c_[utils.mkvc(X.T), utils.mkvc(Y.T), utils.mkvc(Z.T)]
rxLoc = magnetics.receivers.Point(rxLoc, components=["tmi"])
srcField = magnetics.sources.SourceField(receiver_list=[rxLoc], parameters=H0)
survey = magnetics.survey.Survey(srcField)
# We can now create a susceptibility model and generate data
# Here a simple block in half-space
model = np.zeros(mesh.shape_cells)
model[(midx - 2) : (midx + 2), (midy - 2) : (midy + 2), -6:-2] = 0.02
model = utils.mkvc(model)
model = model[actv]
# Create active map to go from reduce set to full
actvMap = maps.InjectActiveCells(mesh, actv, -100)
# Create reduced identity map
idenMap = maps.IdentityMap(nP=nC)
# Create the forward model operator
simulation = magnetics.simulation.Simulation3DIntegral(
survey=survey,
mesh=mesh,
chiMap=idenMap,
ind_active=actv,
)
# Compute linear forward operator and compute some data
d = simulation.dpred(model)
# Add noise and uncertainties
# We add some random Gaussian noise (1nT)
synthetic_data = d + np.random.randn(len(d))
wd = np.ones(len(synthetic_data)) * 1.0 # Assign flat uncertainties
data_object = data.Data(survey, dobs=synthetic_data, noise_floor=wd)
# Create a regularization
reg = regularization.Sparse(mesh, indActive=actv, mapping=idenMap)
reg.mref = np.zeros(nC)
reg.norms = [0, 0, 0, 0]
# reg.eps_p, reg.eps_q = 1e-0, 1e-0
# Create sensitivity weights from our linear forward operator
rxLoc = survey.source_field.receiver_list[0].locations
m0 = np.ones(nC) * 1e-4 # Starting model
# Data misfit function
dmis = data_misfit.L2DataMisfit(simulation=simulation, data=data_object)
dmis.W = 1 / wd
# Add directives to the inversion
opt = optimization.ProjectedGNCG(
maxIter=20, lower=0.0, upper=1.0, maxIterLS=20, maxIterCG=20, tolCG=1e-3
)
invProb = inverse_problem.BaseInvProblem(dmis, reg, opt)
betaest = directives.BetaEstimate_ByEig(beta0_ratio=1e-1)
# Here is where the norms are applied
    # Pick a threshold parameter empirically, based on the distribution of
    # the model parameters
IRLS = directives.Update_IRLS(f_min_change=1e-3, max_irls_iterations=40)
saveDict = directives.SaveOutputEveryIteration(save_txt=False)
update_Jacobi = directives.UpdatePreconditioner()
# Add sensitivity weights
sensitivity_weights = directives.UpdateSensitivityWeights(everyIter=False)
inv = inversion.BaseInversion(
invProb,
directiveList=[sensitivity_weights, IRLS, betaest, update_Jacobi, saveDict],
)
# Run the inversion
mrec = inv.run(m0)
if plotIt:
# Here is the recovered susceptibility model
ypanel = midx
zpanel = -5
m_l2 = actvMap * invProb.l2model
m_l2[m_l2 == -100] = np.nan
m_lp = actvMap * mrec
m_lp[m_lp == -100] = np.nan
m_true = actvMap * model
m_true[m_true == -100] = np.nan
# Plot the data
utils.plot_utils.plot2Ddata(rxLoc, d)
plt.figure()
# Plot L2 model
ax = plt.subplot(321)
mesh.plot_slice(
m_l2,
ax=ax,
normal="Z",
ind=zpanel,
grid=True,
clim=(model.min(), model.max()),
)
plt.plot(
([mesh.cell_centers_x[0], mesh.cell_centers_x[-1]]),
([mesh.cell_centers_y[ypanel], mesh.cell_centers_y[ypanel]]),
color="w",
)
plt.title("Plan l2-model.")
plt.gca().set_aspect("equal")
plt.ylabel("y")
ax.xaxis.set_visible(False)
plt.gca().set_aspect("equal", adjustable="box")
        # Vertical section
ax = plt.subplot(322)
mesh.plot_slice(
m_l2,
ax=ax,
normal="Y",
ind=midx,
grid=True,
clim=(model.min(), model.max()),
)
plt.plot(
([mesh.cell_centers_x[0], mesh.cell_centers_x[-1]]),
([mesh.cell_centers_z[zpanel], mesh.cell_centers_z[zpanel]]),
color="w",
)
plt.title("E-W l2-model.")
plt.gca().set_aspect("equal")
ax.xaxis.set_visible(False)
plt.ylabel("z")
plt.gca().set_aspect("equal", adjustable="box")
# Plot Lp model
ax = plt.subplot(323)
mesh.plot_slice(
m_lp,
ax=ax,
normal="Z",
ind=zpanel,
grid=True,
clim=(model.min(), model.max()),
)
plt.plot(
([mesh.cell_centers_x[0], mesh.cell_centers_x[-1]]),
([mesh.cell_centers_y[ypanel], mesh.cell_centers_y[ypanel]]),
color="w",
)
plt.title("Plan lp-model.")
plt.gca().set_aspect("equal")
ax.xaxis.set_visible(False)
plt.ylabel("y")
plt.gca().set_aspect("equal", adjustable="box")
# Vertical section
ax = plt.subplot(324)
mesh.plot_slice(
m_lp,
ax=ax,
normal="Y",
ind=midx,
grid=True,
clim=(model.min(), model.max()),
)
plt.plot(
([mesh.cell_centers_x[0], mesh.cell_centers_x[-1]]),
([mesh.cell_centers_z[zpanel], mesh.cell_centers_z[zpanel]]),
color="w",
)
plt.title("E-W lp-model.")
plt.gca().set_aspect("equal")
ax.xaxis.set_visible(False)
plt.ylabel("z")
plt.gca().set_aspect("equal", adjustable="box")
# Plot True model
ax = plt.subplot(325)
mesh.plot_slice(
m_true,
ax=ax,
normal="Z",
ind=zpanel,
grid=True,
clim=(model.min(), model.max()),
)
plt.plot(
([mesh.cell_centers_x[0], mesh.cell_centers_x[-1]]),
([mesh.cell_centers_y[ypanel], mesh.cell_centers_y[ypanel]]),
color="w",
)
plt.title("Plan true model.")
plt.gca().set_aspect("equal")
plt.xlabel("x")
plt.ylabel("y")
plt.gca().set_aspect("equal", adjustable="box")
# Vertical section
ax = plt.subplot(326)
mesh.plot_slice(
m_true,
ax=ax,
normal="Y",
ind=midx,
grid=True,
clim=(model.min(), model.max()),
)
plt.plot(
([mesh.cell_centers_x[0], mesh.cell_centers_x[-1]]),
([mesh.cell_centers_z[zpanel], mesh.cell_centers_z[zpanel]]),
color="w",
)
plt.title("E-W true model.")
plt.gca().set_aspect("equal")
plt.xlabel("x")
plt.ylabel("z")
plt.gca().set_aspect("equal", adjustable="box")
# Plot convergence curves
fig, axs = plt.figure(), plt.subplot()
axs.plot(saveDict.phi_d, "k", lw=2)
axs.plot(
np.r_[IRLS.iterStart, IRLS.iterStart],
np.r_[0, np.max(saveDict.phi_d)],
"k:",
)
twin = axs.twinx()
twin.plot(saveDict.phi_m, "k--", lw=2)
axs.text(
IRLS.iterStart,
0,
"IRLS Steps",
va="bottom",
ha="center",
rotation="vertical",
size=12,
bbox={"facecolor": "white"},
)
axs.set_ylabel(r"$\phi_d$", size=16, rotation=0)
axs.set_xlabel("Iterations", size=14)
twin.set_ylabel(r"$\phi_m$", size=16, rotation=0)
if __name__ == "__main__":
run()
plt.show()
|
{
"content_hash": "d18c42b7fde03487ed821b161c8c563e",
"timestamp": "",
"source": "github",
"line_count": 312,
"max_line_length": 84,
"avg_line_length": 29.762820512820515,
"alnum_prop": 0.5480292914064183,
"repo_name": "simpeg/simpeg",
"id": "e8e17eb64683965b02b7181de23fc0c5999e58ef",
"size": "9286",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "examples/_archived/plot_inv_mag_linear.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "685"
},
{
"name": "Python",
"bytes": "3476002"
}
],
"symlink_target": ""
}
|
from mock import ANY
from raptiformica.actions.mesh import ensure_no_consul_running
from tests.testcase import TestCase
class TestEnsureNoConsulRunning(TestCase):
def setUp(self):
self.log = self.set_up_patch('raptiformica.actions.mesh.log')
self.run_command_print_ready = self.set_up_patch(
'raptiformica.actions.mesh.run_command_print_ready'
)
def test_ensure_no_consul_running_logs_stopping_any_consul_agent_message(self):
ensure_no_consul_running()
self.log.info.assert_called_once_with(ANY)
def test_ensure_no_consul_running_kills_any_running_consul_agents(self):
ensure_no_consul_running()
expected_command = "ps aux | grep [c]onsul | awk '{print $2}' | " \
"xargs --no-run-if-empty -I {} " \
"sh -c \"grep -q docker /proc/{}/cgroup && " \
"! grep -q name=systemd:/docker /proc/1/cgroup || kill -2 {}\""
self.run_command_print_ready.assert_called_once_with(
expected_command,
shell=True,
buffered=False
)
self.assertIn(
'ps aux |', expected_command,
'It should list all processes on the system'
)
self.assertIn(
'| grep [c]onsul |', expected_command,
'Should find the processes with consul in the name, '
'excluding this one'
)
self.assertIn(
"| awk '{print $2}' |", expected_command,
'Should print the PID of the processes matching the name'
)
self.assertIn(
"xargs --no-run-if-empty", expected_command,
'Should map over the found PIDs, do nothing if no matches'
)
self.assertIn(
"-I {} sh -c \"grep -q docker /proc/{}/cgroup && "
"! grep -q name=systemd:/docker /proc/1/cgroup || kill -2 {}\"",
expected_command,
'Should only kill processes not in Docker containers unless '
'running inside a Docker, those could have their own raptiformica '
'instances running'
)
self.assertIn(
"kill -2",
expected_command,
'Should gracefully kill consul. The default kill signal is '
'SIGTERM. Consul does not shut down gracefully on SIGTERM, '
'only on SIGINT (2). If it does not shut down gracefully there '
'is a chance that the socket will enter a TCP-WAIT state '
'after which Linux has a timeout of around 60 seconds before '
'the port becomes available again'
)
|
{
"content_hash": "4e592df8a54c7d617bfed7fd2eb1626a",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 90,
"avg_line_length": 40.84615384615385,
"alnum_prop": 0.5713747645951036,
"repo_name": "vdloo/raptiformica",
"id": "f2f5456ef16c71364531c9d0330753f500fd2444",
"size": "2655",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/raptiformica/actions/mesh/test_ensure_no_consul_running.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "601"
},
{
"name": "Makefile",
"bytes": "491"
},
{
"name": "Python",
"bytes": "712131"
},
{
"name": "Ruby",
"bytes": "2573"
},
{
"name": "Shell",
"bytes": "12007"
}
],
"symlink_target": ""
}
|
"""Tests for utility functions in reportlab.pdfbase.pdfutils.
"""
__version__='''$Id$'''
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, printLocation
setOutDir(__name__)
import os
import unittest
from reportlab.pdfbase.pdfutils import _AsciiHexEncode, _AsciiHexDecode
from reportlab.pdfbase.pdfutils import _AsciiBase85Encode, _AsciiBase85Decode
class PdfEncodingTestCase(unittest.TestCase):
"Test various encodings used in PDF files."
def testAsciiHex(self):
"Test if the obvious test for whether ASCII-Hex encoding works."
plainText = 'What is the average velocity of a sparrow?'
encoded = _AsciiHexEncode(plainText)
decoded = _AsciiHexDecode(encoded)
msg = "Round-trip AsciiHex encoding failed."
assert decoded == plainText, msg
def testAsciiBase85(self):
"Test if the obvious test for whether ASCII-Base85 encoding works."
msg = "Round-trip AsciiBase85 encoding failed."
plain = 'What is the average velocity of a sparrow?'
#the remainder block can be absent or from 1 to 4 bytes
for i in xrange(55):
encoded = _AsciiBase85Encode(plain)
decoded = _AsciiBase85Decode(encoded)
assert decoded == plain, msg
plain = plain + chr(i)
def makeSuite():
return makeSuiteForClasses(PdfEncodingTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
printLocation()
|
{
"content_hash": "56c927872af242d11b7dd21b5ce801a5",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 80,
"avg_line_length": 32.30434782608695,
"alnum_prop": 0.6911170928667564,
"repo_name": "makinacorpus/reportlab-ecomobile",
"id": "0097e2a110c2b78cd0365707803dd05b4de2c0c2",
"size": "1566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_pdfbase_pdfutils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "764229"
},
{
"name": "C++",
"bytes": "2019"
},
{
"name": "Java",
"bytes": "6333"
},
{
"name": "Python",
"bytes": "2863462"
},
{
"name": "Shell",
"bytes": "1783"
}
],
"symlink_target": ""
}
|
from csvout import report
|
{
"content_hash": "013b2117f0791ec54b6a76596babcbbe",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 25,
"avg_line_length": 26,
"alnum_prop": 0.8461538461538461,
"repo_name": "gwh59/cloud-custodian",
"id": "1916cb28ae931be4638cff2eaa1d82f5c36098fd",
"size": "611",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "c7n/reports/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1122"
},
{
"name": "Python",
"bytes": "610848"
}
],
"symlink_target": ""
}
|
import os
def find_parent(path, times=1):
parent = os.path.dirname(path)
return parent if times <= 1 else find_parent(parent, times-1)
if __name__ == '__main__':
project_dir = find_parent(os.path.abspath(__file__), 2)
print 'export PYTHONPATH=$PYTHON_PATH:%s' % (project_dir,)
|
{
"content_hash": "a68af79c56e0346ffa0172abfa869c0f",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 65,
"avg_line_length": 29.6,
"alnum_prop": 0.6418918918918919,
"repo_name": "Everley1993/Laky-Earo",
"id": "75e39988ffca938a3f67b1cd75e843a9f38155b6",
"size": "338",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "script/pythonpath.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1431"
},
{
"name": "HTML",
"bytes": "9945"
},
{
"name": "JavaScript",
"bytes": "14650"
},
{
"name": "Makefile",
"bytes": "109"
},
{
"name": "Python",
"bytes": "91442"
}
],
"symlink_target": ""
}
|
"""Test node responses to invalid network messages."""
import asyncio
import struct
import sys
from test_framework import messages
from test_framework.mininode import P2PDataStore, NetworkThread
from test_framework.test_framework import BitcoinTestFramework
class msg_unrecognized:
"""Nonsensical message. Modeled after similar types in test_framework.messages."""
command = b'badmsg'
def __init__(self, *, str_data):
self.str_data = str_data.encode() if not isinstance(str_data, bytes) else str_data
def serialize(self):
return messages.ser_string(self.str_data)
def __repr__(self):
return "{}(data={})".format(self.command, self.str_data)
class InvalidMessagesTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def run_test(self):
"""
. Test msg header
0. Send a bunch of large (4MB) messages of an unrecognized type. Check to see
that it isn't an effective DoS against the node.
1. Send an oversized (4MB+) message and check that we're disconnected.
2. Send a few messages with an incorrect data size in the header, ensure the
messages are ignored.
"""
self.test_magic_bytes()
self.test_checksum()
self.test_size()
self.test_command()
node = self.nodes[0]
self.node = node
node.add_p2p_connection(P2PDataStore())
conn2 = node.add_p2p_connection(P2PDataStore())
msg_limit = 4 * 1000 * 1000 # 4MB, per MAX_PROTOCOL_MESSAGE_LENGTH
        valid_data_limit = msg_limit - 5  # Account for the 5-byte compact-size length prefix added by ser_string
#
# 0.
#
# Send as large a message as is valid, ensure we aren't disconnected but
# also can't exhaust resources.
#
msg_at_size = msg_unrecognized(str_data="b" * valid_data_limit)
assert len(msg_at_size.serialize()) == msg_limit
self.log.info("Sending a bunch of large, junk messages to test memory exhaustion. May take a bit...")
# Run a bunch of times to test for memory exhaustion.
for _ in range(80):
node.p2p.send_message(msg_at_size)
# Check that, even though the node is being hammered by nonsense from one
# connection, it can still service other peers in a timely way.
for _ in range(20):
conn2.sync_with_ping(timeout=2)
# Peer 1, despite serving up a bunch of nonsense, should still be connected.
self.log.info("Waiting for node to drop junk messages.")
node.p2p.sync_with_ping(timeout=400)
assert node.p2p.is_connected
#
# 1.
#
# Send an oversized message, ensure we're disconnected.
#
# Under macOS this test is skipped due to an unexpected error code
# returned from the closing socket which python/asyncio does not
# yet know how to handle.
#
if sys.platform != 'darwin':
msg_over_size = msg_unrecognized(str_data="b" * (valid_data_limit + 1))
assert len(msg_over_size.serialize()) == (msg_limit + 1)
# An unknown message type (or *any* message type) over
# MAX_PROTOCOL_MESSAGE_LENGTH should result in a disconnect.
node.p2p.send_message(msg_over_size)
node.p2p.wait_for_disconnect(timeout=4)
node.disconnect_p2ps()
conn = node.add_p2p_connection(P2PDataStore())
conn.wait_for_verack()
else:
self.log.info("Skipping test p2p_invalid_messages/1 (oversized message) under macOS")
#
# 2.
#
# Send messages with an incorrect data size in the header.
#
actual_size = 100
msg = msg_unrecognized(str_data="b" * actual_size)
# TODO: handle larger-than cases. I haven't been able to pin down what behavior to expect.
for wrong_size in (2, 77, 78, 79):
self.log.info("Sending a message with incorrect size of {}".format(wrong_size))
# Unmodified message should submit okay.
node.p2p.send_and_ping(msg)
# A message lying about its data size results in a disconnect when the incorrect
# data size is less than the actual size.
#
# TODO: why does behavior change at 78 bytes?
#
node.p2p.send_raw_message(self._tweak_msg_data_size(msg, wrong_size))
# For some reason unknown to me, we sometimes have to push additional data to the
# peer in order for it to realize a disconnect.
try:
node.p2p.send_message(messages.msg_ping(nonce=123123))
except IOError:
pass
node.p2p.wait_for_disconnect(timeout=10)
node.disconnect_p2ps()
node.add_p2p_connection(P2PDataStore())
# Node is still up.
conn = node.add_p2p_connection(P2PDataStore())
conn.sync_with_ping()
def test_magic_bytes(self):
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
async def swap_magic_bytes():
conn._on_data = lambda: None # Need to ignore all incoming messages from now, since they come with "invalid" magic bytes
conn.magic_bytes = b'\x00\x11\x22\x32'
# Call .result() to block until the atomic swap is complete, otherwise
# we might run into races later on
asyncio.run_coroutine_threadsafe(swap_magic_bytes(), NetworkThread.network_event_loop).result()
with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART ping']):
conn.send_message(messages.msg_ping(nonce=0xff))
conn.wait_for_disconnect(timeout=1)
self.nodes[0].disconnect_p2ps()
def test_checksum(self):
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
with self.nodes[0].assert_debug_log(['CHECKSUM ERROR (badmsg, 2 bytes), expected 78df0a04 was ffffffff']):
msg = conn.build_message(msg_unrecognized(str_data="d"))
cut_len = (
4 + # magic
12 + # command
4 #len
)
# modify checksum
msg = msg[:cut_len] + b'\xff' * 4 + msg[cut_len + 4:]
self.nodes[0].p2p.send_raw_message(msg)
conn.sync_with_ping(timeout=1)
self.nodes[0].disconnect_p2ps()
def test_size(self):
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
with self.nodes[0].assert_debug_log(['']):
msg = conn.build_message(msg_unrecognized(str_data="d"))
cut_len = (
4 + # magic
12 # command
)
# modify len to MAX_SIZE + 1
msg = msg[:cut_len] + struct.pack("<I", 0x02000000 + 1) + msg[cut_len + 4:]
self.nodes[0].p2p.send_raw_message(msg)
conn.wait_for_disconnect(timeout=1)
self.nodes[0].disconnect_p2ps()
def test_command(self):
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: ERRORS IN HEADER']):
msg = msg_unrecognized(str_data="d")
msg.command = b'\xff' * 12
msg = conn.build_message(msg)
# Modify command
msg = msg[:7] + b'\x00' + msg[7 + 1:]
self.nodes[0].p2p.send_raw_message(msg)
conn.sync_with_ping(timeout=1)
self.nodes[0].disconnect_p2ps()
def _tweak_msg_data_size(self, message, wrong_size):
"""
Return a raw message based on another message but with an incorrect data size in
the message header.
"""
raw_msg = self.node.p2p.build_message(message)
bad_size_bytes = struct.pack("<I", wrong_size)
num_header_bytes_before_size = 4 + 12
# Replace the correct data size in the message with an incorrect one.
raw_msg_with_wrong_size = (
raw_msg[:num_header_bytes_before_size] +
bad_size_bytes +
raw_msg[(num_header_bytes_before_size + len(bad_size_bytes)):]
)
assert len(raw_msg) == len(raw_msg_with_wrong_size)
return raw_msg_with_wrong_size
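# For reference, the 24-byte v1 P2P message header manipulated by the tests
# above is laid out as follows (byte offsets; regtest magic shown):
#   0-3    magic / messagestart, e.g. b'\xfa\xbf\xb5\xda'
#   4-15   command, null-padded ASCII
#   16-19  payload length, little-endian uint32
#   20-23  checksum, first 4 bytes of double-SHA256 of the payload
# A framework-independent sketch of the size tweak done in _tweak_msg_data_size:
#   >>> import struct
#   >>> hdr = b'\xfa\xbf\xb5\xda' + b'badmsg'.ljust(12, b'\x00') + struct.pack('<I', 100) + b'\x00' * 4
#   >>> hdr = hdr[:16] + struct.pack('<I', 77) + hdr[20:]   # lie about the payload size
#   >>> struct.unpack('<I', hdr[16:20])[0]
#   77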
if __name__ == '__main__':
InvalidMessagesTest().main()
|
{
"content_hash": "ceac58a4d97179ae54ef15130746f11a",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 133,
"avg_line_length": 38.13636363636363,
"alnum_prop": 0.5943980929678189,
"repo_name": "gjhiggins/vcoincore",
"id": "9876d749ff13b1431b9142b719b640c0f3b77218",
"size": "8604",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/functional/p2p_invalid_messages.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28453"
},
{
"name": "C",
"bytes": "751390"
},
{
"name": "C++",
"bytes": "7015926"
},
{
"name": "CMake",
"bytes": "28560"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "30291"
},
{
"name": "M4",
"bytes": "208246"
},
{
"name": "Makefile",
"bytes": "106414"
},
{
"name": "Objective-C",
"bytes": "2162"
},
{
"name": "Objective-C++",
"bytes": "5497"
},
{
"name": "Python",
"bytes": "1742616"
},
{
"name": "QMake",
"bytes": "798"
},
{
"name": "Scheme",
"bytes": "6044"
},
{
"name": "Shell",
"bytes": "138233"
}
],
"symlink_target": ""
}
|
import os
import re
import sys
import copy
import glob
import atexit
import tempfile
import subprocess
import shutil
import multiprocessing
import textwrap
import distutils
from distutils.errors import DistutilsError
try:
from threading import local as tlocal
except ImportError:
from dummy_threading import local as tlocal
# stores temporary directory of each thread to only create one per thread
_tdata = tlocal()
# store all created temporary directories so they can be deleted on exit
_tmpdirs = []
def clean_up_temporary_directory():
if _tmpdirs is not None:
for d in _tmpdirs:
try:
shutil.rmtree(d)
except OSError:
pass
atexit.register(clean_up_temporary_directory)
from numpy.compat import npy_load_module
__all__ = ['Configuration', 'get_numpy_include_dirs', 'default_config_dict',
'dict_append', 'appendpath', 'generate_config_py',
'get_cmd', 'allpath', 'get_mathlibs',
'terminal_has_colors', 'red_text', 'green_text', 'yellow_text',
'blue_text', 'cyan_text', 'cyg2win32', 'mingw32', 'all_strings',
'has_f_sources', 'has_cxx_sources', 'filter_sources',
'get_dependencies', 'is_local_src_dir', 'get_ext_source_files',
'get_script_files', 'get_lib_source_files', 'get_data_files',
'dot_join', 'get_frame', 'minrelpath', 'njoin',
'is_sequence', 'is_string', 'as_list', 'gpaths', 'get_language',
'quote_args', 'get_build_architecture', 'get_info', 'get_pkg_info',
'get_num_build_jobs']
class InstallableLib:
"""
Container to hold information on an installable library.
Parameters
----------
name : str
Name of the installed library.
build_info : dict
Dictionary holding build information.
target_dir : str
Absolute path specifying where to install the library.
See Also
--------
Configuration.add_installed_library
Notes
-----
The three parameters are stored as attributes with the same names.
"""
def __init__(self, name, build_info, target_dir):
self.name = name
self.build_info = build_info
self.target_dir = target_dir
def get_num_build_jobs():
"""
Get number of parallel build jobs set by the --parallel command line
argument of setup.py
If the command did not receive a setting the environment variable
NPY_NUM_BUILD_JOBS is checked. If that is unset, return the number of
processors on the system, with a maximum of 8 (to prevent
    overloading the system if there are a lot of CPUs).
Returns
-------
out : int
number of parallel jobs that can be run
"""
from numpy.distutils.core import get_distribution
try:
cpu_count = len(os.sched_getaffinity(0))
except AttributeError:
cpu_count = multiprocessing.cpu_count()
cpu_count = min(cpu_count, 8)
envjobs = int(os.environ.get("NPY_NUM_BUILD_JOBS", cpu_count))
dist = get_distribution()
# may be None during configuration
if dist is None:
return envjobs
# any of these three may have the job set, take the largest
cmdattr = (getattr(dist.get_command_obj('build'), 'parallel', None),
getattr(dist.get_command_obj('build_ext'), 'parallel', None),
getattr(dist.get_command_obj('build_clib'), 'parallel', None))
if all(x is None for x in cmdattr):
return envjobs
else:
return max(x for x in cmdattr if x is not None)
def quote_args(args):
    # don't use _nt_quote_args as it does not check whether
    # args items already have quotes or not.
args = list(args)
for i in range(len(args)):
a = args[i]
if ' ' in a and a[0] not in '"\'':
args[i] = '"%s"' % (a)
return args
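# Illustrative example of quote_args (spaces trigger quoting, already-quoted
# items are left alone):
#   >>> quote_args(['gcc', '-I/opt/my include', '"already quoted"'])
#   ['gcc', '"-I/opt/my include"', '"already quoted"']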
def allpath(name):
"Convert a /-separated pathname to one using the OS's path separator."
splitted = name.split('/')
return os.path.join(*splitted)
def rel_path(path, parent_path):
"""Return path relative to parent_path."""
# Use realpath to avoid issues with symlinked dirs (see gh-7707)
pd = os.path.realpath(os.path.abspath(parent_path))
apath = os.path.realpath(os.path.abspath(path))
if len(apath) < len(pd):
return path
if apath == pd:
return ''
if pd == apath[:len(pd)]:
assert apath[len(pd)] in [os.sep], repr((path, apath[len(pd)]))
path = apath[len(pd)+1:]
return path
def get_path_from_frame(frame, parent_path=None):
"""Return path of the module given a frame object from the call stack.
Returned path is relative to parent_path when given,
otherwise it is absolute path.
"""
# First, try to find if the file name is in the frame.
try:
caller_file = eval('__file__', frame.f_globals, frame.f_locals)
d = os.path.dirname(os.path.abspath(caller_file))
except NameError:
# __file__ is not defined, so let's try __name__. We try this second
# because setuptools spoofs __name__ to be '__main__' even though
# sys.modules['__main__'] might be something else, like easy_install(1).
caller_name = eval('__name__', frame.f_globals, frame.f_locals)
__import__(caller_name)
mod = sys.modules[caller_name]
if hasattr(mod, '__file__'):
d = os.path.dirname(os.path.abspath(mod.__file__))
else:
# we're probably running setup.py as execfile("setup.py")
# (likely we're building an egg)
d = os.path.abspath('.')
if parent_path is not None:
d = rel_path(d, parent_path)
return d or '.'
def njoin(*path):
"""Join two or more pathname components +
- convert a /-separated pathname to one using the OS's path separator.
- resolve `..` and `.` from path.
Either passing n arguments as in njoin('a','b'), or a sequence
of n names as in njoin(['a','b']) is handled, or a mixture of such arguments.
"""
paths = []
for p in path:
if is_sequence(p):
# njoin(['a', 'b'], 'c')
paths.append(njoin(*p))
else:
assert is_string(p)
paths.append(p)
path = paths
if not path:
# njoin()
joined = ''
else:
# njoin('a', 'b')
joined = os.path.join(*path)
if os.path.sep != '/':
joined = joined.replace('/', os.path.sep)
return minrelpath(joined)
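# Illustrative example of njoin (output shown for a POSIX os.sep):
#   >>> njoin('a', ['b', '..', 'c'])    # same as os.path.join('a', 'c')
#   'a/c'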
def get_mathlibs(path=None):
"""Return the MATHLIB line from numpyconfig.h
"""
if path is not None:
config_file = os.path.join(path, '_numpyconfig.h')
else:
# Look for the file in each of the numpy include directories.
dirs = get_numpy_include_dirs()
for path in dirs:
fn = os.path.join(path, '_numpyconfig.h')
if os.path.exists(fn):
config_file = fn
break
else:
raise DistutilsError('_numpyconfig.h not found in numpy include '
'dirs %r' % (dirs,))
with open(config_file) as fid:
mathlibs = []
s = '#define MATHLIB'
for line in fid:
if line.startswith(s):
value = line[len(s):].strip()
if value:
mathlibs.extend(value.split(','))
return mathlibs
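# Illustrative sketch (not part of the original module): a hypothetical
# _numpyconfig.h containing the line
#     #define MATHLIB m
# makes get_mathlibs() return ['m']; an empty value after MATHLIB yields an
# empty list.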
def minrelpath(path):
"""Resolve `..` and '.' from path.
"""
if not is_string(path):
return path
if '.' not in path:
return path
l = path.split(os.sep)
while l:
try:
i = l.index('.', 1)
except ValueError:
break
del l[i]
j = 1
while l:
try:
i = l.index('..', j)
except ValueError:
break
if l[i-1]=='..':
j += 1
else:
del l[i], l[i-1]
j = 1
if not l:
return ''
return os.sep.join(l)
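# Illustrative usage (not part of the original module; POSIX os.sep assumed):
#     >>> minrelpath('a/./b')
#     'a/b'
#     >>> minrelpath('a/b/../c')
#     'a/c'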
def sorted_glob(fileglob):
"""sorts output of python glob for https://bugs.python.org/issue30461
to allow extensions to have reproducible build results"""
return sorted(glob.glob(fileglob))
def _fix_paths(paths, local_path, include_non_existing):
assert is_sequence(paths), repr(type(paths))
new_paths = []
assert not is_string(paths), repr(paths)
for n in paths:
if is_string(n):
if '*' in n or '?' in n:
p = sorted_glob(n)
p2 = sorted_glob(njoin(local_path, n))
if p2:
new_paths.extend(p2)
elif p:
new_paths.extend(p)
else:
if include_non_existing:
new_paths.append(n)
print('could not resolve pattern in %r: %r' %
(local_path, n))
else:
n2 = njoin(local_path, n)
if os.path.exists(n2):
new_paths.append(n2)
else:
if os.path.exists(n):
new_paths.append(n)
elif include_non_existing:
new_paths.append(n)
if not os.path.exists(n):
print('non-existing path in %r: %r' %
(local_path, n))
elif is_sequence(n):
new_paths.extend(_fix_paths(n, local_path, include_non_existing))
else:
new_paths.append(n)
return [minrelpath(p) for p in new_paths]
def gpaths(paths, local_path='', include_non_existing=True):
"""Apply glob to paths and prepend local_path if needed.
"""
if is_string(paths):
paths = (paths,)
return _fix_paths(paths, local_path, include_non_existing)
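# Illustrative sketch (not part of the original module; the 'src' directory
# and its contents are hypothetical): gpaths('*.c', local_path='src') expands
# the pattern relative to 'src' (falling back to the bare pattern if nothing
# matches there), returning e.g. ['src/bar.c', 'src/foo.c'] in sorted order.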
def make_temp_file(suffix='', prefix='', text=True):
if not hasattr(_tdata, 'tempdir'):
_tdata.tempdir = tempfile.mkdtemp()
_tmpdirs.append(_tdata.tempdir)
fid, name = tempfile.mkstemp(suffix=suffix,
prefix=prefix,
dir=_tdata.tempdir,
text=text)
fo = os.fdopen(fid, 'w')
return fo, name
# Hooks for colored terminal output.
# See also https://web.archive.org/web/20100314204946/http://www.livinglogic.de/Python/ansistyle
def terminal_has_colors():
if sys.platform=='cygwin' and 'USE_COLOR' not in os.environ:
# Avoid importing curses that causes illegal operation
# with a message:
# PYTHON2 caused an invalid page fault in
# module CYGNURSES7.DLL as 015f:18bbfc28
# Details: Python 2.3.3 [GCC 3.3.1 (cygming special)]
# ssh to Win32 machine from debian
# curses.version is 2.2
# CYGWIN_98-4.10, release 1.5.7(0.109/3/2))
return 0
if hasattr(sys.stdout, 'isatty') and sys.stdout.isatty():
try:
import curses
curses.setupterm()
if (curses.tigetnum("colors") >= 0
and curses.tigetnum("pairs") >= 0
and ((curses.tigetstr("setf") is not None
and curses.tigetstr("setb") is not None)
or (curses.tigetstr("setaf") is not None
and curses.tigetstr("setab") is not None)
or curses.tigetstr("scp") is not None)):
return 1
except Exception:
pass
return 0
if terminal_has_colors():
_colour_codes = dict(black=0, red=1, green=2, yellow=3,
blue=4, magenta=5, cyan=6, white=7, default=9)
def colour_text(s, fg=None, bg=None, bold=False):
seq = []
if bold:
seq.append('1')
if fg:
fgcode = 30 + _colour_codes.get(fg.lower(), 0)
seq.append(str(fgcode))
if bg:
bgcode = 40 + _colour_codes.get(bg.lower(), 7)
seq.append(str(bgcode))
if seq:
return '\x1b[%sm%s\x1b[0m' % (';'.join(seq), s)
else:
return s
else:
def colour_text(s, fg=None, bg=None, bold=False):
return s
def default_text(s):
return colour_text(s, 'default')
def red_text(s):
return colour_text(s, 'red')
def green_text(s):
return colour_text(s, 'green')
def yellow_text(s):
return colour_text(s, 'yellow')
def cyan_text(s):
return colour_text(s, 'cyan')
def blue_text(s):
return colour_text(s, 'blue')
#########################
def cyg2win32(path):
if sys.platform=='cygwin' and path.startswith('/cygdrive'):
path = path[10] + ':' + os.path.normcase(path[11:])
return path
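# Illustrative sketch (not part of the original module): on a Cygwin Python
# (sys.platform == 'cygwin') a path such as '/cygdrive/c/Program Files' is
# rewritten to 'c:/Program Files'; on every other platform the path is
# returned unchanged.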
def mingw32():
"""Return true when using mingw32 environment.
"""
if sys.platform=='win32':
if os.environ.get('OSTYPE', '')=='msys':
return True
if os.environ.get('MSYSTEM', '')=='MINGW32':
return True
return False
def msvc_runtime_version():
"Return version of MSVC runtime library, as defined by __MSC_VER__ macro"
msc_pos = sys.version.find('MSC v.')
if msc_pos != -1:
msc_ver = int(sys.version[msc_pos+6:msc_pos+10])
else:
msc_ver = None
return msc_ver
def msvc_runtime_library():
"Return name of MSVC runtime library if Python was built with MSVC >= 7"
ver = msvc_runtime_major()
if ver:
if ver < 140:
return "msvcr%i" % ver
else:
return "vcruntime%i" % ver
else:
return None
def msvc_runtime_major():
"Return major version of MSVC runtime coded like get_build_msvc_version"
major = {1300: 70, # MSVC 7.0
1310: 71, # MSVC 7.1
1400: 80, # MSVC 8
1500: 90, # MSVC 9 (aka 2008)
1600: 100, # MSVC 10 (aka 2010)
1900: 140, # MSVC 14 (aka 2015)
}.get(msvc_runtime_version(), None)
return major
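# Illustrative sketch (not part of the original module): with a Python built
# by MSVC 14 (sys.version containing 'MSC v.1900'), msvc_runtime_version()
# returns 1900, msvc_runtime_major() maps that to 140, and
# msvc_runtime_library() therefore returns 'vcruntime140'.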
#########################
#XXX need support for .C that is also C++
cxx_ext_match = re.compile(r'.*[.](cpp|cxx|cc)\Z', re.I).match
fortran_ext_match = re.compile(r'.*[.](f90|f95|f77|for|ftn|f)\Z', re.I).match
f90_ext_match = re.compile(r'.*[.](f90|f95)\Z', re.I).match
f90_module_name_match = re.compile(r'\s*module\s*(?P<name>[\w_]+)', re.I).match
def _get_f90_modules(source):
"""Return a list of Fortran f90 module names that
given source file defines.
"""
if not f90_ext_match(source):
return []
modules = []
with open(source, 'r') as f:
for line in f:
m = f90_module_name_match(line)
if m:
name = m.group('name')
modules.append(name)
# break # XXX can we assume that there is one module per file?
return modules
def is_string(s):
return isinstance(s, str)
def all_strings(lst):
"""Return True if all items in lst are string objects. """
for item in lst:
if not is_string(item):
return False
return True
def is_sequence(seq):
if is_string(seq):
return False
try:
len(seq)
except Exception:
return False
return True
def is_glob_pattern(s):
return is_string(s) and ('*' in s or '?' in s)
def as_list(seq):
if is_sequence(seq):
return list(seq)
else:
return [seq]
def get_language(sources):
# not used in numpy/scipy packages, use build_ext.detect_language instead
"""Determine language value (c,f77,f90) from sources """
language = None
for source in sources:
if isinstance(source, str):
if f90_ext_match(source):
language = 'f90'
break
elif fortran_ext_match(source):
language = 'f77'
return language
def has_f_sources(sources):
"""Return True if sources contains Fortran files """
for source in sources:
if fortran_ext_match(source):
return True
return False
def has_cxx_sources(sources):
"""Return True if sources contains C++ files """
for source in sources:
if cxx_ext_match(source):
return True
return False
def filter_sources(sources):
"""Return four lists of filenames containing
C, C++, Fortran, and Fortran 90 module sources,
respectively.
"""
c_sources = []
cxx_sources = []
f_sources = []
fmodule_sources = []
for source in sources:
if fortran_ext_match(source):
modules = _get_f90_modules(source)
if modules:
fmodule_sources.append(source)
else:
f_sources.append(source)
elif cxx_ext_match(source):
cxx_sources.append(source)
else:
c_sources.append(source)
return c_sources, cxx_sources, f_sources, fmodule_sources
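# Illustrative usage (not part of the original module; filenames are
# hypothetical): for ['core.c', 'wrap.cpp', 'legacy.f'], filter_sources
# returns (['core.c'], ['wrap.cpp'], ['legacy.f'], []).  A .f90/.f95 file
# only lands in the fourth (module) list when it exists on disk and defines
# a Fortran module, because _get_f90_modules opens and scans it.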
def _get_headers(directory_list):
# get *.h files from list of directories
headers = []
for d in directory_list:
head = sorted_glob(os.path.join(d, "*.h")) #XXX: *.hpp files??
headers.extend(head)
return headers
def _get_directories(list_of_sources):
# get unique directories from list of sources.
direcs = []
for f in list_of_sources:
d = os.path.split(f)
if d[0] != '' and not d[0] in direcs:
direcs.append(d[0])
return direcs
def _commandline_dep_string(cc_args, extra_postargs, pp_opts):
"""
Return commandline representation used to determine if a file needs
to be recompiled
"""
cmdline = 'commandline: '
cmdline += ' '.join(cc_args)
cmdline += ' '.join(extra_postargs)
cmdline += ' '.join(pp_opts) + '\n'
return cmdline
def get_dependencies(sources):
#XXX scan sources for include statements
return _get_headers(_get_directories(sources))
def is_local_src_dir(directory):
"""Return true if directory is local directory.
"""
if not is_string(directory):
return False
abs_dir = os.path.abspath(directory)
c = os.path.commonprefix([os.getcwd(), abs_dir])
new_dir = abs_dir[len(c):].split(os.sep)
if new_dir and not new_dir[0]:
new_dir = new_dir[1:]
if new_dir and new_dir[0]=='build':
return False
new_dir = os.sep.join(new_dir)
return os.path.isdir(new_dir)
def general_source_files(top_path):
pruned_directories = {'CVS':1, '.svn':1, 'build':1}
prune_file_pat = re.compile(r'(?:[~#]|\.py[co]|\.o)$')
for dirpath, dirnames, filenames in os.walk(top_path, topdown=True):
pruned = [ d for d in dirnames if d not in pruned_directories ]
dirnames[:] = pruned
for f in filenames:
if not prune_file_pat.search(f):
yield os.path.join(dirpath, f)
def general_source_directories_files(top_path):
"""Return a directory name relative to top_path and
files contained.
"""
pruned_directories = ['CVS', '.svn', 'build']
prune_file_pat = re.compile(r'(?:[~#]|\.py[co]|\.o)$')
for dirpath, dirnames, filenames in os.walk(top_path, topdown=True):
pruned = [ d for d in dirnames if d not in pruned_directories ]
dirnames[:] = pruned
for d in dirnames:
dpath = os.path.join(dirpath, d)
rpath = rel_path(dpath, top_path)
files = []
for f in os.listdir(dpath):
fn = os.path.join(dpath, f)
if os.path.isfile(fn) and not prune_file_pat.search(fn):
files.append(fn)
yield rpath, files
dpath = top_path
rpath = rel_path(dpath, top_path)
filenames = [os.path.join(dpath, f) for f in os.listdir(dpath) \
if not prune_file_pat.search(f)]
files = [f for f in filenames if os.path.isfile(f)]
yield rpath, files
def get_ext_source_files(ext):
# Get sources and any include files in the same directory.
filenames = []
sources = [_m for _m in ext.sources if is_string(_m)]
filenames.extend(sources)
filenames.extend(get_dependencies(sources))
for d in ext.depends:
if is_local_src_dir(d):
filenames.extend(list(general_source_files(d)))
elif os.path.isfile(d):
filenames.append(d)
return filenames
def get_script_files(scripts):
scripts = [_m for _m in scripts if is_string(_m)]
return scripts
def get_lib_source_files(lib):
filenames = []
sources = lib[1].get('sources', [])
sources = [_m for _m in sources if is_string(_m)]
filenames.extend(sources)
filenames.extend(get_dependencies(sources))
depends = lib[1].get('depends', [])
for d in depends:
if is_local_src_dir(d):
filenames.extend(list(general_source_files(d)))
elif os.path.isfile(d):
filenames.append(d)
return filenames
def get_shared_lib_extension(is_python_ext=False):
"""Return the correct file extension for shared libraries.
Parameters
----------
is_python_ext : bool, optional
Whether the shared library is a Python extension. Default is False.
Returns
-------
so_ext : str
The shared library extension.
Notes
-----
For Python shared libs, `so_ext` will typically be '.so' on Linux and OS X,
and '.pyd' on Windows. For Python >= 3.2 `so_ext` has a tag prepended on
POSIX systems according to PEP 3149. For Python 3.2 this is implemented on
Linux, but not on OS X.
"""
confvars = distutils.sysconfig.get_config_vars()
# SO is deprecated in 3.3.1, use EXT_SUFFIX instead
so_ext = confvars.get('EXT_SUFFIX', None)
if so_ext is None:
so_ext = confvars.get('SO', '')
if not is_python_ext:
# hardcode known values, config vars (including SHLIB_SUFFIX) are
# unreliable (see #3182)
# darwin, windows and debug linux are wrong in 3.3.1 and older
if (sys.platform.startswith('linux') or
sys.platform.startswith('gnukfreebsd')):
so_ext = '.so'
elif sys.platform.startswith('darwin'):
so_ext = '.dylib'
elif sys.platform.startswith('win'):
so_ext = '.dll'
else:
# fall back to config vars for unknown platforms
# fix long extension for Python >=3.2, see PEP 3149.
if 'SOABI' in confvars:
# Does nothing unless SOABI config var exists
so_ext = so_ext.replace('.' + confvars.get('SOABI'), '', 1)
return so_ext
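# Illustrative sketch (not part of the original module): with the default
# is_python_ext=False the hardcoded values above apply ('.so' on Linux,
# '.dylib' on Mac OS X, '.dll' on Windows), while
# get_shared_lib_extension(is_python_ext=True) returns whatever EXT_SUFFIX
# (or the legacy SO config variable) reports, typically something like
# '.cpython-37m-x86_64-linux-gnu.so' on Linux.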
def get_data_files(data):
if is_string(data):
return [data]
sources = data[1]
filenames = []
for s in sources:
if hasattr(s, '__call__'):
continue
if is_local_src_dir(s):
filenames.extend(list(general_source_files(s)))
elif is_string(s):
if os.path.isfile(s):
filenames.append(s)
else:
print('Not existing data file:', s)
else:
raise TypeError(repr(s))
return filenames
def dot_join(*args):
return '.'.join([a for a in args if a])
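# Illustrative usage (not part of the original module): empty components are
# skipped, so
#     >>> dot_join('numpy', '', 'distutils')
#     'numpy.distutils'
#     >>> dot_join('numpy', 'distutils', 'misc_util')
#     'numpy.distutils.misc_util'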
def get_frame(level=0):
"""Return frame object from call stack with given level.
"""
try:
return sys._getframe(level+1)
except AttributeError:
frame = sys.exc_info()[2].tb_frame
for _ in range(level+1):
frame = frame.f_back
return frame
######################
class Configuration:
_list_keys = ['packages', 'ext_modules', 'data_files', 'include_dirs',
'libraries', 'headers', 'scripts', 'py_modules',
'installed_libraries', 'define_macros']
_dict_keys = ['package_dir', 'installed_pkg_config']
_extra_keys = ['name', 'version']
numpy_include_dirs = []
def __init__(self,
package_name=None,
parent_name=None,
top_path=None,
package_path=None,
caller_level=1,
setup_name='setup.py',
**attrs):
"""Construct configuration instance of a package.
package_name -- name of the package
Ex.: 'distutils'
parent_name -- name of the parent package
Ex.: 'numpy'
top_path -- directory of the toplevel package
Ex.: the directory where the numpy package source sits
package_path -- directory of package. Will be computed by magic from the
directory of the caller module if not specified
Ex.: the directory where numpy.distutils is
caller_level -- frame level to caller namespace, internal parameter.
"""
self.name = dot_join(parent_name, package_name)
self.version = None
caller_frame = get_frame(caller_level)
self.local_path = get_path_from_frame(caller_frame, top_path)
# local_path -- directory of a file (usually setup.py) that
# defines a configuration() function.
if top_path is None:
top_path = self.local_path
self.local_path = ''
if package_path is None:
package_path = self.local_path
elif os.path.isdir(njoin(self.local_path, package_path)):
package_path = njoin(self.local_path, package_path)
if not os.path.isdir(package_path or '.'):
raise ValueError("%r is not a directory" % (package_path,))
self.top_path = top_path
self.package_path = package_path
# this is the relative path in the installed package
self.path_in_package = os.path.join(*self.name.split('.'))
self.list_keys = self._list_keys[:]
self.dict_keys = self._dict_keys[:]
for n in self.list_keys:
v = copy.copy(attrs.get(n, []))
setattr(self, n, as_list(v))
for n in self.dict_keys:
v = copy.copy(attrs.get(n, {}))
setattr(self, n, v)
known_keys = self.list_keys + self.dict_keys
self.extra_keys = self._extra_keys[:]
for n in attrs.keys():
if n in known_keys:
continue
a = attrs[n]
setattr(self, n, a)
if isinstance(a, list):
self.list_keys.append(n)
elif isinstance(a, dict):
self.dict_keys.append(n)
else:
self.extra_keys.append(n)
if os.path.exists(njoin(package_path, '__init__.py')):
self.packages.append(self.name)
self.package_dir[self.name] = package_path
self.options = dict(
ignore_setup_xxx_py = False,
assume_default_configuration = False,
delegate_options_to_subpackages = False,
quiet = False,
)
caller_instance = None
for i in range(1, 3):
try:
f = get_frame(i)
except ValueError:
break
try:
caller_instance = eval('self', f.f_globals, f.f_locals)
break
except NameError:
pass
if isinstance(caller_instance, self.__class__):
if caller_instance.options['delegate_options_to_subpackages']:
self.set_options(**caller_instance.options)
self.setup_name = setup_name
def todict(self):
"""
Return a dictionary compatible with the keyword arguments of distutils
setup function.
Examples
--------
>>> setup(**config.todict()) #doctest: +SKIP
"""
self._optimize_data_files()
d = {}
known_keys = self.list_keys + self.dict_keys + self.extra_keys
for n in known_keys:
a = getattr(self, n)
if a:
d[n] = a
return d
def info(self, message):
if not self.options['quiet']:
print(message)
def warn(self, message):
sys.stderr.write('Warning: %s\n' % (message,))
def set_options(self, **options):
"""
Configure Configuration instance.
The following options are available:
- ignore_setup_xxx_py
- assume_default_configuration
- delegate_options_to_subpackages
- quiet
"""
for key, value in options.items():
if key in self.options:
self.options[key] = value
else:
raise ValueError('Unknown option: '+key)
def get_distribution(self):
"""Return the distutils distribution object for self."""
from numpy.distutils.core import get_distribution
return get_distribution()
def _wildcard_get_subpackage(self, subpackage_name,
parent_name,
caller_level = 1):
l = subpackage_name.split('.')
subpackage_path = njoin([self.local_path]+l)
dirs = [_m for _m in sorted_glob(subpackage_path) if os.path.isdir(_m)]
config_list = []
for d in dirs:
if not os.path.isfile(njoin(d, '__init__.py')):
continue
if 'build' in d.split(os.sep):
continue
n = '.'.join(d.split(os.sep)[-len(l):])
c = self.get_subpackage(n,
parent_name = parent_name,
caller_level = caller_level+1)
config_list.extend(c)
return config_list
def _get_configuration_from_setup_py(self, setup_py,
subpackage_name,
subpackage_path,
parent_name,
caller_level = 1):
# In case setup_py imports local modules:
sys.path.insert(0, os.path.dirname(setup_py))
try:
setup_name = os.path.splitext(os.path.basename(setup_py))[0]
n = dot_join(self.name, subpackage_name, setup_name)
setup_module = npy_load_module('_'.join(n.split('.')),
setup_py,
('.py', 'U', 1))
if not hasattr(setup_module, 'configuration'):
if not self.options['assume_default_configuration']:
self.warn('Assuming default configuration '\
'(%s does not define configuration())'\
% (setup_module))
config = Configuration(subpackage_name, parent_name,
self.top_path, subpackage_path,
caller_level = caller_level + 1)
else:
pn = dot_join(*([parent_name] + subpackage_name.split('.')[:-1]))
args = (pn,)
if setup_module.configuration.__code__.co_argcount > 1:
args = args + (self.top_path,)
config = setup_module.configuration(*args)
if config.name!=dot_join(parent_name, subpackage_name):
self.warn('Subpackage %r configuration returned as %r' % \
(dot_join(parent_name, subpackage_name), config.name))
finally:
del sys.path[0]
return config
def get_subpackage(self,subpackage_name,
subpackage_path=None,
parent_name=None,
caller_level = 1):
"""Return list of subpackage configurations.
Parameters
----------
subpackage_name : str or None
Name of the subpackage to get the configuration. '*' in
subpackage_name is handled as a wildcard.
subpackage_path : str
If None, then the path is assumed to be the local path plus the
subpackage_name. If a setup.py file is not found in the
subpackage_path, then a default configuration is used.
parent_name : str
Parent name.
"""
if subpackage_name is None:
if subpackage_path is None:
raise ValueError(
"either subpackage_name or subpackage_path must be specified")
subpackage_name = os.path.basename(subpackage_path)
# handle wildcards
l = subpackage_name.split('.')
if subpackage_path is None and '*' in subpackage_name:
return self._wildcard_get_subpackage(subpackage_name,
parent_name,
caller_level = caller_level+1)
assert '*' not in subpackage_name, repr((subpackage_name, subpackage_path, parent_name))
if subpackage_path is None:
subpackage_path = njoin([self.local_path] + l)
else:
subpackage_path = njoin([subpackage_path] + l[:-1])
subpackage_path = self.paths([subpackage_path])[0]
setup_py = njoin(subpackage_path, self.setup_name)
if not self.options['ignore_setup_xxx_py']:
if not os.path.isfile(setup_py):
setup_py = njoin(subpackage_path,
'setup_%s.py' % (subpackage_name))
if not os.path.isfile(setup_py):
if not self.options['assume_default_configuration']:
self.warn('Assuming default configuration '\
'(%s/{setup_%s,setup}.py was not found)' \
% (os.path.dirname(setup_py), subpackage_name))
config = Configuration(subpackage_name, parent_name,
self.top_path, subpackage_path,
caller_level = caller_level+1)
else:
config = self._get_configuration_from_setup_py(
setup_py,
subpackage_name,
subpackage_path,
parent_name,
caller_level = caller_level + 1)
if config:
return [config]
else:
return []
def add_subpackage(self,subpackage_name,
subpackage_path=None,
standalone = False):
"""Add a sub-package to the current Configuration instance.
This is useful in a setup.py script for adding sub-packages to a
package.
Parameters
----------
subpackage_name : str
name of the subpackage
subpackage_path : str
if given, the path such that the subpackage is located at
subpackage_path / subpackage_name. If None, the subpackage is
assumed to be located in the local path / subpackage_name.
standalone : bool
"""
if standalone:
parent_name = None
else:
parent_name = self.name
config_list = self.get_subpackage(subpackage_name, subpackage_path,
parent_name = parent_name,
caller_level = 2)
if not config_list:
self.warn('No configuration returned, assuming unavailable.')
for config in config_list:
d = config
if isinstance(config, Configuration):
d = config.todict()
assert isinstance(d, dict), repr(type(d))
self.info('Appending %s configuration to %s' \
% (d.get('name'), self.name))
self.dict_append(**d)
dist = self.get_distribution()
if dist is not None:
self.warn('distutils distribution has been initialized,'\
' it may be too late to add a subpackage '+ subpackage_name)
def add_data_dir(self, data_path):
"""Recursively add files under data_path to data_files list.
Recursively add files under data_path to the list of data_files to be
installed (and distributed). The data_path can be either a relative
path-name, or an absolute path-name, or a 2-tuple where the first
argument shows where in the install directory the data directory
should be installed to.
Parameters
----------
data_path : seq or str
Argument can be either
* 2-sequence (<datadir suffix>, <path to data directory>)
* path to data directory where python datadir suffix defaults
to package dir.
Notes
-----
Rules for installation paths::
foo/bar -> (foo/bar, foo/bar) -> parent/foo/bar
(gun, foo/bar) -> parent/gun
foo/* -> (foo/a, foo/a), (foo/b, foo/b) -> parent/foo/a, parent/foo/b
(gun, foo/*) -> (gun, foo/a), (gun, foo/b) -> gun
(gun/*, foo/*) -> parent/gun/a, parent/gun/b
/foo/bar -> (bar, /foo/bar) -> parent/bar
(gun, /foo/bar) -> parent/gun
(fun/*/gun/*, sun/foo/bar) -> parent/fun/foo/gun/bar
Examples
--------
For example suppose the source directory contains fun/foo.dat and
fun/bar/car.dat:
>>> self.add_data_dir('fun') #doctest: +SKIP
>>> self.add_data_dir(('sun', 'fun')) #doctest: +SKIP
>>> self.add_data_dir(('gun', '/full/path/to/fun'))#doctest: +SKIP
Will install data-files to the locations::
<package install directory>/
  fun/
    foo.dat
    bar/
      car.dat
  sun/
    foo.dat
    bar/
      car.dat
  gun/
    foo.dat
    car.dat
"""
if is_sequence(data_path):
d, data_path = data_path
else:
d = None
if is_sequence(data_path):
[self.add_data_dir((d, p)) for p in data_path]
return
if not is_string(data_path):
raise TypeError("not a string: %r" % (data_path,))
if d is None:
if os.path.isabs(data_path):
return self.add_data_dir((os.path.basename(data_path), data_path))
return self.add_data_dir((data_path, data_path))
paths = self.paths(data_path, include_non_existing=False)
if is_glob_pattern(data_path):
if is_glob_pattern(d):
pattern_list = allpath(d).split(os.sep)
pattern_list.reverse()
# /a/*//b/ -> /a/*/b
rl = list(range(len(pattern_list)-1)); rl.reverse()
for i in rl:
if not pattern_list[i]:
del pattern_list[i]
#
for path in paths:
if not os.path.isdir(path):
print('Not a directory, skipping', path)
continue
rpath = rel_path(path, self.local_path)
path_list = rpath.split(os.sep)
path_list.reverse()
target_list = []
i = 0
for s in pattern_list:
if is_glob_pattern(s):
if i>=len(path_list):
raise ValueError('cannot fill pattern %r with %r' \
% (d, path))
target_list.append(path_list[i])
else:
assert s==path_list[i], repr((s, path_list[i], data_path, d, path, rpath))
target_list.append(s)
i += 1
if path_list[i:]:
self.warn('mismatch of pattern_list=%s and path_list=%s'\
% (pattern_list, path_list))
target_list.reverse()
self.add_data_dir((os.sep.join(target_list), path))
else:
for path in paths:
self.add_data_dir((d, path))
return
assert not is_glob_pattern(d), repr(d)
dist = self.get_distribution()
if dist is not None and dist.data_files is not None:
data_files = dist.data_files
else:
data_files = self.data_files
for path in paths:
for d1, f in list(general_source_directories_files(path)):
target_path = os.path.join(self.path_in_package, d, d1)
data_files.append((target_path, f))
def _optimize_data_files(self):
data_dict = {}
for p, files in self.data_files:
if p not in data_dict:
data_dict[p] = set()
for f in files:
data_dict[p].add(f)
self.data_files[:] = [(p, list(files)) for p, files in data_dict.items()]
def add_data_files(self,*files):
"""Add data files to configuration data_files.
Parameters
----------
files : sequence
Argument(s) can be either
* 2-sequence (<datadir prefix>,<path to data file(s)>)
* paths to data files where python datadir prefix defaults
to package dir.
Notes
-----
The form of each element of the files sequence is very flexible
allowing many combinations of where to get the files from the package
and where they should ultimately be installed on the system. The most
basic usage is for an element of the files argument sequence to be a
simple filename. This will cause that file from the local path to be
installed to the installation path of the self.name package (package
path). The file argument can also be a relative path in which case the
entire relative path will be installed into the package directory.
Finally, the file can be an absolute path name in which case the file
will be found at the absolute path name but installed to the package
path.
This basic behavior can be augmented by passing a 2-tuple in as the
file argument. The first element of the tuple should specify the
relative path (under the package install directory) where the
remaining sequence of files should be installed to (it has nothing to
do with the file-names in the source distribution). The second element
of the tuple is the sequence of files that should be installed. The
files in this sequence can be filenames, relative paths, or absolute
paths. For absolute paths the file will be installed in the top-level
package installation directory (regardless of the first argument).
Filenames and relative path names will be installed in the package
install directory under the path name given as the first element of
the tuple.
Rules for installation paths:
#. file.txt -> (., file.txt)-> parent/file.txt
#. foo/file.txt -> (foo, foo/file.txt) -> parent/foo/file.txt
#. /foo/bar/file.txt -> (., /foo/bar/file.txt) -> parent/file.txt
#. ``*``.txt -> parent/a.txt, parent/b.txt
#. foo/``*``.txt -> parent/foo/a.txt, parent/foo/b.txt
#. ``*/*.txt`` -> (``*``, ``*``/``*``.txt) -> parent/c/a.txt, parent/d/b.txt
#. (sun, file.txt) -> parent/sun/file.txt
#. (sun, bar/file.txt) -> parent/sun/file.txt
#. (sun, /foo/bar/file.txt) -> parent/sun/file.txt
#. (sun, ``*``.txt) -> parent/sun/a.txt, parent/sun/b.txt
#. (sun, bar/``*``.txt) -> parent/sun/a.txt, parent/sun/b.txt
#. (sun/``*``, ``*``/``*``.txt) -> parent/sun/c/a.txt, parent/d/b.txt
An additional feature is that the path to a data-file can actually be
a function that takes no arguments and returns the actual path(s) to
the data-files. This is useful when the data files are generated while
building the package.
Examples
--------
Add files to the list of data_files to be included with the package.
>>> self.add_data_files('foo.dat',
... ('fun', ['gun.dat', 'nun/pun.dat', '/tmp/sun.dat']),
... 'bar/cat.dat',
... '/full/path/to/can.dat') #doctest: +SKIP
will install these data files to::
<package install directory>/
  foo.dat
  fun/
    gun.dat
    nun/
      pun.dat
  sun.dat
  bar/
    cat.dat
  can.dat
where <package install directory> is the package (or sub-package)
directory such as '/usr/lib/python2.4/site-packages/mypackage'
('C:\\Python2.4\\Lib\\site-packages\\mypackage') or
'/usr/lib/python2.4/site-packages/mypackage/mysubpackage'
('C:\\Python2.4\\Lib\\site-packages\\mypackage\\mysubpackage').
"""
if len(files)>1:
for f in files:
self.add_data_files(f)
return
assert len(files)==1
if is_sequence(files[0]):
d, files = files[0]
else:
d = None
if is_string(files):
filepat = files
elif is_sequence(files):
if len(files)==1:
filepat = files[0]
else:
for f in files:
self.add_data_files((d, f))
return
else:
raise TypeError(repr(type(files)))
if d is None:
if hasattr(filepat, '__call__'):
d = ''
elif os.path.isabs(filepat):
d = ''
else:
d = os.path.dirname(filepat)
self.add_data_files((d, files))
return
paths = self.paths(filepat, include_non_existing=False)
if is_glob_pattern(filepat):
if is_glob_pattern(d):
pattern_list = d.split(os.sep)
pattern_list.reverse()
for path in paths:
path_list = path.split(os.sep)
path_list.reverse()
path_list.pop() # filename
target_list = []
i = 0
for s in pattern_list:
if is_glob_pattern(s):
target_list.append(path_list[i])
i += 1
else:
target_list.append(s)
target_list.reverse()
self.add_data_files((os.sep.join(target_list), path))
else:
self.add_data_files((d, paths))
return
assert not is_glob_pattern(d), repr((d, filepat))
dist = self.get_distribution()
if dist is not None and dist.data_files is not None:
data_files = dist.data_files
else:
data_files = self.data_files
data_files.append((os.path.join(self.path_in_package, d), paths))
### XXX Implement add_py_modules
def add_define_macros(self, macros):
"""Add define macros to configuration
Add the given sequence of macro name and value duples to the beginning
of the define_macros list This list will be visible to all extension
modules of the current package.
"""
dist = self.get_distribution()
if dist is not None:
if not hasattr(dist, 'define_macros'):
dist.define_macros = []
dist.define_macros.extend(macros)
else:
self.define_macros.extend(macros)
def add_include_dirs(self,*paths):
"""Add paths to configuration include directories.
Add the given sequence of paths to the beginning of the include_dirs
list. This list will be visible to all extension modules of the
current package.
"""
include_dirs = self.paths(paths)
dist = self.get_distribution()
if dist is not None:
if dist.include_dirs is None:
dist.include_dirs = []
dist.include_dirs.extend(include_dirs)
else:
self.include_dirs.extend(include_dirs)
def add_headers(self,*files):
"""Add installable headers to configuration.
Add the given sequence of files to the beginning of the headers list.
By default, headers will be installed under the
<python-include>/<self.name.replace('.','/')>/ directory. If an item of files
is a tuple, then its first argument specifies the actual installation
location relative to the <python-include> path.
Parameters
----------
files : str or seq
Argument(s) can be either:
* 2-sequence (<includedir suffix>,<path to header file(s)>)
* path(s) to header file(s) where python includedir suffix will
default to package name.
"""
headers = []
for path in files:
if is_string(path):
[headers.append((self.name, p)) for p in self.paths(path)]
else:
if not isinstance(path, (tuple, list)) or len(path) != 2:
raise TypeError(repr(path))
[headers.append((path[0], p)) for p in self.paths(path[1])]
dist = self.get_distribution()
if dist is not None:
if dist.headers is None:
dist.headers = []
dist.headers.extend(headers)
else:
self.headers.extend(headers)
def paths(self,*paths,**kws):
"""Apply glob to paths and prepend local_path if needed.
Applies glob.glob(...) to each path in the sequence (if needed) and
pre-pends the local_path if needed. Because this is called on all
source lists, this allows wildcard characters to be specified in lists
of sources for extension modules and libraries and scripts and allows
path-names be relative to the source directory.
"""
include_non_existing = kws.get('include_non_existing', True)
return gpaths(paths,
local_path = self.local_path,
include_non_existing=include_non_existing)
def _fix_paths_dict(self, kw):
for k in kw.keys():
v = kw[k]
if k in ['sources', 'depends', 'include_dirs', 'library_dirs',
'module_dirs', 'extra_objects']:
new_v = self.paths(v)
kw[k] = new_v
def add_extension(self,name,sources,**kw):
"""Add extension to configuration.
Create and add an Extension instance to the ext_modules list. This
method also takes the following optional keyword arguments that are
passed on to the Extension constructor.
Parameters
----------
name : str
name of the extension
sources : seq
list of the sources. The list of sources may contain functions
(called source generators) which must take an extension instance
and a build directory as inputs and return a source file or list of
source files or None. If None is returned then no sources are
generated. If the Extension instance has no sources after
processing all source generators, then no extension module is
built.
include_dirs :
define_macros :
undef_macros :
library_dirs :
libraries :
runtime_library_dirs :
extra_objects :
extra_compile_args :
extra_link_args :
extra_f77_compile_args :
extra_f90_compile_args :
export_symbols :
swig_opts :
depends :
The depends list contains paths to files or directories that the
sources of the extension module depend on. If any path in the
depends list is newer than the extension module, then the module
will be rebuilt.
language :
f2py_options :
module_dirs :
extra_info : dict or list
dict or list of dicts of keywords to be appended to the keyword arguments.
Notes
-----
The self.paths(...) method is applied to all lists that may contain
paths.
"""
ext_args = copy.copy(kw)
ext_args['name'] = dot_join(self.name, name)
ext_args['sources'] = sources
if 'extra_info' in ext_args:
extra_info = ext_args['extra_info']
del ext_args['extra_info']
if isinstance(extra_info, dict):
extra_info = [extra_info]
for info in extra_info:
assert isinstance(info, dict), repr(info)
dict_append(ext_args,**info)
self._fix_paths_dict(ext_args)
# Resolve out-of-tree dependencies
libraries = ext_args.get('libraries', [])
libnames = []
ext_args['libraries'] = []
for libname in libraries:
if isinstance(libname, tuple):
self._fix_paths_dict(libname[1])
# Handle library names of the form libname@relative/path/to/library
if '@' in libname:
lname, lpath = libname.split('@', 1)
lpath = os.path.abspath(njoin(self.local_path, lpath))
if os.path.isdir(lpath):
c = self.get_subpackage(None, lpath,
caller_level = 2)
if isinstance(c, Configuration):
c = c.todict()
for l in [l[0] for l in c.get('libraries', [])]:
llname = l.split('__OF__', 1)[0]
if llname == lname:
c.pop('name', None)
dict_append(ext_args,**c)
break
continue
libnames.append(libname)
ext_args['libraries'] = libnames + ext_args['libraries']
ext_args['define_macros'] = \
self.define_macros + ext_args.get('define_macros', [])
from numpy.distutils.core import Extension
ext = Extension(**ext_args)
self.ext_modules.append(ext)
dist = self.get_distribution()
if dist is not None:
self.warn('distutils distribution has been initialized,'\
' it may be too late to add an extension '+name)
return ext
def add_library(self,name,sources,**build_info):
"""
Add library to configuration.
Parameters
----------
name : str
Name of the library.
sources : sequence
List of the sources. The list of sources may contain functions
(called source generators) which must take an extension instance
and a build directory as inputs and return a source file or list of
source files or None. If None is returned then no sources are
generated. If the Extension instance has no sources after
processing all source generators, then no extension module is
built.
build_info : dict, optional
The following keys are allowed:
* depends
* macros
* include_dirs
* extra_compiler_args
* extra_f77_compile_args
* extra_f90_compile_args
* f2py_options
* language
"""
self._add_library(name, sources, None, build_info)
dist = self.get_distribution()
if dist is not None:
self.warn('distutils distribution has been initialized,'\
' it may be too late to add a library '+ name)
def _add_library(self, name, sources, install_dir, build_info):
"""Common implementation for add_library and add_installed_library. Do
not use directly"""
build_info = copy.copy(build_info)
build_info['sources'] = sources
# Sometimes 'depends' is not set to an empty list by default, and if
# it is not given to add_library, distutils barfs (#1134)
if 'depends' not in build_info:
build_info['depends'] = []
self._fix_paths_dict(build_info)
# Add to libraries list so that it is built with build_clib
self.libraries.append((name, build_info))
def add_installed_library(self, name, sources, install_dir, build_info=None):
"""
Similar to add_library, but the specified library is installed.
Most C libraries used with `distutils` are only used to build python
extensions, but libraries built through this method will be installed
so that they can be reused by third-party packages.
Parameters
----------
name : str
Name of the installed library.
sources : sequence
List of the library's source files. See `add_library` for details.
install_dir : str
Path to install the library, relative to the current sub-package.
build_info : dict, optional
The following keys are allowed:
* depends
* macros
* include_dirs
* extra_compiler_args
* extra_f77_compile_args
* extra_f90_compile_args
* f2py_options
* language
Returns
-------
None
See Also
--------
add_library, add_npy_pkg_config, get_info
Notes
-----
The best way to encode the options required to link against the specified
C libraries is to use a "libname.ini" file, and use `get_info` to
retrieve the required options (see `add_npy_pkg_config` for more
information).
"""
if not build_info:
build_info = {}
install_dir = os.path.join(self.package_path, install_dir)
self._add_library(name, sources, install_dir, build_info)
self.installed_libraries.append(InstallableLib(name, build_info, install_dir))
def add_npy_pkg_config(self, template, install_dir, subst_dict=None):
"""
Generate and install a npy-pkg config file from a template.
The config file generated from `template` is installed in the
given install directory, using `subst_dict` for variable substitution.
Parameters
----------
template : str
The path of the template, relative to the current package path.
install_dir : str
Where to install the npy-pkg config file, relative to the current
package path.
subst_dict : dict, optional
If given, any string of the form ``@key@`` will be replaced by
``subst_dict[key]`` in the template file when installed. The install
prefix is always available through the variable ``@prefix@``, since the
install prefix is not easy to get reliably from setup.py.
See also
--------
add_installed_library, get_info
Notes
-----
This works for both standard installs and in-place builds, i.e. the
``@prefix@`` refers to the source directory for in-place builds.
Examples
--------
::
config.add_npy_pkg_config('foo.ini.in', 'lib', {'foo': bar})
Assuming the foo.ini.in file has the following content::
[meta]
Name=@foo@
Version=1.0
Description=dummy description
[default]
Cflags=-I@prefix@/include
Libs=
The generated file will have the following content::
[meta]
Name=bar
Version=1.0
Description=dummy description
[default]
Cflags=-Iprefix_dir/include
Libs=
and will be installed as foo.ini in the 'lib' subpath.
When cross-compiling with numpy distutils, it might be necessary to
use modified npy-pkg-config files. Using the default/generated files
will link with the host libraries (i.e. libnpymath.a). For
cross-compilation you of course need to link with target libraries,
while using the host Python installation.
You can copy out the numpy/core/lib/npy-pkg-config directory, add a
pkgdir value to the .ini files and set NPY_PKG_CONFIG_PATH environment
variable to point to the directory with the modified npy-pkg-config
files.
Example npymath.ini modified for cross-compilation::
[meta]
Name=npymath
Description=Portable, core math library implementing C99 standard
Version=0.1
[variables]
pkgname=numpy.core
pkgdir=/build/arm-linux-gnueabi/sysroot/usr/lib/python3.7/site-packages/numpy/core
prefix=${pkgdir}
libdir=${prefix}/lib
includedir=${prefix}/include
[default]
Libs=-L${libdir} -lnpymath
Cflags=-I${includedir}
Requires=mlib
[msvc]
Libs=/LIBPATH:${libdir} npymath.lib
Cflags=/INCLUDE:${includedir}
Requires=mlib
"""
if subst_dict is None:
subst_dict = {}
template = os.path.join(self.package_path, template)
if self.name in self.installed_pkg_config:
self.installed_pkg_config[self.name].append((template, install_dir,
subst_dict))
else:
self.installed_pkg_config[self.name] = [(template, install_dir,
subst_dict)]
def add_scripts(self,*files):
"""Add scripts to configuration.
Add the sequence of files to the beginning of the scripts list.
Scripts will be installed under the <prefix>/bin/ directory.
"""
scripts = self.paths(files)
dist = self.get_distribution()
if dist is not None:
if dist.scripts is None:
dist.scripts = []
dist.scripts.extend(scripts)
else:
self.scripts.extend(scripts)
def dict_append(self,**dict):
for key in self.list_keys:
a = getattr(self, key)
a.extend(dict.get(key, []))
for key in self.dict_keys:
a = getattr(self, key)
a.update(dict.get(key, {}))
known_keys = self.list_keys + self.dict_keys + self.extra_keys
for key in dict.keys():
if key not in known_keys:
a = getattr(self, key, None)
if a and a==dict[key]: continue
self.warn('Inheriting attribute %r=%r from %r' \
% (key, dict[key], dict.get('name', '?')))
setattr(self, key, dict[key])
self.extra_keys.append(key)
elif key in self.extra_keys:
self.info('Ignoring attempt to set %r (from %r to %r)' \
% (key, getattr(self, key), dict[key]))
elif key in known_keys:
# key is already processed above
pass
else:
raise ValueError("Don't know about key=%r" % (key))
def __str__(self):
from pprint import pformat
known_keys = self.list_keys + self.dict_keys + self.extra_keys
s = '<'+5*'-' + '\n'
s += 'Configuration of '+self.name+':\n'
known_keys.sort()
for k in known_keys:
a = getattr(self, k, None)
if a:
s += '%s = %s\n' % (k, pformat(a))
s += 5*'-' + '>'
return s
def get_config_cmd(self):
"""
Returns the numpy.distutils config command instance.
"""
cmd = get_cmd('config')
cmd.ensure_finalized()
cmd.dump_source = 0
cmd.noisy = 0
old_path = os.environ.get('PATH')
if old_path:
path = os.pathsep.join(['.', old_path])
os.environ['PATH'] = path
return cmd
def get_build_temp_dir(self):
"""
Return a path to a temporary directory where temporary files should be
placed.
"""
cmd = get_cmd('build')
cmd.ensure_finalized()
return cmd.build_temp
def have_f77c(self):
"""Check for availability of Fortran 77 compiler.
Use it inside source generating function to ensure that
setup distribution instance has been initialized.
Notes
-----
True if a Fortran 77 compiler is available (because a simple Fortran 77
code was able to be compiled successfully).
"""
simple_fortran_subroutine = '''
subroutine simple
end
'''
config_cmd = self.get_config_cmd()
flag = config_cmd.try_compile(simple_fortran_subroutine, lang='f77')
return flag
def have_f90c(self):
"""Check for availability of Fortran 90 compiler.
Use it inside source generating function to ensure that
setup distribution instance has been initialized.
Notes
-----
True if a Fortran 90 compiler is available (because a simple Fortran
90 code was able to be compiled successfully)
"""
simple_fortran_subroutine = '''
subroutine simple
end
'''
config_cmd = self.get_config_cmd()
flag = config_cmd.try_compile(simple_fortran_subroutine, lang='f90')
return flag
def append_to(self, extlib):
"""Append libraries, include_dirs to extension or library item.
"""
if is_sequence(extlib):
lib_name, build_info = extlib
dict_append(build_info,
libraries=self.libraries,
include_dirs=self.include_dirs)
else:
from numpy.distutils.core import Extension
assert isinstance(extlib, Extension), repr(extlib)
extlib.libraries.extend(self.libraries)
extlib.include_dirs.extend(self.include_dirs)
def _get_svn_revision(self, path):
"""Return path's SVN revision number.
"""
try:
output = subprocess.check_output(['svnversion'], cwd=path)
except (subprocess.CalledProcessError, OSError):
pass
else:
m = re.match(rb'(?P<revision>\d+)', output)
if m:
return int(m.group('revision'))
if sys.platform=='win32' and os.environ.get('SVN_ASP_DOT_NET_HACK', None):
entries = njoin(path, '_svn', 'entries')
else:
entries = njoin(path, '.svn', 'entries')
if os.path.isfile(entries):
with open(entries) as f:
fstr = f.read()
if fstr[:5] == '<?xml': # pre 1.4
m = re.search(r'revision="(?P<revision>\d+)"', fstr)
if m:
return int(m.group('revision'))
else: # non-xml entries file --- look for the revision after the 'dir' entry
m = re.search(r'dir[\n\r]+(?P<revision>\d+)', fstr)
if m:
return int(m.group('revision'))
return None
def _get_hg_revision(self, path):
"""Return path's Mercurial revision number.
"""
try:
output = subprocess.check_output(
['hg', 'identify', '--num'], cwd=path)
except (subprocess.CalledProcessError, OSError):
pass
else:
m = re.match(rb'(?P<revision>\d+)', output)
if m:
return int(m.group('revision'))
branch_fn = njoin(path, '.hg', 'branch')
branch_cache_fn = njoin(path, '.hg', 'branch.cache')
if os.path.isfile(branch_fn):
branch0 = None
with open(branch_fn) as f:
revision0 = f.read().strip()
branch_map = {}
for line in open(branch_cache_fn, 'r'):
branch1, revision1 = line.split()[:2]
if revision1==revision0:
branch0 = branch1
try:
revision1 = int(revision1)
except ValueError:
continue
branch_map[branch1] = revision1
return branch_map.get(branch0)
return None
def get_version(self, version_file=None, version_variable=None):
"""Try to get version string of a package.
Return a version string of the current package or None if the version
information could not be detected.
Notes
-----
This method scans files named
__version__.py, <packagename>_version.py, version.py, and
__svn_version__.py for string variables version, __version__, and
<packagename>_version, until a version number is found.
"""
version = getattr(self, 'version', None)
if version is not None:
return version
# Get version from version file.
if version_file is None:
files = ['__version__.py',
self.name.split('.')[-1]+'_version.py',
'version.py',
'__svn_version__.py',
'__hg_version__.py']
else:
files = [version_file]
if version_variable is None:
version_vars = ['version',
'__version__',
self.name.split('.')[-1]+'_version']
else:
version_vars = [version_variable]
for f in files:
fn = njoin(self.local_path, f)
if os.path.isfile(fn):
info = ('.py', 'U', 1)
name = os.path.splitext(os.path.basename(fn))[0]
n = dot_join(self.name, name)
try:
version_module = npy_load_module('_'.join(n.split('.')),
fn, info)
except ImportError as e:
self.warn(str(e))
version_module = None
if version_module is None:
continue
for a in version_vars:
version = getattr(version_module, a, None)
if version is not None:
break
if version is not None:
break
if version is not None:
self.version = version
return version
# Get version as SVN or Mercurial revision number
revision = self._get_svn_revision(self.local_path)
if revision is None:
revision = self._get_hg_revision(self.local_path)
if revision is not None:
version = str(revision)
self.version = version
return version
def make_svn_version_py(self, delete=True):
"""Appends a data function to the data_files list that will generate
__svn_version__.py file to the current package directory.
Generate package __svn_version__.py file from SVN revision number,
it will be removed after python exits but will be available
when sdist, etc commands are executed.
Notes
-----
If __svn_version__.py existed before, nothing is done.
This is intended for working with source directories that are in an
SVN repository.
"""
target = njoin(self.local_path, '__svn_version__.py')
revision = self._get_svn_revision(self.local_path)
if os.path.isfile(target) or revision is None:
return
else:
def generate_svn_version_py():
if not os.path.isfile(target):
version = str(revision)
self.info('Creating %s (version=%r)' % (target, version))
with open(target, 'w') as f:
f.write('version = %r\n' % (version))
def rm_file(f=target,p=self.info):
if delete:
try: os.remove(f); p('removed '+f)
except OSError: pass
try: os.remove(f+'c'); p('removed '+f+'c')
except OSError: pass
atexit.register(rm_file)
return target
self.add_data_files(('', generate_svn_version_py()))
def make_hg_version_py(self, delete=True):
"""Appends a data function to the data_files list that will generate
__hg_version__.py file to the current package directory.
Generate package __hg_version__.py file from Mercurial revision,
it will be removed after python exits but will be available
when sdist, etc commands are executed.
Notes
-----
If __hg_version__.py existed before, nothing is done.
This is intended for working with source directories that are
in a Mercurial repository.
"""
target = njoin(self.local_path, '__hg_version__.py')
revision = self._get_hg_revision(self.local_path)
if os.path.isfile(target) or revision is None:
return
else:
def generate_hg_version_py():
if not os.path.isfile(target):
version = str(revision)
self.info('Creating %s (version=%r)' % (target, version))
with open(target, 'w') as f:
f.write('version = %r\n' % (version))
def rm_file(f=target,p=self.info):
if delete:
try: os.remove(f); p('removed '+f)
except OSError: pass
try: os.remove(f+'c'); p('removed '+f+'c')
except OSError: pass
atexit.register(rm_file)
return target
self.add_data_files(('', generate_hg_version_py()))
def make_config_py(self,name='__config__'):
"""Generate package __config__.py file containing system_info
information used during building the package.
This file is installed to the
package installation directory.
"""
self.py_modules.append((self.name, name, generate_config_py))
def get_info(self,*names):
"""Get resources information.
Return information (from system_info.get_info) for all of the names in
the argument list in a single dictionary.
"""
from .system_info import get_info, dict_append
info_dict = {}
for a in names:
dict_append(info_dict,**get_info(a))
return info_dict
def get_cmd(cmdname, _cache={}):
if cmdname not in _cache:
import distutils.core
dist = distutils.core._setup_distribution
if dist is None:
from distutils.errors import DistutilsInternalError
raise DistutilsInternalError(
'setup distribution instance not initialized')
cmd = dist.get_command_obj(cmdname)
_cache[cmdname] = cmd
return _cache[cmdname]
def get_numpy_include_dirs():
# numpy_include_dirs are set by numpy/core/setup.py, otherwise []
include_dirs = Configuration.numpy_include_dirs[:]
if not include_dirs:
import numpy
include_dirs = [ numpy.get_include() ]
# else running numpy/core/setup.py
return include_dirs
def get_npy_pkg_dir():
"""Return the path where to find the npy-pkg-config directory.
If the NPY_PKG_CONFIG_PATH environment variable is set, the value of that
is returned. Otherwise, a path inside the location of the numpy module is
returned.
The NPY_PKG_CONFIG_PATH can be useful when cross-compiling, maintaining
customized npy-pkg-config .ini files for the cross-compilation
environment, and using them when cross-compiling.
"""
# XXX: import here for bootstrapping reasons
import numpy
d = os.environ.get('NPY_PKG_CONFIG_PATH')
if d is not None:
return d
d = os.path.join(os.path.dirname(numpy.__file__),
'core', 'lib', 'npy-pkg-config')
return d
def get_pkg_info(pkgname, dirs=None):
"""
Return library info for the given package.
Parameters
----------
pkgname : str
Name of the package (should match the name of the .ini file, without
the extension, e.g. foo for the file foo.ini).
dirs : sequence, optional
If given, should be a sequence of additional directories in which to look
for npy-pkg-config files. Those directories are searched prior to the
NumPy directory.
Returns
-------
pkginfo : class instance
The `LibraryInfo` instance containing the build information.
Raises
------
PkgNotFound
If the package is not found.
See Also
--------
Configuration.add_npy_pkg_config, Configuration.add_installed_library,
get_info
"""
from numpy.distutils.npy_pkg_config import read_config
if dirs:
dirs.append(get_npy_pkg_dir())
else:
dirs = [get_npy_pkg_dir()]
return read_config(pkgname, dirs)
def get_info(pkgname, dirs=None):
"""
Return an info dict for a given C library.
The info dict contains the necessary options to use the C library.
Parameters
----------
pkgname : str
Name of the package (should match the name of the .ini file, without
the extension, e.g. foo for the file foo.ini).
dirs : sequence, optional
If given, should be a sequence of additional directories in which to look
for npy-pkg-config files. Those directories are searched prior to the
NumPy directory.
Returns
-------
info : dict
The dictionary with build information.
Raises
------
PkgNotFound
If the package is not found.
See Also
--------
Configuration.add_npy_pkg_config, Configuration.add_installed_library,
get_pkg_info
Examples
--------
To get the necessary information for the npymath library from NumPy:
>>> npymath_info = np.distutils.misc_util.get_info('npymath')
>>> npymath_info #doctest: +SKIP
{'define_macros': [], 'libraries': ['npymath'], 'library_dirs':
['.../numpy/core/lib'], 'include_dirs': ['.../numpy/core/include']}
This info dict can then be used as input to a `Configuration` instance::
config.add_extension('foo', sources=['foo.c'], extra_info=npymath_info)
"""
from numpy.distutils.npy_pkg_config import parse_flags
pkg_info = get_pkg_info(pkgname, dirs)
# Translate LibraryInfo instance into a build_info dict
info = parse_flags(pkg_info.cflags())
for k, v in parse_flags(pkg_info.libs()).items():
info[k].extend(v)
# add_extension's extra_info argument expects 'define_macros' rather than 'macros'
info['define_macros'] = info['macros']
del info['macros']
del info['ignored']
return info
def is_bootstrapping():
import builtins
try:
builtins.__NUMPY_SETUP__
return True
except AttributeError:
return False
#########################
def default_config_dict(name = None, parent_name = None, local_path=None):
"""Return a configuration dictionary for usage in
configuration() function defined in file setup_<name>.py.
"""
import warnings
warnings.warn('Use Configuration(%r,%r,top_path=%r) instead of '\
'deprecated default_config_dict(%r,%r,%r)'
% (name, parent_name, local_path,
name, parent_name, local_path,
), stacklevel=2)
c = Configuration(name, parent_name, local_path)
return c.todict()
def dict_append(d, **kws):
for k, v in kws.items():
if k in d:
ov = d[k]
if isinstance(ov, str):
d[k] = v
else:
d[k].extend(v)
else:
d[k] = v
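# Illustrative usage (not part of the original module): existing list values
# are extended, new keys are set, and string values are replaced:
#     >>> d = {'libraries': ['npymath'], 'language': 'c'}
#     >>> dict_append(d, libraries=['m'], define_macros=[('NDEBUG', None)],
#     ...             language='c99')
#     >>> d == {'libraries': ['npymath', 'm'],
#     ...       'define_macros': [('NDEBUG', None)], 'language': 'c99'}
#     True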
def appendpath(prefix, path):
if os.path.sep != '/':
prefix = prefix.replace('/', os.path.sep)
path = path.replace('/', os.path.sep)
drive = ''
if os.path.isabs(path):
drive = os.path.splitdrive(prefix)[0]
absprefix = os.path.splitdrive(os.path.abspath(prefix))[1]
pathdrive, path = os.path.splitdrive(path)
d = os.path.commonprefix([absprefix, path])
if os.path.join(absprefix[:len(d)], absprefix[len(d):]) != absprefix \
or os.path.join(path[:len(d)], path[len(d):]) != path:
# Handle invalid paths
d = os.path.dirname(d)
subpath = path[len(d):]
if os.path.isabs(subpath):
subpath = subpath[1:]
else:
subpath = path
return os.path.normpath(njoin(drive + prefix, subpath))
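# Illustrative usage (not part of the original module; POSIX separators
# assumed): a relative path is simply joined onto the prefix,
#     >>> appendpath('config', 'sub/file.h')
#     'config/sub/file.h'
# while an absolute path is first reduced against the common prefix so the
# result still ends up below `prefix`.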
def generate_config_py(target):
"""Generate config.py file containing system_info information
used during building the package.
Usage:
config['py_modules'].append((packagename, '__config__',generate_config_py))
"""
from numpy.distutils.system_info import system_info
from distutils.dir_util import mkpath
mkpath(os.path.dirname(target))
with open(target, 'w') as f:
f.write('# This file is generated by numpy\'s %s\n' % (os.path.basename(sys.argv[0])))
f.write('# It contains system_info results at the time of building this package.\n')
f.write('__all__ = ["get_info","show"]\n\n')
# For gfortran+msvc combination, extra shared libraries may exist
f.write(textwrap.dedent("""
import os
import sys
extra_dll_dir = os.path.join(os.path.dirname(__file__), '.libs')
if sys.platform == 'win32' and os.path.isdir(extra_dll_dir):
if sys.version_info >= (3, 8):
os.add_dll_directory(extra_dll_dir)
else:
os.environ.setdefault('PATH', '')
os.environ['PATH'] += os.pathsep + extra_dll_dir
"""))
for k, i in system_info.saved_results.items():
f.write('%s=%r\n' % (k, i))
f.write(textwrap.dedent(r'''
def get_info(name):
g = globals()
return g.get(name, g.get(name + "_info", {}))
def show():
"""
Show libraries in the system on which NumPy was built.
Print information about various resources (libraries, library
directories, include directories, etc.) in the system on which
NumPy was built.
See Also
--------
get_include : Returns the directory containing NumPy C
header files.
Notes
-----
Classes specifying the information to be printed are defined
in the `numpy.distutils.system_info` module.
Information may include:
* ``language``: language used to write the libraries (mostly
C or f77)
* ``libraries``: names of libraries found in the system
* ``library_dirs``: directories containing the libraries
* ``include_dirs``: directories containing library header files
* ``src_dirs``: directories containing library source files
* ``define_macros``: preprocessor macros used by
``distutils.setup``
Examples
--------
>>> np.show_config()
blas_opt_info:
language = c
define_macros = [('HAVE_CBLAS', None)]
libraries = ['openblas', 'openblas']
library_dirs = ['/usr/local/lib']
"""
for name,info_dict in globals().items():
if name[0] == "_" or type(info_dict) is not type({}): continue
print(name + ":")
if not info_dict:
print(" NOT AVAILABLE")
for k,v in info_dict.items():
v = str(v)
if k == "sources" and len(v) > 200:
v = v[:60] + " ...\n... " + v[-60:]
print(" %s = %s" % (k,v))
'''))
return target
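# Illustrative sketch (not part of the original module): once generate_config_py
# has written <package>/__config__.py, the saved system_info results can be read
# back through the generated get_info()/show() helpers; numpy's own __config__
# module is used here as the assumed example.
def _example_read_generated_config():
    import numpy.__config__ as npconfig
    npconfig.show()                            # prints every saved system_info section
    return npconfig.get_info('blas_opt_info')  # {} when the section is absent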
def msvc_version(compiler):
"""Return version major and minor of compiler instance if it is
MSVC, raise an exception otherwise."""
if not compiler.compiler_type == "msvc":
raise ValueError("Compiler instance is not msvc (%s)"\
% compiler.compiler_type)
return compiler._MSVCCompiler__version
def get_build_architecture():
# Importing distutils.msvccompiler triggers a warning on non-Windows
# systems, so delay the import to here.
from distutils.msvccompiler import get_build_architecture
return get_build_architecture()
|
{
"content_hash": "f20f4239c7f56c86a30376b7dd6005da",
"timestamp": "",
"source": "github",
"line_count": 2391,
"max_line_length": 102,
"avg_line_length": 35.72605604349644,
"alnum_prop": 0.5438592383605905,
"repo_name": "WarrenWeckesser/numpy",
"id": "9f9e9f1ac44a12a1f866d12b39c3e84ad05df039",
"size": "85421",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "numpy/distutils/misc_util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "9059444"
},
{
"name": "C++",
"bytes": "174989"
},
{
"name": "Fortran",
"bytes": "10884"
},
{
"name": "JavaScript",
"bytes": "16928"
},
{
"name": "Makefile",
"bytes": "4290"
},
{
"name": "Python",
"bytes": "8313055"
},
{
"name": "Shell",
"bytes": "9612"
},
{
"name": "sed",
"bytes": "5741"
}
],
"symlink_target": ""
}
|
from s4 import utils
from s4.commands import Command
class RmCommand(Command):
def run(self):
if "targets" not in self.config:
self.logger.info("You have not added any targets yet")
return
if self.args.target not in self.config["targets"]:
all_targets = sorted(list(self.config["targets"].keys()))
self.logger.info('"%s" is an unknown target', self.args.target)
self.logger.info("Choices are: %s", all_targets)
return
del self.config["targets"][self.args.target]
utils.set_config(self.config)
|
{
"content_hash": "d72c768a4615d1f4ac7e78994598d699",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 75,
"avg_line_length": 35.529411764705884,
"alnum_prop": 0.6142384105960265,
"repo_name": "MichaelAquilina/s3backup",
"id": "a8e4d0305f0704162e16fdd765ee2f9541b73610",
"size": "632",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "s4/commands/rm_command.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "101356"
},
{
"name": "Shell",
"bytes": "42"
}
],
"symlink_target": ""
}
|
import Gaffer
import GafferUI
QtGui = GafferUI._qtImport( "QtGui" )
## A simple PlugValueWidget which just displays the name of the plug,
# with the popup action menu for the plug.
#
# Supported plug metadata :
#
# - "labelPlugValueWidget:renameable"
class LabelPlugValueWidget( GafferUI.PlugValueWidget ) :
def __init__( self, plug, horizontalAlignment=GafferUI.Label.HorizontalAlignment.Left, verticalAlignment=GafferUI.Label.VerticalAlignment.Center, **kw ) :
GafferUI.PlugValueWidget.__init__( self, QtGui.QWidget(), plug, **kw )
layout = QtGui.QHBoxLayout()
layout.setContentsMargins( 0, 0, 0, 0 )
layout.setSizeConstraint( QtGui.QLayout.SetMinAndMaxSize )
self._qtWidget().setLayout( layout )
self.__label = GafferUI.NameLabel(
plug,
horizontalAlignment = horizontalAlignment,
verticalAlignment = verticalAlignment,
)
self.__label._qtWidget().setObjectName( "gafferPlugLabel" )
layout.addWidget( self.__label._qtWidget() )
self.__editableLabel = None # we'll make this lazily as needed
# connecting at group 0 so we're called before the slots
# connected by the NameLabel class.
self.__dragBeginConnection = self.__label.dragBeginSignal().connect( 0, Gaffer.WeakMethod( self.__dragBegin ) )
self.__dragEndConnection = self.__label.dragEndSignal().connect( 0, Gaffer.WeakMethod( self.__dragEnd ) )
self.__plugMetadataChangedConnection = Gaffer.Metadata.plugValueChangedSignal().connect( Gaffer.WeakMethod( self.__plugMetadataChanged ) )
self._addPopupMenu( self.__label )
self.setPlug( plug )
def label( self ) :
return self.__label
def setPlug( self, plug ) :
GafferUI.PlugValueWidget.setPlug( self, plug )
self.__label.setGraphComponent( plug )
if self.__editableLabel is not None :
self.__editableLabel.setGraphComponent( plug )
self.__updateFormatter()
self.__updateDoubleClickConnection()
def setHighlighted( self, highlighted ) :
GafferUI.PlugValueWidget.setHighlighted( self, highlighted )
self.__label.setHighlighted( highlighted )
def getToolTip( self ) :
result = GafferUI.PlugValueWidget.getToolTip( self )
if self.getPlug() is not None :
result += "<ul>"
result += "<li>Left drag to connect</li>"
if hasattr( self.getPlug(), "getValue" ) :
result += "<li>Shift-left or middle drag to transfer value</li>"
result += "<ul>"
return result
def _updateFromPlug( self ) :
plug = self.getPlug()
valueChanged = plug.getInput() is not None
if not valueChanged and isinstance( plug, Gaffer.ValuePlug ) :
with self.getContext() :
if Gaffer.NodeAlgo.hasUserDefault( plug ) :
try:
valueChanged = not Gaffer.NodeAlgo.isSetToUserDefault( plug )
except:
						# an error here should not cause the UI to break, especially since the corresponding value widget may already be indicating the error itself
valueChanged = True
else :
try:
valueChanged = not plug.isSetToDefault()
except:
						# an error here should not cause the UI to break, especially since the corresponding value widget may already be indicating the error itself
valueChanged = True
self.__setValueChanged( valueChanged )
	# Sets whether or not the label should be rendered in a ValueChanged state.
def __setValueChanged( self, valueChanged ) :
if valueChanged == self.__getValueChanged() :
return
self.__label._qtWidget().setProperty( "gafferValueChanged", GafferUI._Variant.toVariant( valueChanged ) )
self.__label._repolish()
def __getValueChanged( self ) :
if "gafferValueChanged" not in self.__label._qtWidget().dynamicPropertyNames() :
return False
return GafferUI._Variant.fromVariant( self.__label._qtWidget().property( "gafferValueChanged" ) )
def __dragBegin( self, widget, event ) :
		# Initiate a drag containing the value of the plug
		# for a shift-left drag or a middle drag. Initiate a
		# drag containing the plug itself for a plain left drag.
shift = event.modifiers & event.Modifiers.Shift
left = event.buttons == event.Buttons.Left
middle = event.buttons == event.Buttons.Middle
if ( shift and left ) or middle :
if not hasattr( self.getPlug(), "getValue" ) :
return None
GafferUI.Pointer.setCurrent( "values" )
with self.getContext() :
return self.getPlug().getValue()
elif left :
GafferUI.Pointer.setCurrent( "plug" )
return self.getPlug()
def __dragEnd( self, widget, event ) :
GafferUI.Pointer.setCurrent( None )
def __updateFormatter( self ) :
plug = self.getPlug()
label = Gaffer.Metadata.value( plug, "label" ) if plug is not None else None
if label is not None :
self.__label.setFormatter( lambda graphComponents : label )
else :
self.__label.setFormatter( self.__label.defaultFormatter )
def __updateDoubleClickConnection( self ) :
self.__labelDoubleClickConnection = None
if self.getPlug() is None or not Gaffer.Metadata.value( self.getPlug(), "labelPlugValueWidget:renameable" ) :
return
self.__labelDoubleClickConnection = self.__label.buttonDoubleClickSignal().connect( Gaffer.WeakMethod( self.__labelDoubleClicked ) )
def __labelDoubleClicked( self, label, event ) :
assert( label is self.__label )
if self.getPlug().getFlags( Gaffer.Plug.Flags.ReadOnly ) or Gaffer.readOnly( self.getPlug() ) :
return
if self.__editableLabel is None :
self.__editableLabel = GafferUI.NameWidget( self.getPlug() )
self.__editableLabel._qtWidget().setMinimumSize( self.label()._qtWidget().minimumSize() )
self.__editableLabel._qtWidget().setMaximumSize( self.label()._qtWidget().maximumSize() )
# Connect at group 0 so we're called before the NameWidget's own slots.
self.__labelEditingFinishedConnection = self.__editableLabel.editingFinishedSignal().connect( 0, Gaffer.WeakMethod( self.__labelEditingFinished ) )
self._qtWidget().layout().insertWidget( 0, self.__editableLabel._qtWidget() )
self.__label.setVisible( False )
self.__editableLabel.setVisible( True )
self.__editableLabel.setSelection( 0, len( self.__editableLabel.getText() ) )
self.__editableLabel.grabFocus()
def __labelEditingFinished( self, nameWidget ) :
with Gaffer.UndoContext( self.getPlug().ancestor( Gaffer.ScriptNode ) ) :
# Do what the NameWidget would have done for us anyway, so we
# can group it with the metadata deregistration in the undo queue.
self.getPlug().setName( nameWidget.getText() )
# Remove any metadata label which would mask the name - if a user
# has gone to the trouble of setting a sensible name, then it should
# take precedence.
Gaffer.Metadata.deregisterValue( self.getPlug(), "label" )
self.__label.setVisible( True )
self.__editableLabel.setVisible( False )
# Return True so that the NameWidget's handler isn't run, since we
# did all the work ourselves.
return True
def __plugMetadataChanged( self, nodeTypeId, plugPath, key, plug ) :
if self.getPlug() is None :
return
if key=="label" and Gaffer.affectedByChange( self.getPlug(), nodeTypeId, plugPath, plug ) :
self.__updateFormatter()
|
{
"content_hash": "7c1ca048a5c0f1d42737fb67c4a978fc",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 155,
"avg_line_length": 34.92537313432836,
"alnum_prop": 0.7169515669515669,
"repo_name": "chippey/gaffer",
"id": "59774faf3a6edfd0cec6f615b616dc7d38d8bd16",
"size": "8828",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/GafferUI/LabelPlugValueWidget.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2258"
},
{
"name": "C++",
"bytes": "5420141"
},
{
"name": "CSS",
"bytes": "28027"
},
{
"name": "GLSL",
"bytes": "6250"
},
{
"name": "Objective-C",
"bytes": "2228"
},
{
"name": "Python",
"bytes": "5348174"
},
{
"name": "Shell",
"bytes": "8370"
},
{
"name": "Slash",
"bytes": "41159"
}
],
"symlink_target": ""
}
|
import base64
import errno
import httplib
import json
import socket
import sys
import time
def do(method, connection, headers, path, body=None):
connection.request(method, path, headers=headers, body=json.dumps(body))
resp = connection.getresponse()
content = resp.read()
if resp.status != 200:
raise IOError("Unexpected HTTP status received on %s: %d" % (path, resp.status))
return json.loads(content)
connection = httplib.HTTPConnection("localhost")
# try to connect, multiple times if ECONNREFUSED is raised
# (service is up but not ready for requests yet)
for retries in range(0,10):
    try:
        connection.connect()
        break
    except socket.error as e:
        if e.errno != errno.ECONNREFUSED:
            raise e
        print("Connection refused, retrying...")
        time.sleep(1)
token = base64.b64encode("admin:admin".encode("ASCII")).decode("ascii")
headers = {
"Authorization" : "Basic %s" % token,
"Content-Type" : "application/json; charset=utf8"
}
datasources = do("GET", connection, headers, "/api/datasources")
if "Prometheus on localhost" not in map(lambda d: d["name"], datasources):
do("POST", connection, headers, "/api/datasources", {
"name" : "Prometheus on localhost",
"type" : "prometheus",
"url" : "http://localhost:9090/",
"access" : "proxy",
"basicAuth" : False,
"isDefault": True,
})
dashboards = do("GET", connection, headers, "/api/search")
suse_manager_dashboards = filter(lambda d: d["title"] == "SUSE Manager Server", dashboards)
with open('/opt/grafana/conf/suse_manager.json', 'r') as content_file:
dashboard = json.loads(content_file.read())
dashboard["id"] = suse_manager_dashboards[0]["id"] if suse_manager_dashboards else None
do("POST", connection, headers, "/api/dashboards/db", { "dashboard" : dashboard, "overwrite" : True })
|
{
"content_hash": "0e9f93265d15482e848c9533fa06088a",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 106,
"avg_line_length": 32.80701754385965,
"alnum_prop": 0.6663101604278074,
"repo_name": "MalloZup/sumaform",
"id": "68d33a1742c4304b63b424912ac543853a763ec1",
"size": "1893",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/grafana/setup_grafana.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "HCL",
"bytes": "119582"
},
{
"name": "HTML",
"bytes": "1699"
},
{
"name": "Python",
"bytes": "32501"
},
{
"name": "Ruby",
"bytes": "1808"
},
{
"name": "SaltStack",
"bytes": "84579"
},
{
"name": "Scheme",
"bytes": "552"
},
{
"name": "Shell",
"bytes": "7543"
}
],
"symlink_target": ""
}
|
from cloudify import ctx
from cloudify.exceptions import NonRecoverableError
from cloudify.state import ctx_parameters as inputs
import subprocess
import os
import re
import sys
import time
import threading
import platform
from StringIO import StringIO
from cloudify_rest_client import CloudifyClient
from cloudify import utils
if 'MANAGER_REST_PROTOCOL' in os.environ and os.environ['MANAGER_REST_PROTOCOL'] == "https":
client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port(), protocol='https', trust_all=True)
else:
client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port())
def convert_env_value_to_string(envDict):
for key, value in envDict.items():
envDict[str(key)] = str(envDict.pop(key))
def get_attribute_user(ctx):
if get_attribute(ctx, 'user'):
return get_attribute(ctx, 'user')
else:
return get_attribute(ctx, 'cloudify_agent')['user']
def get_attribute_key(ctx):
if get_attribute(ctx, 'key'):
return get_attribute(ctx, 'key')
else:
return get_attribute(ctx, 'cloudify_agent')['key']
def get_host(entity):
if entity.instance.relationships:
for relationship in entity.instance.relationships:
if 'cloudify.relationships.contained_in' in relationship.type_hierarchy:
return relationship.target
return None
def has_attribute_mapping(entity, attribute_name):
    ctx.logger.info('Checking if a mapping exists for attribute {0} in {1}'.format(attribute_name, entity.node.properties))
mapping_configuration = entity.node.properties.get('_a4c_att_' + attribute_name, None)
if mapping_configuration is not None:
if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name:
return False
else:
return True
return False
def process_attribute_mapping(entity, attribute_name, data_retriever_function):
# This is where attribute mapping is defined in the cloudify type
mapping_configuration = entity.node.properties['_a4c_att_' + attribute_name]
ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration))
    # If the mapping configuration exists and concerns SELF, just get the attribute with the mapped attribute name.
    # Otherwise, if it concerns TARGET, follow the relationship and retrieve the mapped attribute name from the TARGET.
if mapping_configuration['parameters'][0] == 'SELF':
return data_retriever_function(entity, mapping_configuration['parameters'][1])
elif mapping_configuration['parameters'][0] == 'TARGET' and entity.instance.relationships:
for relationship in entity.instance.relationships:
if mapping_configuration['parameters'][1] in relationship.type_hierarchy:
return data_retriever_function(relationship.target, mapping_configuration['parameters'][2])
return ""
def get_nested_attribute(entity, attribute_names):
deep_properties = get_attribute(entity, attribute_names[0])
attribute_names_iter = iter(attribute_names)
next(attribute_names_iter)
for attribute_name in attribute_names_iter:
if deep_properties is None:
return ""
else:
deep_properties = deep_properties.get(attribute_name, None)
return deep_properties
def _all_instances_get_nested_attribute(entity, attribute_names):
return None
def get_attribute(entity, attribute_name):
if has_attribute_mapping(entity, attribute_name):
        # First check if any mapping exists for the attribute
mapped_value = process_attribute_mapping(entity, attribute_name, get_attribute)
ctx.logger.info('Mapping exists for attribute {0} with value {1}'.format(attribute_name, mapped_value))
return mapped_value
# No mapping exist, try to get directly the attribute from the entity
attribute_value = entity.instance.runtime_properties.get(attribute_name, None)
if attribute_value is not None:
ctx.logger.info('Found the attribute {0} with value {1} on the node {2}'.format(attribute_name, attribute_value, entity.node.id))
return attribute_value
# Attribute retrieval fails, fall back to property
property_value = entity.node.properties.get(attribute_name, None)
if property_value is not None:
return property_value
# Property retrieval fails, fall back to host instance
host = get_host(entity)
if host is not None:
        ctx.logger.info('Attribute {0} not found, going up to the parent node {1}'.format(attribute_name, host.node.id))
return get_attribute(host, attribute_name)
# Nothing is found
return ""
def _all_instances_get_attribute(entity, attribute_name):
result_map = {}
# get all instances data using cfy rest client
# we have to get the node using the rest client with node_instance.node_id
# then we will have the relationships
node = client.nodes.get(ctx.deployment.id, entity.node.id)
all_node_instances = client.node_instances.list(ctx.deployment.id, entity.node.id)
for node_instance in all_node_instances:
prop_value = __recursively_get_instance_data(node, node_instance, attribute_name)
if prop_value is not None:
ctx.logger.info('Found the property/attribute {0} with value {1} on the node {2} instance {3}'.format(attribute_name, prop_value, entity.node.id,
node_instance.id))
result_map[node_instance.id + '_'] = prop_value
return result_map
def get_property(entity, property_name):
# Try to get the property value on the node
property_value = entity.node.properties.get(property_name, None)
if property_value is not None:
ctx.logger.info('Found the property {0} with value {1} on the node {2}'.format(property_name, property_value, entity.node.id))
return property_value
# No property found on the node, fall back to the host
host = get_host(entity)
if host is not None:
        ctx.logger.info('Property {0} not found, going up to the parent node {1}'.format(property_name, host.node.id))
return get_property(host, property_name)
return ""
def get_instance_list(node_id):
result = ''
all_node_instances = client.node_instances.list(ctx.deployment.id, node_id)
for node_instance in all_node_instances:
if len(result) > 0:
result += ','
result += node_instance.id
return result
def get_host_node_name(instance):
for relationship in instance.relationships:
if 'cloudify.relationships.contained_in' in relationship.type_hierarchy:
return relationship.target.node.id
return None
def __get_relationship(node, target_name, relationship_type):
for relationship in node.relationships:
if relationship.get('target_id') == target_name and relationship_type in relationship.get('type_hierarchy'):
return relationship
return None
def __has_attribute_mapping(node, attribute_name):
    ctx.logger.info('Checking if a mapping exists for attribute {0} in {1}'.format(attribute_name, node.properties))
mapping_configuration = node.properties.get('_a4c_att_' + attribute_name, None)
if mapping_configuration is not None:
if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name:
return False
else:
return True
return False
def __process_attribute_mapping(node, node_instance, attribute_name, data_retriever_function):
# This is where attribute mapping is defined in the cloudify type
mapping_configuration = node.properties['_a4c_att_' + attribute_name]
ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration))
    # If the mapping configuration exists and concerns SELF, just get the attribute with the mapped attribute name.
    # Otherwise, if it concerns TARGET, follow the relationship and retrieve the mapped attribute name from the TARGET.
if mapping_configuration['parameters'][0] == 'SELF':
return data_retriever_function(node, node_instance, mapping_configuration['parameters'][1])
elif mapping_configuration['parameters'][0] == 'TARGET' and node_instance.relationships:
for rel in node_instance.relationships:
relationship = __get_relationship(node, rel.get('target_name'), rel.get('type'))
if mapping_configuration['parameters'][1] in relationship.get('type_hierarchy'):
target_instance = client.node_instances.get(rel.get('target_id'))
target_node = client.nodes.get(ctx.deployment.id, target_instance.node_id)
return data_retriever_function(target_node, target_instance, mapping_configuration['parameters'][2])
return None
def __recursively_get_instance_data(node, node_instance, attribute_name):
if __has_attribute_mapping(node, attribute_name):
return __process_attribute_mapping(node, node_instance, attribute_name, __recursively_get_instance_data)
attribute_value = node_instance.runtime_properties.get(attribute_name, None)
if attribute_value is not None:
return attribute_value
elif node_instance.relationships:
for rel in node_instance.relationships:
# on rel we have target_name, target_id (instanceId), type
relationship = __get_relationship(node, rel.get('target_name'), rel.get('type'))
if 'cloudify.relationships.contained_in' in relationship.get('type_hierarchy'):
parent_instance = client.node_instances.get(rel.get('target_id'))
parent_node = client.nodes.get(ctx.deployment.id, parent_instance.node_id)
return __recursively_get_instance_data(parent_node, parent_instance, attribute_name)
return None
else:
return None
env_map = {}
env_map['NODE'] = ctx.node.id
env_map['INSTANCE'] = ctx.instance.id
env_map['INSTANCES'] = get_instance_list(ctx.node.id)
env_map['HOST'] = get_host_node_name(ctx.instance)
env_map['A4C_EXECUTION_HOST'] = get_attribute(ctx, 'ip_address')
env_map['A4C_EXECUTION_USER'] = get_attribute_user(ctx)
env_map['A4C_EXECUTION_KEY'] = get_attribute_key(ctx)
env_map['TOMCAT_HOME'] = r'/opt/tomcat'
if inputs.get('process', None) is not None and inputs['process'].get('env', None) is not None:
ctx.logger.info('Operation is executed with environment variable {0}'.format(inputs['process']['env']))
env_map.update(inputs['process']['env'])
def parse_output(output):
# by convention, the last output is the result of the operation
last_output = None
outputs = {}
pattern = re.compile('EXPECTED_OUTPUT_(\w+)=(.*)')
for line in output.splitlines():
match = pattern.match(line)
if match is None:
last_output = line
else:
output_name = match.group(1)
output_value = match.group(2)
outputs[output_name] = output_value
return {'last_output': last_output, 'outputs': outputs}
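# Illustrative sketch (not part of the original script): with the
# EXPECTED_OUTPUT_ convention above, a script that prints
#   starting
#   EXPECTED_OUTPUT_STATUS=ok
#   done
# is parsed into {'last_output': 'done', 'outputs': {'STATUS': 'ok'}}.
def _example_parse_output():
    sample = "starting\nEXPECTED_OUTPUT_STATUS=ok\ndone"
    return parse_output(sample)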
def execute(script_path, process, outputNames, command_prefix=None, cwd=None):
os.chmod(script_path, 0755)
on_posix = 'posix' in sys.builtin_module_names
env = os.environ.copy()
process_env = process.get('env', {})
env.update(process_env)
if outputNames is not None:
env['EXPECTED_OUTPUTS'] = outputNames
if platform.system() == 'Windows':
wrapper_path = ctx.download_resource("scriptWrapper.bat")
else:
wrapper_path = ctx.download_resource("scriptWrapper.sh")
os.chmod(wrapper_path, 0755)
command = '{0} {1}'.format(wrapper_path, script_path)
else:
command = script_path
if command_prefix is not None:
command = "{0} {1}".format(command_prefix, command)
ctx.logger.info('Executing: {0} in env {1}'.format(command, env))
process = subprocess.Popen(command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=env,
cwd=cwd,
bufsize=1,
close_fds=on_posix)
return_code = None
stdout_consumer = OutputConsumer(process.stdout)
stderr_consumer = OutputConsumer(process.stderr)
while True:
return_code = process.poll()
if return_code is not None:
break
time.sleep(0.1)
stdout_consumer.join()
stderr_consumer.join()
parsed_output = parse_output(stdout_consumer.buffer.getvalue())
if outputNames is not None:
outputNameList = outputNames.split(';')
for outputName in outputNameList:
            ctx.logger.info('Output name: {0} value: {1}'.format(outputName, parsed_output['outputs'].get(outputName, None)))
if return_code != 0:
error_message = "Script {0} encountered error with return code {1} and standard output {2}, error output {3}".format(command, return_code,
stdout_consumer.buffer.getvalue(),
stderr_consumer.buffer.getvalue())
error_message = str(unicode(error_message, errors='ignore'))
ctx.logger.error(error_message)
raise NonRecoverableError(error_message)
else:
ok_message = "Script {0} executed normally with standard output {1} and error output {2}".format(command, stdout_consumer.buffer.getvalue(),
stderr_consumer.buffer.getvalue())
ok_message = str(unicode(ok_message, errors='ignore'))
ctx.logger.info(ok_message)
return parsed_output
class OutputConsumer(object):
def __init__(self, out):
self.out = out
self.buffer = StringIO()
self.consumer = threading.Thread(target=self.consume_output)
self.consumer.daemon = True
self.consumer.start()
def consume_output(self):
for line in iter(self.out.readline, b''):
self.buffer.write(line)
self.out.close()
def join(self):
self.consumer.join()
new_script_process = {'env': env_map}
operationOutputNames = None
convert_env_value_to_string(new_script_process['env'])
parsed_output = execute(ctx.download_resource('_a4c_impl_artifact/Tomcat/tosca.interfaces.node.lifecycle.Standard/stop/tomcat_stop.sh'), new_script_process, operationOutputNames)
outputs = parsed_output['outputs'].items()
for k,v in outputs:
ctx.logger.info('Output name: {0} value: {1}'.format(k, v))
ctx.instance.runtime_properties['_a4c_OO:tosca.interfaces.node.lifecycle.Standard:stop:{0}'.format(k)] = v
ctx.instance.runtime_properties['server_url'] = r'http://' + get_attribute(ctx, 'public_ip_address') + r':' + r'80'
ctx.instance.update()
|
{
"content_hash": "1ec7868fc3380fc33056c3dae4ebb7e9",
"timestamp": "",
"source": "github",
"line_count": 342,
"max_line_length": 178,
"avg_line_length": 44.76608187134503,
"alnum_prop": 0.6583278902677988,
"repo_name": "alien4cloud/alien4cloud-cloudify3-provider",
"id": "cc57042669592ebd930c1aebe113505cb6e29c93",
"size": "15313",
"binary": false,
"copies": "6",
"ref": "refs/heads/develop",
"path": "alien4cloud-cloudify3-provider/src/test/resources/outputs/blueprints/openstack/artifact_test/wrapper/Tomcat/tosca.interfaces.node.lifecycle.Standard/stop/_a4c_stop.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3432"
},
{
"name": "Gherkin",
"bytes": "624"
},
{
"name": "Java",
"bytes": "575855"
},
{
"name": "Python",
"bytes": "4128402"
},
{
"name": "Shell",
"bytes": "157900"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from bongo.apps.archive import models as archive_models
from bongo.apps.bongo import models as bongo_models
from bongo.apps.bongo.helpers import strip_tags
from django.core.management.base import BaseCommand
from django.core.files.storage import default_storage as storage
from django.utils.timezone import make_aware
from django.core.files.base import ContentFile
from django.utils.text import slugify
from django.test import override_settings
from django.db import connection
from django.conf import settings
from datetime import datetime
from optparse import make_option
from ipdb import launch_ipdb_on_exception
import pytz
import requests
options = None
session = None
tz = pytz.timezone('America/New_York')
cursor = connection.cursor()
def memcheck():
"""Checks memory usage; this script used to run out of memory so this reporting was helpful.
Now the script runs out of CPU instead! slow clap
"""
import resource
return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1000000.0
def staticfiler(obj, filename, local_path, remote_uri):
"""couple of cases here:
- file already exists on the system, has filesize of 0
- file already exists on system, has a filesize > 0
- file does not exist, nodownload is set
- file does not exist, nodownload is off, reading from local copy fails
- file does not exist, nodownload is off, download succeeds
"""
global options
global session
if options.get("verbose"):
print("looking for file {}...".format(filename), end = " ")
stale_copy = False
if storage.exists(local_path):
if options.get("verbose"):
print("It already exists", end = "")
if storage.size(local_path) > 0 or options.get("ign_empt"):
stale_copy = storage.open(local_path, 'rb')
f = ContentFile(stale_copy.read())
stale_copy.close()
if options.get("verbose"):
print("")
else:
if options.get("verbose"):
print(", but its filesize is 0.")
storage.delete(local_path)
if not stale_copy and not options.get('nodownload'):
if options.get("verbose"):
print("Getting it from bowdoinorient.com/{}...".format(remote_uri), end = " ")
try:
r = session.get("http://bowdoinorient.com/" + remote_uri, timeout = 1)
if r.status_code == 200:
f = ContentFile(r.content)
else:
if options.get("verbose"):
print('Failed because of a {} response code'.format(r.status_code))
f = ContentFile("")
except requests.exceptions.RequestException as e:
if options.get("verbose"):
print(e)
f = ContentFile("")
elif not stale_copy and options.get('nodownload'):
if options.get("verbose"):
print("Faking the download.")
f = ContentFile("")
obj.save(filename, f)
f.close()
def datetimeify(d):
"""Convert a date to a datetime, do nothing to a datetime"""
if d.__class__.__name__ == "datetime":
return d
elif d.__class__.__name__ == "date":
return datetime.combine(d, datetime.min.time())
else:
raise Exception("Things are really fucked: datetimeify called with a " + d.__class__.__name__)
def import_ads():
""" Import the old ads table into the new Advertiser, Ad models
There aren't actually any, so this is pointless
"""
global options
for old_ad in archive_models.Ads.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing ad #{}".format(old_ad.pk))
(advertiser, created) = bongo_models.Advertiser.objects.get_or_create(
imported = True,
name = old_ad.sponsor
)
(ad, created) = bongo_models.Ad.objects.get_or_create(
imported = True,
pk = old_ad.id,
run_from = make_aware(datetimeify(old_ad.start_date), tz),
run_through = make_aware(datetimeify(old_ad.end_date), tz),
url = old_ad.link,
owner = advertiser,
)
staticfiler(
ad.adfile,
old_ad.filename,
"ads/" + old_ad.filename,
"ads/" + old_ad.filename
)
ad.save()
def import_tips():
"""Import the old tips table into the new Tip model"""
global options
for old_tip in archive_models.Tips.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing tip #{}".format(old_tip.pk))
(tip, created) = bongo_models.Tip.objects.get_or_create(
imported = True,
pk = old_tip.id,
content = old_tip.tip,
submitted_at = make_aware(datetimeify(old_tip.submitted), tz),
submitted_from = old_tip.user_ip,
useragent = old_tip.user_agent
)
def import_alerts():
"""Import the old alerts table into the new Alert model"""
global options
for old_alert in archive_models.Alerts.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing alert #{}".format(old_alert.pk))
if not old_alert.end_date or not old_alert.start_date:
if options.get("verbose"):
print("Refusing to commit an alert with a null datetime")
continue
(alert, created) = bongo_models.Alert.objects.get_or_create(
imported = True,
pk = old_alert.id,
run_from = old_alert.start_date,
run_through = old_alert.end_date,
urgent = True if old_alert.urgent == 1 else False,
message = old_alert.message
)
def import_volumes():
"""Import the old volumes table into the new Volume model"""
global options
for old_volume in archive_models.Volume.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing volume #{}".format(old_volume.pk))
(volume, created) = bongo_models.Volume.objects.get_or_create(
imported = True,
pk = old_volume.id,
volume_number = old_volume.arabic,
volume_year_start = int(old_volume.annodomini), # toph.... why
volume_year_end = int(old_volume.annodomini) + 1
)
def import_issues():
"""Import the old issues table into the new Issue model"""
global options
for old_issue in archive_models.Issue.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing issue #{}".format(old_issue.pk))
try:
vol = bongo_models.Volume.objects.get(volume_number__exact = old_issue.volume)
except bongo_models.Volume.DoesNotExist as e:
# Cover the cases where nobody created Volume 144 or 145 in the DB
# @TODO: lots of magic numbers in here
if old_issue.volume in [144, 145]:
(vol, created) = bongo_models.Volume.objects.get_or_create(
id = archive_models.Volume.objects.using('archive').all().count() + old_issue.volume - 143,
volume_number = old_issue.volume,
volume_year_start = 1870 + old_issue.volume,
volume_year_end = 1870 + old_issue.volume + 1,
imported = True
)
else:
raise e
(issue, created) = bongo_models.Issue.objects.get_or_create(
imported = True,
pk = old_issue.id,
issue_date = old_issue.issue_date,
issue_number = old_issue.issue_number,
volume = vol,
scribd = old_issue.scribd,
# @TODO: Host our own PDFs?
)
def import_series():
"""Import the old series table into the new Series model"""
global options
for old_series in archive_models.Series.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing series #{}".format(old_series.pk))
(series, created) = bongo_models.Series.objects.get_or_create(
imported = True,
pk = old_series.id,
name = old_series.name
)
def import_section():
"""Import the old sections table into the new Section model"""
global options
for old_section in archive_models.Section.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing section #{}".format(old_section.pk))
(section, created) = bongo_models.Section.objects.get_or_create(
imported = True,
pk = old_section.id,
section = old_section.shortname,
priority = old_section.priority
)
def import_job():
"""Import the old jobs table into the new Job model"""
global options
for old_job in archive_models.Job.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing job #{}".format(old_job.pk))
(job, created) = bongo_models.Job.objects.get_or_create(
imported = True,
pk = old_job.id,
title = strip_tags(old_job.name).title(),
)
def import_attachment():
global options
for old_attachment in archive_models.Attachments.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing attachment #{}".format(old_attachment.pk))
if old_attachment.id <= 5:
# Attachments 1-5 are absent in the current frontend and have the wrong content1/content2
# ordering. I'm comfortable dropping them.
continue
else:
if old_attachment.type == "html":
(atchmt, created) = bongo_models.HTML.objects.get_or_create(
imported = True,
pk = old_attachment.id,
content = old_attachment.content1
)
elif old_attachment.type == "vimeo":
(atchmt, created) = bongo_models.Video.objects.get_or_create(
imported = True,
pk = old_attachment.id,
host = "Vimeo",
uid = old_attachment.content1,
)
elif old_attachment.type == "youtube":
(atchmt, created) = bongo_models.Video.objects.get_or_create(
imported = True,
pk = old_attachment.id,
host = "YouTube",
uid = old_attachment.content1,
)
elif old_attachment.type == "pullquote":
(atchmt, created) = bongo_models.Pullquote.objects.get_or_create(
imported = True,
pk = old_attachment.id,
quote = old_attachment.content1,
attribution = old_attachment.content2
)
if old_attachment.type != "pullquote":
atchmt.caption = old_attachment.content2
atchmt.save()
if old_attachment.author_id:
creator = bongo_models.Creator.objects.get(
pk__exact = old_attachment.author_id
)
atchmt.creators.add(creator)
atchmt.save()
try:
post = bongo_models.Post.objects.get(
pk__exact = old_attachment.article_id
)
except:
if options.get("verbose"):
print("Attachment {} wants to connect to post {}, but that post does not exist.".format(
old_attachment.id,
old_attachment.article_id
                    ))
                continue
if old_attachment.type == "html":
post.html.add(atchmt)
elif old_attachment.type == "vimeo":
post.video.add(atchmt)
elif old_attachment.type == "youtube":
post.video.add(atchmt)
elif old_attachment.type == "pullquote":
post.pullquote.add(atchmt)
post.save()
@override_settings(DEBUG = False)
def import_content():
global options
archive_articles = archive_models.Article.objects.using('archive').all().iterator()
archive_articlebodies = archive_models.Articlebody.objects.using('archive')
archive_articleauthors = archive_models.Articleauthor.objects.using('archive')
archive_authors = archive_models.Author.objects.using('archive')
for old_article in archive_articles:
if options.get("verbose"):
print("importing article #{}".format(old_article.pk))
# get the Text
try:
old_articlebody = archive_articlebodies.filter(
article_id = old_article.id
).order_by("-timestamp")[0]
except:
old_articlebody = None
# get the Creator(s)
old_authors = []
for old_articleauthor in archive_articleauthors.filter(article_id__exact = old_article.id):
old_authors.append(archive_authors.get(id__exact = old_articleauthor.author_id))
# If an article has no volume number, try to guess it by the year. Better than nothing.
# This shouldn't actually ever be invoked now that I did some manual DB cleanup
if old_article.volume == 0:
old_article.volume = old_article.date_created.year - 1870
# If any of these fields are missing, set them to the unix epoch
if old_article.date_created is None:
old_article.date_created = make_aware(datetime(1970, 1, 1), tz)
if old_article.date_updated is None:
old_article.date_updated = make_aware(datetime(1970, 1, 1), tz)
if old_article.date_published is None:
old_article.date_published = make_aware(datetime(1970, 1, 1), tz)
try:
iss = bongo_models.Issue.objects.get(
issue_number__exact = old_article.issue_number,
volume__exact = bongo_models.Volume.objects.get(
volume_number__exact = old_article.volume
)
)
except bongo_models.Issue.DoesNotExist:
# Some articles specify an issue that does not exist (cough, 9989)
# Set their issue to be the existing issue with date closest to the article date
iss_before = bongo_models.Issue.objects.filter(
issue_date__gt=old_article.date_created.date()
).order_by('issue_date').first()
iss_after = bongo_models.Issue.objects.filter(
issue_date__lt=old_article.date_created.date()
).order_by('-issue_date').first()
if not iss_before and not iss_after:
raise Exception("Can't find any issues near this article's date")
elif not iss_before:
iss = iss_after
elif not iss_after:
iss = iss_before
elif (
old_article.date_created.date() - iss_before.issue_date >
iss_after.issue_date - old_article.date_created.date()
):
iss = iss_after
else:
iss = iss_before
(post, created) = bongo_models.Post.objects.get_or_create(
imported = True,
pk = old_article.id,
published = old_article.date_published,
is_published = (
True if old_article.published == 1 else False
),
opinion = (
True if old_article.opinion == 1 else False
),
issue = iss,
volume = bongo_models.Volume.objects.get(
volume_number__exact = old_article.volume
),
section = bongo_models.Section.objects.get(
pk__exact = old_article.section_id
),
title = old_article.title,
views_local = old_article.views_bowdoin,
views_global = old_article.views,
)
if created:
post.created = old_article.date_created
post.updated = old_article.date_updated
if old_articlebody:
(text, created) = bongo_models.Text.objects.get_or_create(
imported = True,
pk = old_articlebody.id,
body = old_articlebody.body
)
post.text.add(text)
if old_authors:
for old_author in old_authors:
text.creators.add(bongo_models.Creator.objects.get(pk__exact = old_author.id))
text.save()
post.primary_type = "text"
if old_article.series != 0:
post.series.add(
bongo_models.Series.objects.get(
pk__exact = old_article.series
)
)
if post.series.all()[0].name == "Snapshot":
post.primary_type = "photo"
post.save(auto_dates = False) # prevent auto-save of created and updated fields
def import_creator():
global options
for old_author in archive_models.Author.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing author #{}".format(old_author.pk))
(creator, created) = bongo_models.Creator.objects.get_or_create(
imported = True,
pk = old_author.id,
name = old_author.name,
)
if old_author.job:
creator.job = bongo_models.Job.objects.get(pk__exact = old_author.job)
creator.save()
if old_author.photo:
staticfiler(
creator.profpic,
slugify(old_author.name) + ".jpg",
"headshots/" + slugify(old_author.name) + ".jpg",
"images/authors/" + old_author.photo
)
creator.save()
def import_photo():
global options
new_ids_created = 1
for old_photo in archive_models.Photo.objects.using('archive').all().iterator():
if options.get("verbose"):
print("importing photo #{}".format(old_photo.pk))
(photo, created) = bongo_models.Photo.objects.get_or_create(
imported = True,
pk = old_photo.id,
caption = old_photo.caption,
)
try:
image_url = "images/{date}/{fname}".format(
date = (old_photo.article_date if old_photo.article_date else archive_models.Article.objects.using(
'archive'
).get(id__exact = old_photo.article_id).date),
fname = (old_photo.filename_original if old_photo.filename_original else old_photo.filename_large)
)
staticfiler(
photo.staticfile,
str(old_photo.id) + ".jpg",
"photos/" + str(old_photo.id) + ".jpg",
image_url
)
except Exception as e:
if options.get("verbose"):
print(e)
# Courtesy photos have a photographer id of 1, which doesn't exist.
# We have to come up with a new id for this photographer that doesn't interfere with any existing id
if old_photo.photographer_id == 1:
(photographer, created) = bongo_models.Creator.objects.get_or_create(
imported = True,
name = old_photo.credit,
courtesyof = True,
pk = archive_models.Author.objects.using('archive').latest('id').id + new_ids_created
)
photo.creators.add(photographer)
new_ids_created += 1
elif old_photo.photographer_id == 0:
pass
else:
try:
photo.creators.add(
bongo_models.Creator.objects.get(
pk__exact = old_photo.photographer_id
)
)
except:
if options.get("verbose"):
print("Issues crediting this photo to author #" + str(old_photo.photographer_id))
photo.save()
try:
post_owner = bongo_models.Post.objects.get(
pk__exact = old_photo.article_id
)
post_owner.photo.add(photo)
post_owner.save()
except:
if options.get("verbose"):
print("The article owning this photo has been deleted.")
class Command(BaseCommand):
help = "Echo all positional arguments."
option_list = BaseCommand.option_list + (
make_option(
'--verbose',
dest = 'verbose',
action = 'store_true',
default = False,
help = "Print verbose logging."
),
make_option(
'--fake',
dest = 'nodownload',
action = 'store_true',
default = False,
help = "Fake downloading of files."
),
make_option(
'--ignore_empties',
dest = 'ign_empt',
action = 'store_true',
default = False,
help = "Do not attempt to replace empty files."
),
make_option(
'--ipdb',
dest = 'ipdb',
action = 'store_true',
default = False,
help = "Launch ipdb if an unhandled exception occurs."
)
)
def handle(self, *args, **opts):
global options
options = opts
global session
session = requests.Session()
# Dynamically append mysql db to settings
settings.DATABASES['archive'] = {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'DB02Orient',
'USER': 'root',
'PASSWORD': settings.MYSQL_PASS,
'HOST': '127.0.0.1',
'PORT': '3306',
}
with launch_ipdb_on_exception() if options.get("ipdb") else dummy_context_mgr():
import_ads()
import_tips()
import_alerts()
import_volumes()
import_issues()
import_series()
import_section()
import_job()
import_creator()
import_content()
import_attachment()
import_photo()
class dummy_context_mgr():
def __enter__(self):
return None
def __exit__(self, *args):
return False
|
{
"content_hash": "02a385514b4b726cad98ba82dee40779",
"timestamp": "",
"source": "github",
"line_count": 645,
"max_line_length": 115,
"avg_line_length": 35.08527131782946,
"alnum_prop": 0.552364118426867,
"repo_name": "BowdoinOrient/bongo",
"id": "798631171fd868ecea00ad88f01480c087e87554",
"size": "22630",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "bongo/apps/archive/management/commands/importdb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "26609"
},
{
"name": "HTML",
"bytes": "20898"
},
{
"name": "JavaScript",
"bytes": "3005"
},
{
"name": "Python",
"bytes": "169382"
},
{
"name": "Shell",
"bytes": "2173"
}
],
"symlink_target": ""
}
|
import tensorflow as tf
from config import parse_args, FLAGS
from tfsolver import TFSolver
from network_factory import seg_network
from dataset import DatasetFactory
from ocnn import loss_functions_seg, build_solver, get_seg_label
from libs import points_property, octree_property, octree_decode_key
# Add config
FLAGS.LOSS.point_wise = True
# get the label and pts
def get_point_info(points, mask_ratio=0, mask=-1):
with tf.name_scope('points_info'):
pts = points_property(points, property_name='xyz', channel=4)
label = points_property(points, property_name='label', channel=1)
label = tf.reshape(label, [-1])
label_mask = label > mask # mask out invalid points, -1
if mask_ratio > 0: # random drop some points to speed up training
rnd_mask = tf.random.uniform(tf.shape(label_mask)) > mask_ratio
label_mask = tf.logical_and(label_mask, rnd_mask)
pts = tf.boolean_mask(pts, label_mask)
label = tf.boolean_mask(label, label_mask)
return pts, label
# IoU
def tf_IoU_per_shape(pred, label, class_num, mask=-1):
with tf.name_scope('IoU'):
# Set mask to 0 to filter unlabeled points, whose label is 0
label_mask = label > mask # mask out label
pred = tf.boolean_mask(pred, label_mask)
label = tf.boolean_mask(label, label_mask)
pred = tf.argmax(pred, axis=1, output_type=tf.int32)
intsc, union = [None] * class_num, [None] * class_num
for k in range(class_num):
pk, lk = tf.equal(pred, k), tf.equal(label, k)
intsc[k] = tf.reduce_sum(tf.cast(pk & lk, dtype=tf.float32))
union[k] = tf.reduce_sum(tf.cast(pk | lk, dtype=tf.float32))
return intsc, union
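# Illustrative sketch (not part of the original script): the per-class counts
# returned above translate into IoU as intersection / union. With 3 classes,
# pred = [0, 1, 1, 2] and label = [0, 1, 2, 2] give intersections [1, 1, 1] and
# unions [1, 2, 2], i.e. IoUs of 1.0, 0.5 and 0.5.
def _example_iou(pred=(0, 1, 1, 2), label=(0, 1, 2, 2), class_num=3):
    ious = []
    for k in range(class_num):
        intsc = sum(1 for p, l in zip(pred, label) if p == k and l == k)
        union = sum(1 for p, l in zip(pred, label) if p == k or l == k)
        ious.append(intsc / (union + 1.0e-10))
    return ious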
# define the graph
class ComputeGraphSeg:
def __init__(self, flags):
self.flags = flags
def create_dataset(self, flags_data):
return DatasetFactory(flags_data)(return_iter=True)
def __call__(self, dataset='train', training=True, reuse=False, gpu_num=1):
FLAGS = self.flags
with tf.device('/cpu:0'):
flags_data = FLAGS.DATA.train if dataset == 'train' else FLAGS.DATA.test
data_iter = self.create_dataset(flags_data)
tower_tensors = []
for i in range(gpu_num):
with tf.device('/gpu:%d' % i):
with tf.name_scope('device_%d' % i):
octree, _, points = data_iter.get_next()
pts, label = get_point_info(points, flags_data.mask_ratio)
if not FLAGS.LOSS.point_wise:
pts, label = None, get_seg_label(octree, FLAGS.MODEL.depth_out)
logit = seg_network(octree, FLAGS.MODEL, training, reuse, pts=pts)
losses = loss_functions_seg(logit, label, FLAGS.LOSS.num_class,
FLAGS.LOSS.weight_decay, 'ocnn', mask=0)
tensors = losses + [losses[0] + losses[2]] # total loss
names = ['loss', 'accu', 'regularizer', 'total_loss']
if flags_data.batch_size == 1:
num_class = FLAGS.LOSS.num_class
intsc, union = tf_IoU_per_shape(logit, label, num_class, mask=0)
iou = tf.constant(0.0) # placeholder, calc its value later
tensors = [iou] + tensors + intsc + union
names = ['iou'] + names + \
['intsc_%d' % i for i in range(num_class)] + \
['union_%d' % i for i in range(num_class)]
tower_tensors.append(tensors)
reuse = True
tensors = tower_tensors[0] if gpu_num == 1 else list(zip(*tower_tensors))
return tensors, names
# define the solver
class PartNetSolver(TFSolver):
def __init__(self, flags, compute_graph, build_solver=build_solver):
super(PartNetSolver, self).__init__(flags, compute_graph, build_solver)
self.num_class = flags.LOSS.num_class # used to calculate the IoU
def result_callback(self, avg_results):
# calc part-IoU, update `iou`, this is in correspondence with Line 77
iou_avg = 0.0
ious = [0] * self.num_class
for i in range(1, self.num_class): # !!! Ignore the first label
instc_i = avg_results[self.test_names.index('intsc_%d' % i)]
union_i = avg_results[self.test_names.index('union_%d' % i)]
ious[i] = instc_i / (union_i + 1.0e-10)
iou_avg = iou_avg + ious[i]
iou_avg = iou_avg / (self.num_class - 1)
avg_results[self.test_names.index('iou')] = iou_avg
return avg_results
# run the experiments
if __name__ == '__main__':
FLAGS = parse_args()
compute_graph = ComputeGraphSeg(FLAGS)
solver = PartNetSolver(FLAGS, compute_graph)
solver.run()
|
{
"content_hash": "a555c6ac1f0f3c8da5439178c82fa137",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 78,
"avg_line_length": 39.14782608695652,
"alnum_prop": 0.6314971123944914,
"repo_name": "microsoft/O-CNN",
"id": "bcf391235ab2b926b02107121dd6128b5b75a283",
"size": "4502",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/script/run_seg_partnet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1028"
},
{
"name": "C++",
"bytes": "1168252"
},
{
"name": "CMake",
"bytes": "12150"
},
{
"name": "Cuda",
"bytes": "107918"
},
{
"name": "Dockerfile",
"bytes": "2505"
},
{
"name": "MATLAB",
"bytes": "989"
},
{
"name": "Python",
"bytes": "379722"
}
],
"symlink_target": ""
}
|
"""
Copyright 2016 The Trustees of Princeton University
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import subprocess
import random
import time
import testlib
import testconf
import shutil
PUT_PATH = os.path.join(testconf.SYNDICATE_UG_ROOT, "syndicate-put")
CAT_PATH = os.path.join(testconf.SYNDICATE_UG_ROOT, "syndicate-cat")
RG_PATH = os.path.join(testconf.SYNDICATE_RG_ROOT, "syndicate-rg")
RG_DRIVER = os.path.join(testconf.SYNDICATE_PYTHON_ROOT, "syndicate/rg/drivers/disk" )
NUM_FILES = 1
def stop_and_save( output_dir, proc, out_path, save_name ):
exitcode, out = testlib.stop_gateway( proc, out_path )
testlib.save_output( output_dir, save_name, out )
return exitcode, out
if __name__ == "__main__":
local_path = testlib.make_random_file(16384)
local_fd = open(local_path, "r")
expected_data = local_fd.read()
local_fd.close()
config_dir, output_dir = testlib.test_setup()
volume_name = testlib.add_test_volume( config_dir, allow_anon=True, private=False )
RG_gateway_name = testlib.add_test_gateway( config_dir, volume_name, "RG", caps="NONE", email=testconf.SYNDICATE_ADMIN )
testlib.update_gateway( config_dir, RG_gateway_name, "port=31112", "driver=%s" % RG_DRIVER )
rg_proc, rg_out_path = testlib.start_gateway( config_dir, RG_PATH, testconf.SYNDICATE_ADMIN, volume_name, RG_gateway_name, valgrind=True )
if not testlib.gateway_ping( 31112, 15 ):
raise Exception("%s exited %s" % (RG_PATH, rg_proc.poll()))
# should cause the RG to get updated that there's a new gateway
gateway_name = testlib.add_test_gateway( config_dir, volume_name, "UG", caps="ALL", email=testconf.SYNDICATE_ADMIN )
cat_gateway_name = testlib.add_test_gateway( config_dir, volume_name, "UG", caps="ALL", email="ANONYMOUS" )
random_part = hex(random.randint(0, 2**32-1))[2:]
output_paths = []
for i in xrange(0, NUM_FILES):
output_path = "/put-%s-%s" % (random_part, i)
output_paths.append(output_path)
exitcode, out = testlib.run( PUT_PATH, '-d2', '-f', '-c', os.path.join(config_dir, 'syndicate.conf'), '-u', testconf.SYNDICATE_ADMIN, '-v', volume_name, '-g', gateway_name, local_path, output_path, valgrind=True )
testlib.save_output( output_dir, "syndicate-put-%s" % i, out )
if exitcode != 0:
stop_and_save( output_dir, rg_proc, rg_out_path, "syndicate-rg")
raise Exception("%s exited %s" % (PUT_PATH, exitcode))
for i in xrange(0, NUM_FILES):
path = output_paths[i]
exitcode, out = testlib.run( CAT_PATH, '-d2', '-f', '-c', os.path.join(config_dir, 'syndicate.conf'), '-u', 'ANONYMOUS', '-v', volume_name, '-g', cat_gateway_name, path, valgrind=True )
testlib.save_output( output_dir, 'syndicate-cat-%s' % i, out )
if exitcode != 0:
stop_and_save( output_dir, rg_proc, rg_out_path, "syndicate-rg")
raise Exception("%s exited %s" % (CAT_PATH, exitcode))
        # check for correctness
if expected_data not in out:
stop_and_save( output_dir, rg_proc, rg_out_path, "syndicate-rg")
raise Exception("data not found in output")
rg_exitcode, rg_out = stop_and_save( output_dir, rg_proc, rg_out_path, "syndicate-rg")
if rg_exitcode != 0:
raise Exception("%s exited %s" % (RG_PATH, rg_exitcode))
sys.exit(0)
|
{
"content_hash": "97f96b14339c681b12ad241af5462f26",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 221,
"avg_line_length": 43.032967032967036,
"alnum_prop": 0.6608784473953013,
"repo_name": "iychoi/syndicate-core",
"id": "5e5703d8670eac39bf93042109cb42432d635cda",
"size": "3939",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/gateways/syndicate-put-cat-anonymous.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "119973"
},
{
"name": "C++",
"bytes": "1774442"
},
{
"name": "Makefile",
"bytes": "20511"
},
{
"name": "Python",
"bytes": "2257256"
},
{
"name": "Shell",
"bytes": "42197"
}
],
"symlink_target": ""
}
|
from os.path import join, dirname, realpath
import kivy
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from plyer import notification
from plyer.utils import platform
kivy.require('1.8.0')
class NotificationDemo(BoxLayout):
def do_notify(self, mode='normal'):
title = self.ids.notification_title.text
message = self.ids.notification_text.text
ticker = self.ids.ticker_text.text
kwargs = {'title': title, 'message': message, 'ticker': ticker}
if mode == 'fancy':
kwargs['app_name'] = "Plyer Notification Example"
if platform == "win":
kwargs['app_icon'] = join(dirname(realpath(__file__)),
'plyer-icon.ico')
kwargs['timeout'] = 4
else:
kwargs['app_icon'] = join(dirname(realpath(__file__)),
'plyer-icon.png')
elif mode == 'toast':
kwargs['toast'] = True
notification.notify(**kwargs)
class NotificationDemoApp(App):
def build(self):
return NotificationDemo()
def on_pause(self):
return True
if __name__ == '__main__':
NotificationDemoApp().run()
|
{
"content_hash": "f87a318d9537e4aa02aba93e35c2cc64",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 71,
"avg_line_length": 28.272727272727273,
"alnum_prop": 0.5659163987138264,
"repo_name": "kivy/plyer",
"id": "be6e5467514937a96b710bcb58661ae913784442",
"size": "1244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/notification/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1206"
},
{
"name": "HTML",
"bytes": "19384"
},
{
"name": "Makefile",
"bytes": "868"
},
{
"name": "PowerShell",
"bytes": "504"
},
{
"name": "Python",
"bytes": "459341"
},
{
"name": "Shell",
"bytes": "6702"
}
],
"symlink_target": ""
}
|
class ControllerException(Exception):
    '''Raised when the controller encounters an error.'''
def __init__(self, reason):
''' '''
Exception.__init__(self, reason)
class UnknownService(Exception):
    '''Raised when a requested service is not known.'''
def __init__(self, reason):
''' '''
Exception.__init__(self, reason)
|
{
"content_hash": "d9d62c8a5cff535cd01df40bbe429784",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 40,
"avg_line_length": 19.785714285714285,
"alnum_prop": 0.5090252707581228,
"repo_name": "seraphlnWu/creditor",
"id": "dba84b097ff6c9792f49f0059dcab885d0db65ec",
"size": "293",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "observer/node/errors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "113609"
}
],
"symlink_target": ""
}
|
"""Added SSL fields in network table.
Revision ID: 50135956b24
Revises: 140c0dd3605
Create Date: 2015-12-10 20:46:37.397744
"""
# revision identifiers, used by Alembic.
revision = '50135956b24'
down_revision = '140c0dd3605'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('networks', sa.Column('ssl', sa.Boolean(), nullable=True))
op.add_column('networks', sa.Column('ssl_verify', sa.String(length=255), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('networks', 'ssl_verify')
op.drop_column('networks', 'ssl')
### end Alembic commands ###
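# Added usage note (hedged, not part of the generated migration): with the Alembic
# CLI this revision is applied via `alembic upgrade head` (or `alembic upgrade
# 50135956b24`) and rolled back via `alembic downgrade 140c0dd3605`.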
|
{
"content_hash": "6294fd1e2543d112f825af7b25536c3c",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 92,
"avg_line_length": 26.566666666666666,
"alnum_prop": 0.6888331242158093,
"repo_name": "waartaa/ircb",
"id": "79aa19133d4fdb0c9681707d3ccc4978e9f21ee7",
"size": "797",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "alembic/versions/50135956b24_added_ssl_fields_in_network_table.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "111929"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
import json
import logging
import traceback
import time
from google.appengine.api import app_identity, mail, capabilities
from google.appengine.runtime import DeadlineExceededError
from tekton.gae.middleware import Middleware
from tekton.router import PathNotFound
def get_apis_statuses(e):
if not isinstance(e, DeadlineExceededError):
return {}
t1 = time.time()
statuses = {
'blobstore': capabilities.CapabilitySet('blobstore').is_enabled(),
'datastore_v3': capabilities.CapabilitySet('datastore_v3').is_enabled(),
'datastore_v3_write': capabilities.CapabilitySet('datastore_v3', ['write']).is_enabled(),
'images': capabilities.CapabilitySet('images').is_enabled(),
'mail': capabilities.CapabilitySet('mail').is_enabled(),
'memcache': capabilities.CapabilitySet('memcache').is_enabled(),
'taskqueue': capabilities.CapabilitySet('taskqueue').is_enabled(),
'urlfetch': capabilities.CapabilitySet('urlfetch').is_enabled(),
}
t2 = time.time()
statuses['time'] = t2 - t1
return statuses
def send_error_to_admins(settings, exception, handler, render, template):
tb = traceback.format_exc()
errmsg = exception.message
logging.error(errmsg)
logging.error(tb)
handler.response.write(render(template))
appid = app_identity.get_application_id()
subject = 'ERROR in %s: [%s] %s' % (appid, handler.request.path, errmsg)
body = """
------------- request ------------
%s
----------------------------------
------------- GET params ---------
%s
----------------------------------
----------- POST params ----------
%s
----------------------------------
----------- traceback ------------
%s
----------------------------------
""" % (handler.request, handler.request.GET, handler.request.POST, tb)
body += 'API statuses = ' + json.dumps(get_apis_statuses(exception), indent=4)
mail.send_mail_to_admins(sender=settings.SENDER_EMAIL,
subject=subject,
body=body)
class EmailMiddleware(Middleware):
def handle_error(self, exception):
import settings # workaround. See https://github.com/renzon/zenwarch/issues/3
if isinstance(exception, PathNotFound):
self.handler.response.set_status(404)
send_error_to_admins(settings, exception, self.handler, self.dependencies['_render'],
settings.TEMPLATE_404_ERROR)
else:
self.handler.response.set_status(400)
send_error_to_admins(settings, exception, self.handler, self.dependencies['_render'],
settings.TEMPLATE_400_ERROR)
|
{
"content_hash": "c2083ba2f0d05e45c748d7d47f3b4f57",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 97,
"avg_line_length": 35.34615384615385,
"alnum_prop": 0.6050054406964092,
"repo_name": "cbeloni/pychronesapp",
"id": "91a317c474b013b53062a781f68876100decc113",
"size": "2781",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "backend/venv/lib/python2.7/site-packages/tekton/gae/middleware/email_errors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "222"
},
{
"name": "JavaScript",
"bytes": "4045"
},
{
"name": "Python",
"bytes": "2504529"
},
{
"name": "Shell",
"bytes": "5853"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
setup(name='django-foreignkeysearch',
version='1.0',
packages=['foreignkeysearch'])
|
{
"content_hash": "d405eefb1fdf69ce80c68cdd184a0ced",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 37,
"avg_line_length": 32.25,
"alnum_prop": 0.7131782945736435,
"repo_name": "schneck/django-foreignkeysearch",
"id": "79890519acca7670e66a477e9dcbf7402c057fa7",
"size": "129",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7937"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
import logging
import pprint
import time
from classes import ReportRunner
from classes.dcm import DCM
from classes.report2bq import Report2BQ
from classes.report_type import Type
from io import StringIO
class DCMReportRunner(ReportRunner):
"""DCMReportRunner.
Run CM360 reports on demand.
"""
report_type = Type.CM
def __init__(self, cm_id: str=None, profile: str=None,
email: str=None, project: str=None, **unused) -> DCMReportRunner:
"""Initialize the runner.
The runner inherits from ReportRunner, which mandates the 'run' method.
Args:
cm_id (str, optional): CM report id. Defaults to None.
profile (str, optional): User's CM profile id. Defaults to None.
email (str, optional): User email for the token. Defaults to None.
project (str, optional): Project. Defaults to None but should be
pre-populated with the current project by the caller.
Returns:
DCMReportRunner: self
"""
self.email = email
self.cm_id = cm_id
self.cm_profile = profile
self.project = project
def run(self, unattended: bool=True) -> None:
"""Perform the report run
Args:
unattended (bool, optional): Is this a fire and forget (True) or wait
for the report to complete (False). Defaults to True.
"""
dcm = DCM(email=self.email, project=self.project, profile=self.cm_profile)
if unattended:
self._unattended_run(dcm)
else:
self._attended_run(dcm)
def _attended_run(self, dcm: DCM) -> None:
"""_attended_run.
Run the report and wait for it to finish.
Args:
dcm (DCM): The CM controller.
"""
successful = []
response = dcm.run_report(report_id=self.cm_id, synchronous=True)
if response:
buffer = StringIO()
pprint.pprint(response, stream=buffer)
logging.info(buffer.getvalue())
while response['status'] == 'PROCESSING':
time.sleep(60 * 0.5)
response = dcm.report_state(report_id=self.cm_id, file_id=response['id'])
buffer = StringIO()
pprint.pprint(response, stream=buffer)
logging.info(buffer.getvalue())
report2bq = Report2BQ(
cm=True, cm_id=self.cm_id, email=self.email, project=self.project,
profile=self.cm_profile
)
report2bq.handle_report_fetcher(fetcher=dcm, report_id=self.cm_id)
def _unattended_run(self, dcm: DCM) -> None:
"""_unattended_run.
Start the report running and store the run configuration in Firestore. This
will then be monitored for completion and import by the run-monitor.
Args:
dcm (DCM): The CM controller.
"""
response = dcm.run_report(report_id=self.cm_id, synchronous=False)
if response:
buffer = StringIO()
pprint.pprint(response, stream=buffer)
logging.info(buffer.getvalue())
runner = {
'type': Type.CM.value,
'project': self.project,
'report_id': self.cm_id,
'email': self.email,
'profile': self.cm_profile,
'file_id': response['id']
}
self.firestore.store_document(type=Type._RUNNING,
id=runner['report_id'], document=runner)
|
{
"content_hash": "7bbc0829f73e2974775ea7f46e58dd0f",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 80,
"avg_line_length": 29.88888888888889,
"alnum_prop": 0.6425030978934325,
"repo_name": "google/report2bq",
"id": "6baa4e9d70b11cb17ddf81f479fb4e050c46aa3d",
"size": "3803",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/classes/dcm_report_runner.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "689"
},
{
"name": "HTML",
"bytes": "13362"
},
{
"name": "JavaScript",
"bytes": "375"
},
{
"name": "Python",
"bytes": "435292"
},
{
"name": "Shell",
"bytes": "35343"
}
],
"symlink_target": ""
}
|
from math import pi
import matplotlib.pyplot as plt
# IMU exercise
# Copyright (c) 2015-2017 Kjeld Jensen kjen@mmmi.sdu.dk kj@kjen.dk
# Insert initialize code below ###################
# Uncomment the file to read ##
fileName = 'imu_razor_data_static.txt'
# IMU type
# imuType = 'vectornav_vn100'
imuType = 'sparkfun_razor'
# Variables for plotting ##
showPlot = True
plotData = []
# Initialize your variables here ##
myValue = 0.0
######################################################
# open the imu data file
f = open(fileName, "r")
# initialize variables
count = 0
# looping through file
for line in f:
count += 1
# split the line into CSV formatted data
    line = line.replace('*', ',') # make the checksum another csv value
csv = line.split(',')
# keep track of the timestamps
ts_recv = float(csv[0])
if count == 1:
ts_now = ts_recv # only the first time
ts_prev = ts_now
ts_now = ts_recv
if imuType == 'sparkfun_razor':
# import data from a SparkFun Razor IMU (SDU firmware)
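        # Added note (hedged): the raw counts appear to be scaled to m/s^2 assuming
        # a +/-4 g accelerometer range, and the gyro divisor of 14.375 LSB per deg/s
        # (the ITG-3200 scale factor) combined with pi/180 yields rad/s.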
acc_x = int(csv[2]) / 1000.0 * 4 * 9.82
acc_y = int(csv[3]) / 1000.0 * 4 * 9.82
acc_z = int(csv[4]) / 1000.0 * 4 * 9.82
gyro_x = int(csv[5]) * 1 / 14.375 * pi / 180.0
gyro_y = int(csv[6]) * 1 / 14.375 * pi / 180.0
gyro_z = int(csv[7]) * 1 / 14.375 * pi / 180.0
elif imuType == 'vectornav_vn100':
# import data from a VectorNav VN-100 configured to output $VNQMR
acc_x = float(csv[9])
acc_y = float(csv[10])
acc_z = float(csv[11])
gyro_x = float(csv[12])
gyro_y = float(csv[13])
gyro_z = float(csv[14])
# Insert loop code below #########################
# Variables available
# ----------------------------------------------------
# count Current number of updates
# ts_prev Time stamp at the previous update
# ts_now Time stamp at this update
# acc_x Acceleration measured along the x axis
# acc_y Acceleration measured along the y axis
# acc_z Acceleration measured along the z axis
# gyro_x Angular velocity measured about the x axis
# gyro_y Angular velocity measured about the y axis
# gyro_z Angular velocity measured about the z axis
# Insert your code here ##
# 4.2.2
myValue += gyro_z * (ts_now - ts_prev)
# in order to show a plot use this function to append your value to a list:
plotData.append(myValue * 180.0 / pi)
######################################################
# closing the file
f.close()
# show the plot
if showPlot:
plt.title("Angular velocity static")
plt.ylabel("Angle")
plt.xlabel("Time")
plt.plot(plotData)
plt.savefig('imu_exercise_4.2.2_plot.png')
plt.show()
|
{
"content_hash": "4684b285fae9b685f9d7876d638ac581",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 79,
"avg_line_length": 28.68041237113402,
"alnum_prop": 0.5657800143781452,
"repo_name": "carentsen/RMUAST",
"id": "a8f5966519deff20a8e1660023cd571e8d3449e2",
"size": "2844",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rmuast_s17_module_5/exercise_imu/imu_exercise_4.2.2.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2281"
},
{
"name": "Python",
"bytes": "146166"
},
{
"name": "Shell",
"bytes": "3126"
},
{
"name": "TeX",
"bytes": "115343"
}
],
"symlink_target": ""
}
|
"""
Load multiple pp diagnostic files, aggregate by year, day etc., calculate mean, sum etc. and save
"""
import os, sys
import datetime
import iris
import iris.unit as unit
from iris.coord_categorisation import add_categorised_coord
from iris.analysis.cartography import unrotate_pole
from iris.coords import DimCoord
import numpy as np
import re
import pdb
diag = '30201'
cube_names=['eastward_wind', 'northward_wind']
cube_names2=['x_wind','y_wind']
#pp_file_path='/projects/cascade/pwille/moose_retrievals/'
pp_file_path='/nfs/a90/eepdw/Data/EMBRACE/'
#experiment_ids = ['djzny', 'djznw', 'djznq', 'djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq'] # All minus large 3
#experiment_ids = ['djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq'] # All minus large 3
date_ranges=[[datetime.datetime(2011,8,18,0,0,0), datetime.datetime(2011,8,27,0,0,0)],
[datetime.datetime(2011,8,28,0,0,0),datetime.datetime(2011,9,5,0,0,0)]]
experiment_ids = ['dklyu', 'dkmbq', 'dkmgw']
regrid_model='djznw'
regrid_model_min1=regrid_model[:-1]
#def add_hour_of_day(cube, coord, name='hour'):
# add_categorised_coord(cube, name, coord,
# lambda coord, x: coord.units.num2date(x).hour)
dtmindt = datetime.datetime(2011,8,19,0,0,0)
dtmaxdt = datetime.datetime(2011,9,7,23,0,0)
dtmin = unit.date2num(dtmindt, 'hours since 1970-01-01 00:00:00', unit.CALENDAR_STANDARD)
dtmax = unit.date2num(dtmaxdt, 'hours since 1970-01-01 00:00:00', unit.CALENDAR_STANDARD)
time_constraint = iris.Constraint(time= lambda t: dtmin <= t.point <= dtmax)
fr = '%s%s/%s/%s.pp' % (pp_file_path, regrid_model_min1, regrid_model, diag)
fg = '%sdjzn/djznw/%s.pp' % (pp_file_path, diag)
try:
glob_load = iris.load_cube(fg, ('%s' % cube_names[0]) & time_constraint)
except iris.exceptions.ConstraintMismatchError:
glob_load = iris.load_cube(fg, ('%s' % cube_names2[0]) & time_constraint)
## Get time points from global LAM to use as time constraint when loading other runs
time_list = glob_load.coord('time').points
# Some models have radiation diagnostics that are offset by 10 s from the others, so compare integer time values
glob_tc = iris.Constraint(time= lambda t: int(t.point) in time_list.astype(int))
#glob_tc = iris.Constraint(time=time_list)
del glob_load
for experiment_id in experiment_ids:
expmin1 = experiment_id[:-1]
fu = '%s%s/%s/%s.pp' % (pp_file_path, expmin1, experiment_id, diag)
save_name = '%s%s/%s/%s_%s_mean_by_day.pp' % (pp_file_path, expmin1, experiment_id, experiment_id, diag)
print experiment_id
sys.stdout.flush()
try:
os.remove(save_name)
except OSError:
print '%s NOT REMOVED' % save_name
pass
try:
for cube_name in cube_names:
cube = iris.load_cube(fu, ('%s' % cube_name) & glob_tc)
time_coords = cube.coord('time')
#add_hour_of_day(cube, time_coords)
iris.coord_categorisation.add_day_of_year(cube, time_coords, name='day_of_year')
for height_slice in cube.slices(['time', 'grid_latitude', 'grid_longitude']):
mean = height_slice.collapsed('time', iris.analysis.MEAN)
iris.save(mean, save_name, append=True)
except iris.exceptions.ConstraintMismatchError:
for cube_name in cube_names2:
cube = iris.load_cube(fu, ('%s' % cube_name) & glob_tc)
time_coords = cube.coord('time')
#add_hour_of_day(cube, time_coords)
iris.coord_categorisation.add_day_of_year(cube, time_coords, name='day_of_year')
for height_slice in cube.slices(['time', 'grid_latitude', 'grid_longitude']):
mean = height_slice.collapsed('time', iris.analysis.MEAN)
iris.save(mean, save_name, append=True)
|
{
"content_hash": "95a920c428501f32c4d4a2b3e55fbfe2",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 119,
"avg_line_length": 33.63392857142857,
"alnum_prop": 0.6607379877886913,
"repo_name": "peterwilletts24/Python-Scripts",
"id": "61321ef3133ff211c0463228b76ca3a2ab116b42",
"size": "3767",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EMBRACE/30201_mean_by_day.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2242925"
},
{
"name": "Shell",
"bytes": "140"
}
],
"symlink_target": ""
}
|
'''Convert to and from Roman numerals
This program is part of 'Dive Into Python 3', a free Python book for
experienced programmers. Visit http://diveintopython3.org/ for the
latest version.
'''
class OutOfRangeError(ValueError): pass
class NotIntegerError(ValueError): pass
roman_numeral_map = (('M', 1000),
('CM', 900),
('D', 500),
('CD', 400),
('C', 100),
('XC', 90),
('L', 50),
('XL', 40),
('X', 10),
('IX', 9),
('V', 5),
('IV', 4),
('I', 1))
def to_roman(n):
'''convert integer to Roman numeral'''
if not (0 < n < 4000):
raise OutOfRangeError('number out of range (must be 1..3999)')
if not isinstance(n, int):
raise NotIntegerError('non-integers can not be converted')
result = ''
for numeral, integer in roman_numeral_map:
while n >= integer:
result += numeral
n -= integer
return result
def from_roman(s):
'''convert Roman numeral to integer'''
result = 0
index = 0
for numeral, integer in roman_numeral_map:
while s[index : index + len(numeral)] == numeral:
result += integer
index += len(numeral)
return result
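# Minimal usage sketch (added; not part of the original example code):
if __name__ == '__main__':
    assert to_roman(1987) == 'MCMLXXXVII'
    assert from_roman('MCMLXXXVII') == 1987
    # round-tripping every supported value should be lossless
    assert all(from_roman(to_roman(n)) == n for n in range(1, 4000))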
# Copyright (c) 2009, Mark Pilgrim, All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
|
{
"content_hash": "22e76f0e2479a2c29a35efcb90f5388f",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 82,
"avg_line_length": 39.82608695652174,
"alnum_prop": 0.633551673944687,
"repo_name": "ctasims/Dive-Into-Python-3",
"id": "41a870f7ff1a61586d815312675a25d80680f1dd",
"size": "2748",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/roman5.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "0"
},
{
"name": "Python",
"bytes": "1053014"
}
],
"symlink_target": ""
}
|
class UnknownCode(Exception):
pass
class TooShort(Exception):
pass
class TooLong(Exception):
pass
class LenNotSupported(Exception):
pass
class InconsistentLen(Exception):
pass
|
{
"content_hash": "3fff57882e9b6a1be8493690a9aa82ff",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 33,
"avg_line_length": 14.285714285714286,
"alnum_prop": 0.73,
"repo_name": "bmcorser/py-multihash",
"id": "fb561bf1acb6fd3ae6f73c04b7de4834ca83141e",
"size": "200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "multihash/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6294"
}
],
"symlink_target": ""
}
|
from collections.abc import Callable
from math import exp
import os
import random
import numpy as np
import pytest
import openmc.data
from . import needs_njoy
@pytest.fixture(scope='module')
def h2o():
"""H in H2O thermal scattering data."""
directory = os.path.dirname(os.environ['OPENMC_CROSS_SECTIONS'])
filename = os.path.join(directory, 'c_H_in_H2O.h5')
return openmc.data.ThermalScattering.from_hdf5(filename)
@pytest.fixture(scope='module')
def graphite():
"""Graphite thermal scattering data."""
directory = os.path.dirname(os.environ['OPENMC_CROSS_SECTIONS'])
filename = os.path.join(directory, 'c_Graphite.h5')
return openmc.data.ThermalScattering.from_hdf5(filename)
@pytest.fixture(scope='module')
def h2o_njoy():
"""H in H2O generated using NJOY."""
endf_data = os.environ['OPENMC_ENDF_DATA']
path_h1 = os.path.join(endf_data, 'neutrons', 'n-001_H_001.endf')
path_h2o = os.path.join(endf_data, 'thermal_scatt', 'tsl-HinH2O.endf')
return openmc.data.ThermalScattering.from_njoy(
path_h1, path_h2o, temperatures=[293.6, 500.0])
@pytest.fixture(scope='module')
def hzrh():
"""H in ZrH thermal scattering data."""
endf_data = os.environ['OPENMC_ENDF_DATA']
filename = os.path.join(endf_data, 'thermal_scatt', 'tsl-HinZrH.endf')
return openmc.data.ThermalScattering.from_endf(filename)
@pytest.fixture(scope='module')
def hzrh_njoy():
"""H in ZrH generated using NJOY."""
endf_data = os.environ['OPENMC_ENDF_DATA']
path_h1 = os.path.join(endf_data, 'neutrons', 'n-001_H_001.endf')
path_hzrh = os.path.join(endf_data, 'thermal_scatt', 'tsl-HinZrH.endf')
with_endf_data = openmc.data.ThermalScattering.from_njoy(
path_h1, path_hzrh, temperatures=[296.0], iwt=0
)
without_endf_data = openmc.data.ThermalScattering.from_njoy(
path_h1, path_hzrh, temperatures=[296.0], use_endf_data=False, iwt=1
)
return with_endf_data, without_endf_data
@pytest.fixture(scope='module')
def sio2():
"""SiO2 thermal scattering data."""
endf_data = os.environ['OPENMC_ENDF_DATA']
filename = os.path.join(endf_data, 'thermal_scatt', 'tsl-SiO2.endf')
return openmc.data.ThermalScattering.from_endf(filename)
def test_h2o_attributes(h2o):
assert h2o.name == 'c_H_in_H2O'
assert h2o.nuclides == ['H1']
assert h2o.temperatures == ['294K']
assert h2o.atomic_weight_ratio == pytest.approx(0.999167)
assert h2o.energy_max == pytest.approx(4.46)
assert isinstance(repr(h2o), str)
def test_h2o_xs(h2o):
assert not h2o.elastic
for temperature, func in h2o.inelastic.xs.items():
assert temperature.endswith('K')
assert isinstance(func, Callable)
def test_graphite_attributes(graphite):
assert graphite.name == 'c_Graphite'
assert graphite.nuclides == ['C0', 'C12', 'C13']
assert graphite.temperatures == ['296K']
assert graphite.atomic_weight_ratio == pytest.approx(11.898)
assert graphite.energy_max == pytest.approx(4.46)
def test_graphite_xs(graphite):
for temperature, func in graphite.elastic.xs.items():
assert temperature.endswith('K')
assert isinstance(func, openmc.data.CoherentElastic)
for temperature, func in graphite.inelastic.xs.items():
assert temperature.endswith('K')
assert isinstance(func, Callable)
elastic = graphite.elastic.xs['296K']
assert elastic([1e-3, 1.0]) == pytest.approx([0.0, 0.62586153])
@needs_njoy
def test_graphite_njoy():
endf_data = os.environ['OPENMC_ENDF_DATA']
path_c0 = os.path.join(endf_data, 'neutrons', 'n-006_C_000.endf')
path_gr = os.path.join(endf_data, 'thermal_scatt', 'tsl-graphite.endf')
graphite = openmc.data.ThermalScattering.from_njoy(
path_c0, path_gr, temperatures=[296.0])
assert graphite.nuclides == ['C0', 'C12', 'C13']
assert graphite.atomic_weight_ratio == pytest.approx(11.898)
assert graphite.energy_max == pytest.approx(2.02)
assert graphite.temperatures == ['296K']
@needs_njoy
def test_export_to_hdf5(tmpdir, h2o_njoy, hzrh_njoy, graphite):
filename = str(tmpdir.join('water.h5'))
h2o_njoy.export_to_hdf5(filename)
assert os.path.exists(filename)
# Graphite covers export of coherent elastic data
filename = str(tmpdir.join('graphite.h5'))
graphite.export_to_hdf5(filename)
assert os.path.exists(filename)
# H in ZrH covers export of incoherent elastic data, and incoherent
# inelastic angle-energy distributions
filename = str(tmpdir.join('hzrh.h5'))
hzrh_njoy[0].export_to_hdf5(filename)
assert os.path.exists(filename)
hzrh_njoy[1].export_to_hdf5(filename, 'w')
assert os.path.exists(filename)
@needs_njoy
def test_continuous_dist(h2o_njoy):
for temperature, dist in h2o_njoy.inelastic.distribution.items():
assert temperature.endswith('K')
assert isinstance(dist, openmc.data.IncoherentInelasticAE)
def test_h2o_endf():
endf_data = os.environ['OPENMC_ENDF_DATA']
filename = os.path.join(endf_data, 'thermal_scatt', 'tsl-HinH2O.endf')
h2o = openmc.data.ThermalScattering.from_endf(filename)
assert not h2o.elastic
assert h2o.atomic_weight_ratio == pytest.approx(0.99917)
assert h2o.energy_max == pytest.approx(3.99993)
assert h2o.temperatures == ['294K', '350K', '400K', '450K', '500K', '550K',
'600K', '650K', '800K']
def test_hzrh_attributes(hzrh):
assert hzrh.atomic_weight_ratio == pytest.approx(0.99917)
assert hzrh.energy_max == pytest.approx(1.9734)
assert hzrh.temperatures == ['296K', '400K', '500K', '600K', '700K', '800K',
'1000K', '1200K']
def test_hzrh_elastic(hzrh):
rx = hzrh.elastic
for temperature, func in rx.xs.items():
assert temperature.endswith('K')
assert isinstance(func, openmc.data.IncoherentElastic)
xs = rx.xs['296K']
sig_b, W = xs.bound_xs, xs.debye_waller
assert sig_b == pytest.approx(81.98006)
assert W == pytest.approx(8.486993)
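    # Added note (hedged): the assertion below checks the analytic incoherent
    # elastic form sigma(E) = (sigma_b / 2) * (1 - exp(-4*E*W)) / (2*E*W), with
    # bound cross section sigma_b and Debye-Waller integral W.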
for i in range(10):
E = random.uniform(0.0, hzrh.energy_max)
assert xs(E) == pytest.approx(sig_b/2 * ((1 - exp(-4*E*W))/(2*E*W)))
for temperature, dist in rx.distribution.items():
assert temperature.endswith('K')
assert dist.debye_waller > 0.0
@needs_njoy
def test_hzrh_njoy(hzrh_njoy):
endf, ace = hzrh_njoy
# First check version using ENDF incoherent elastic data
assert endf.atomic_weight_ratio == pytest.approx(0.999167)
assert endf.energy_max == pytest.approx(1.855)
assert endf.temperatures == ['296K']
# Now check version using ACE incoherent elastic data (discretized)
assert ace.atomic_weight_ratio == endf.atomic_weight_ratio
assert ace.energy_max == endf.energy_max
# Cross sections should be about the same (within 1%)
E = np.linspace(1e-5, endf.energy_max)
xs1 = endf.elastic.xs['296K'](E)
xs2 = ace.elastic.xs['296K'](E)
assert xs1 == pytest.approx(xs2, rel=0.01)
# Check discrete incoherent elastic distribution
d = ace.elastic.distribution['296K']
assert np.all((-1.0 <= d.mu_out) & (d.mu_out <= 1.0))
# Check discrete incoherent inelastic distribution
d = endf.inelastic.distribution['296K']
assert d.skewed
assert np.all((-1.0 <= d.mu_out) & (d.mu_out <= 1.0))
assert np.all((0.0 <= d.energy_out) & (d.energy_out < 3*endf.energy_max))
def test_sio2_attributes(sio2):
assert sio2.atomic_weight_ratio == pytest.approx(27.84423)
assert sio2.energy_max == pytest.approx(2.46675)
assert sio2.temperatures == ['294K', '350K', '400K', '500K', '800K',
'1000K', '1200K']
def test_sio2_elastic(sio2):
rx = sio2.elastic
for temperature, func in rx.xs.items():
assert temperature.endswith('K')
assert isinstance(func, openmc.data.CoherentElastic)
xs = rx.xs['294K']
assert len(xs) == 317
assert xs.bragg_edges[0] == pytest.approx(0.000711634)
assert xs.factors[0] == pytest.approx(2.6958e-14)
    # Below the first Bragg edge, the cross section should be zero
E = xs.bragg_edges[0] / 2.0
assert xs(E) == 0.0
    # Between Bragg edges, the cross section is P/E where P is the factor
E = (xs.bragg_edges[0] + xs.bragg_edges[1]) / 2.0
P = xs.factors[0]
assert xs(E) == pytest.approx(P / E)
# Check the last Bragg edge
E = 1.1 * xs.bragg_edges[-1]
P = xs.factors[-1]
assert xs(E) == pytest.approx(P / E)
for temperature, dist in rx.distribution.items():
assert temperature.endswith('K')
assert dist.coherent_xs is rx.xs[temperature]
def test_get_thermal_name():
f = openmc.data.get_thermal_name
# Names which are recognized
assert f('lwtr') == 'c_H_in_H2O'
assert f('hh2o') == 'c_H_in_H2O'
with pytest.warns(UserWarning, match='is not recognized'):
# Names which can be guessed
assert f('lw00') == 'c_H_in_H2O'
assert f('graphite') == 'c_Graphite'
assert f('D_in_D2O') == 'c_D_in_D2O'
# Not in values, but very close
assert f('hluci') == 'c_H_in_C5O2H8'
assert f('ortho_d') == 'c_ortho_D'
# Names that don't remotely match anything
assert f('boogie_monster') == 'c_boogie_monster'
|
{
"content_hash": "b0d46c82b91ce771b6741401ffdf2620",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 80,
"avg_line_length": 35.27272727272727,
"alnum_prop": 0.6565721649484536,
"repo_name": "shikhar413/openmc",
"id": "dc4d24628b122d380e8abe59878a5decddc2e594",
"size": "9312",
"binary": false,
"copies": "9",
"ref": "refs/heads/develop",
"path": "tests/unit_tests/test_data_thermal.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "12770"
},
{
"name": "C++",
"bytes": "1486800"
},
{
"name": "CMake",
"bytes": "17562"
},
{
"name": "Dockerfile",
"bytes": "4924"
},
{
"name": "Python",
"bytes": "3230286"
},
{
"name": "Shell",
"bytes": "4001"
}
],
"symlink_target": ""
}
|
"""Support for the definition of zones."""
import logging
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.loader import bind_hass
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
CONF_NAME,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_ICON,
CONF_RADIUS,
EVENT_CORE_CONFIG_UPDATE,
)
from homeassistant.helpers import config_per_platform
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.util import slugify
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.util.location import distance
from .config_flow import configured_zones
from .const import CONF_PASSIVE, DOMAIN, HOME_ZONE, ATTR_PASSIVE, ATTR_RADIUS
from .zone import Zone
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Unnamed zone"
DEFAULT_PASSIVE = False
DEFAULT_RADIUS = 100
ENTITY_ID_FORMAT = "zone.{}"
ENTITY_ID_HOME = ENTITY_ID_FORMAT.format(HOME_ZONE)
ICON_HOME = "mdi:home"
ICON_IMPORT = "mdi:import"
# The config that zone accepts is the same as if it has platforms.
PLATFORM_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_LATITUDE): cv.latitude,
vol.Required(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.Coerce(float),
vol.Optional(CONF_PASSIVE, default=DEFAULT_PASSIVE): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
},
extra=vol.ALLOW_EXTRA,
)
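# Added illustration (hedged; the values are made up): a configuration.yaml entry
# of roughly this shape validates against PLATFORM_SCHEMA above:
#   zone:
#     - name: Work
#       latitude: 32.8773367
#       longitude: -117.2494053
#       radius: 250
#       icon: mdi:briefcase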
@bind_hass
def async_active_zone(hass, latitude, longitude, radius=0):
"""Find the active zone for given latitude, longitude.
This method must be run in the event loop.
"""
# Sort entity IDs so that we are deterministic if equal distance to 2 zones
zones = (
hass.states.get(entity_id)
for entity_id in sorted(hass.states.async_entity_ids(DOMAIN))
)
min_dist = None
closest = None
for zone in zones:
if zone.attributes.get(ATTR_PASSIVE):
continue
zone_dist = distance(
latitude,
longitude,
zone.attributes[ATTR_LATITUDE],
zone.attributes[ATTR_LONGITUDE],
)
within_zone = zone_dist - radius < zone.attributes[ATTR_RADIUS]
closer_zone = closest is None or zone_dist < min_dist
smaller_zone = (
zone_dist == min_dist
and zone.attributes[ATTR_RADIUS] < closest.attributes[ATTR_RADIUS]
)
if within_zone and (closer_zone or smaller_zone):
min_dist = zone_dist
closest = zone
return closest
async def async_setup(hass, config):
"""Set up configured zones as well as home assistant zone if necessary."""
hass.data[DOMAIN] = {}
entities = set()
zone_entries = configured_zones(hass)
for _, entry in config_per_platform(config, DOMAIN):
if slugify(entry[CONF_NAME]) not in zone_entries:
zone = Zone(
hass,
entry[CONF_NAME],
entry[CONF_LATITUDE],
entry[CONF_LONGITUDE],
entry.get(CONF_RADIUS),
entry.get(CONF_ICON),
entry.get(CONF_PASSIVE),
)
zone.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, entry[CONF_NAME], entities
)
hass.async_create_task(zone.async_update_ha_state())
entities.add(zone.entity_id)
if ENTITY_ID_HOME in entities or HOME_ZONE in zone_entries:
return True
zone = Zone(
hass,
hass.config.location_name,
hass.config.latitude,
hass.config.longitude,
DEFAULT_RADIUS,
ICON_HOME,
False,
)
zone.entity_id = ENTITY_ID_HOME
hass.async_create_task(zone.async_update_ha_state())
@callback
def core_config_updated(_):
"""Handle core config updated."""
zone.name = hass.config.location_name
zone.latitude = hass.config.latitude
zone.longitude = hass.config.longitude
zone.async_write_ha_state()
hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, core_config_updated)
return True
async def async_setup_entry(hass, config_entry):
"""Set up zone as config entry."""
entry = config_entry.data
name = entry[CONF_NAME]
zone = Zone(
hass,
name,
entry[CONF_LATITUDE],
entry[CONF_LONGITUDE],
entry.get(CONF_RADIUS, DEFAULT_RADIUS),
entry.get(CONF_ICON),
entry.get(CONF_PASSIVE, DEFAULT_PASSIVE),
)
zone.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, name, None, hass)
hass.async_create_task(zone.async_update_ha_state())
hass.data[DOMAIN][slugify(name)] = zone
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
zones = hass.data[DOMAIN]
name = slugify(config_entry.data[CONF_NAME])
zone = zones.pop(name)
await zone.async_remove()
return True
|
{
"content_hash": "5e9d42eeeddaf1bc846a106a9cc3543e",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 81,
"avg_line_length": 29.91715976331361,
"alnum_prop": 0.6416139240506329,
"repo_name": "fbradyirl/home-assistant",
"id": "2ee03c08189aeaa3dc1e366631794058b9642109",
"size": "5056",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/zone/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1829"
},
{
"name": "Python",
"bytes": "16494727"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17784"
}
],
"symlink_target": ""
}
|
class param:
## The constructor
def __init__(self):
self.verbose = 1
# check if needed Python libraries are already installed or not
import sys
import getopt
import sct_utils as sct
import nibabel
import os
def main():
#Initialization
fname = ''
verbose = param.verbose
try:
opts, args = getopt.getopt(sys.argv[1:],'hi:v:')
except getopt.GetoptError:
usage()
for opt, arg in opts :
if opt == '-h':
usage()
elif opt in ("-i"):
fname = arg
elif opt in ('-v'):
verbose = int(arg)
# display usage if a mandatory argument is not provided
if fname == '' :
usage()
# check existence of input files
print'\nCheck if file exists ...'
sct.check_file_exist(fname)
# Display arguments
print'\nCheck input arguments...'
print' Input volume ...................... '+fname
print' Verbose ........................... '+str(verbose)
file = nibabel.load(fname)
data = file.get_data()
hdr = file.get_header()
X,Y,Z = (data>0).nonzero()
x_max,y_max = (data[:,:,max(Z)]).nonzero()
x_max = x_max[0]
y_max = y_max[0]
z_max = max(Z)
x_min,y_min = (data[:,:,min(Z)]).nonzero()
x_min = x_min[0]
y_min = y_min[0]
z_min = min(Z)
del data
print 'Coords extrema : min [ ' + str(x_min) + ' ,' + str(y_min) + ' ,' + str(z_min) +' ] max [ ' + str(x_max) + ' ,' + str(y_max) + ' ,' + str(z_max) + ' ]'
return z_min,z_max
def usage():
print """
"""+os.path.basename(__file__)+"""
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Part of the Spinal Cord Toolbox <https://sourceforge.net/projects/spinalcordtoolbox>
DESCRIPTION
  Detect coordinates of minimum and maximum nonzero voxels when inputting a straight centerline.
USAGE
"""+os.path.basename(__file__)+""" -i <input_volume>
MANDATORY ARGUMENTS
-i <input_volume> straight centerline. No Default value
OPTIONAL ARGUMENTS
-v {0,1} verbose. Default="""+str(param.verbose)+"""
-h help. Show this message
EXAMPLE
"""+os.path.basename(__file__)+""" -i straight_centerline.nii.gz\n"""
# exit program
sys.exit(2)
#=======================================================================================================================
# Start program
#=======================================================================================================================
if __name__ == "__main__":
# initialize parameters
param = param()
# call main function
main()
|
{
"content_hash": "58f1974918e3d6f8d62872102a3f01e9",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 162,
"avg_line_length": 26.323809523809523,
"alnum_prop": 0.46888567293777134,
"repo_name": "3324fr/spinalcordtoolbox",
"id": "a97d615ede74c0bb865e0dd7ccdf6bfe99ae40e6",
"size": "3240",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dev/template_creation/sct_detect_extrema.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5961"
},
{
"name": "C++",
"bytes": "1025992"
},
{
"name": "CMake",
"bytes": "18919"
},
{
"name": "CSS",
"bytes": "1384"
},
{
"name": "Groff",
"bytes": "3141"
},
{
"name": "HTML",
"bytes": "5315"
},
{
"name": "JavaScript",
"bytes": "2505"
},
{
"name": "KiCad",
"bytes": "5522"
},
{
"name": "Matlab",
"bytes": "275100"
},
{
"name": "Python",
"bytes": "4808677"
},
{
"name": "Shell",
"bytes": "193192"
}
],
"symlink_target": ""
}
|
from .datatables import DataTables
from .editor import Editor
|
{
"content_hash": "958f9c7ccb6ca19912a9dff929896857",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 34,
"avg_line_length": 31,
"alnum_prop": 0.8387096774193549,
"repo_name": "wholeshoot/mongo_datatables",
"id": "b4df5180317a921305144d69712cee5636c5c0a9",
"size": "62",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mongo_datatables/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9513"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from django.db import models as djmodels
import importlib
import inspect
def get_installed_apps():
"""
Grabs the list of installed apps and returns the apps that don't
belong to django nor the condenser app
"""
newlist = []
for app in settings.INSTALLED_APPS:
if not app.startswith('django') and app != 'condenser':
newlist.append(app)
return newlist
def get_app_models(app):
"""
Imports the app that is passed as parameter
returns list of tuples (MODELNAME, MODELCLASS)
"""
module = importlib.import_module(app)
models_list = inspect.getmembers(module,
# Not a fan of having to compare the module name to the parameter 'app',
# but the import_module method will uncover the true namespace of the module,
# breaking our unit tests
lambda member: inspect.isclass(member) and member.__module__ == app
)
return models_list
def get_model_fields(app, model):
app = importlib.import_module(app)
model = getattr(app, model)
fields = model._meta.get_all_field_names()
return fields
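# Minimal usage sketch (added; app and model names are hypothetical and assume a
# configured Django project):
#   get_installed_apps()                       # e.g. ['myapp', ...]
#   get_app_models('myapp.models')             # e.g. [('Book', <class 'myapp.models.Book'>)]
#   get_model_fields('myapp.models', 'Book')   # e.g. ['id', 'title', ...]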
|
{
"content_hash": "548c589e39e5316ba285c6fae0ce4d90",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 89,
"avg_line_length": 32.19444444444444,
"alnum_prop": 0.6635030198446937,
"repo_name": "senordev/django-condenser",
"id": "88cc6589f6021b6835c6db8abaca08d5cffa7c0a",
"size": "1159",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "condenser/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "3321"
}
],
"symlink_target": ""
}
|
"""
A few miscellaneous helper functions for pyglmnet.py
"""
import numpy as np
from copy import copy
import logging
logger = logging.getLogger('pyglmnet')
logger.addHandler(logging.StreamHandler())
def softmax(w):
"""Softmax function of given array of number w.
Parameters
----------
w: array | list
The array of numbers.
Returns
-------
dist: array
The resulting array with values ranging from 0 to 1.
"""
w = np.array(w)
maxes = np.amax(w, axis=1)
maxes = maxes.reshape(maxes.shape[0], 1)
e = np.exp(w - maxes)
dist = e / np.sum(e, axis=1, keepdims=True)
return dist
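# Added example (hedged; values rounded): softmax([[1., 2., 3.], [1., 1., 1.]])
# returns rows that each sum to 1, approximately
#   [[0.0900, 0.2447, 0.6652], [0.3333, 0.3333, 0.3333]]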
def label_binarizer(y):
"""Mimics scikit learn's LabelBinarizer
Parameters
----------
y: ndarray, shape (n_samples, )
one dimensional array of class labels
Returns
-------
yb: array, shape (n_samples, n_classes)
one-hot encoding of labels in y
"""
if y.ndim != 1:
raise ValueError('y has to be one-dimensional')
y_flat = y.ravel()
yb = np.zeros([len(y), y.max() + 1])
yb[np.arange(len(y)), y_flat] = 1
return yb
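# Added example (hedged): label_binarizer(np.array([0, 2, 1, 2])) yields the
# one-hot matrix [[1, 0, 0], [0, 0, 1], [0, 1, 0], [0, 0, 1]].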
def tikhonov_from_prior(prior_cov, n_samples, threshold=0.0001):
"""Given a prior covariance matrix, returns a Tikhonov matrix
Parameters
----------
prior_cov: array \n
prior covariance matrix of shape (n_features x n_features)
n_samples: int \n
number of samples
threshold: float \n
ratio of largest to smallest singular value to
approximate matrix inversion using SVD
Returns
-------
Tau: array \n
Tikhonov matrix of shape (n_features x n_features)
"""
[U, S, V] = np.linalg.svd(prior_cov, full_matrices=False)
S_ratio = S / S.max()
nonzero_indices = np.where(S_ratio > threshold)[0]
zero_indices = np.where(S_ratio <= threshold)[0]
S_inv = copy(np.sqrt(S))
S_inv[zero_indices] = threshold
S_inv[nonzero_indices] = 1. / S_inv[nonzero_indices]
Tau = np.dot(np.diag(S_inv), V)
n_features = Tau.shape[0]
Tau = 1. / n_features * Tau
return Tau
def _check_params(distr, max_iter, fit_intercept):
from .pyglmnet import ALLOWED_DISTRS
if distr not in ALLOWED_DISTRS:
raise ValueError('distr must be one of %s, Got '
'%s' % (', '.join(ALLOWED_DISTRS), distr))
if not isinstance(max_iter, int):
raise ValueError('max_iter must be of type int')
if not isinstance(fit_intercept, bool):
raise ValueError('fit_intercept must be bool, got %s'
% type(fit_intercept))
def set_log_level(verbose):
"""Convenience function for setting the log level.
Parameters
----------
verbose : bool, str, int, or None
The verbosity of messages to print. If a str, it can be either DEBUG,
INFO, WARNING, ERROR, or CRITICAL. Note that these are for
convenience and are equivalent to passing in logging.DEBUG, etc.
For bool, True is the same as 'INFO', False is the same as 'WARNING'.
"""
if isinstance(verbose, bool):
if verbose is True:
verbose = 'INFO'
else:
verbose = 'WARNING'
if isinstance(verbose, str):
verbose = verbose.upper()
logging_types = dict(DEBUG=logging.DEBUG, INFO=logging.INFO,
WARNING=logging.WARNING, ERROR=logging.ERROR,
CRITICAL=logging.CRITICAL)
if verbose not in logging_types:
raise ValueError('verbose must be of a valid type')
verbose = logging_types[verbose]
logger.setLevel(verbose)
|
{
"content_hash": "c3a78b14e434f7fd342629de9c67a679",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 77,
"avg_line_length": 27.734848484848484,
"alnum_prop": 0.6022944550669216,
"repo_name": "pavanramkumar/pyglmnet",
"id": "a46d2f0c14b4868a6edd6ecf7dccdd5dca43eb94",
"size": "3661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyglmnet/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1285"
},
{
"name": "Python",
"bytes": "128554"
},
{
"name": "TeX",
"bytes": "2395"
}
],
"symlink_target": ""
}
|
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = "memote-"
cfg.versionfile_source = "src/memote/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
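# Added illustration (hedged; values are made up): for
#   pieces = {"closest-tag": "1.2.0", "distance": 3, "short": "abc1234", "dirty": True}
# render_pep440(pieces) gives "1.2.0+3.gabc1234.dirty"; a clean build exactly at the
# tag (distance 0, not dirty) renders as just "1.2.0".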
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
    Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
|
{
"content_hash": "2edf6d479cb38e90b704715c2023989c",
"timestamp": "",
"source": "github",
"line_count": 510,
"max_line_length": 79,
"avg_line_length": 35.252941176470586,
"alnum_prop": 0.5697202291562379,
"repo_name": "biosustain/memote",
"id": "6d99373edc14effc1d81c2e262f0640073f1baa6",
"size": "18454",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/memote/_version.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "14738"
},
{
"name": "Makefile",
"bytes": "2273"
},
{
"name": "Python",
"bytes": "188798"
},
{
"name": "Shell",
"bytes": "281"
}
],
"symlink_target": ""
}
|
from tempest_lib import exceptions as lib_exc
import testtools
from tempest.api.compute import base
from tempest import config
from tempest import test
CONF = config.CONF
class SecurityGroupDefaultRulesTest(base.BaseV2ComputeAdminTest):
@classmethod
# TODO(GMann): Once Bug# 1311500 is fixed, these tests can run
# for Neutron also.
@testtools.skipIf(CONF.service_available.neutron,
"Skip as this functionality is not yet "
"implemented in Neutron. Related Bug#1311500")
def setup_credentials(cls):
# A network and a subnet will be created for these tests
cls.set_network_resources(network=True, subnet=True)
super(SecurityGroupDefaultRulesTest, cls).setup_credentials()
@classmethod
def setup_clients(cls):
super(SecurityGroupDefaultRulesTest, cls).setup_clients()
cls.adm_client = cls.os_adm.security_group_default_rules_client
def _create_security_group_default_rules(self, ip_protocol='tcp',
from_port=22, to_port=22,
cidr='10.10.0.0/24'):
# Create Security Group default rule
rule = self.adm_client.create_security_default_group_rule(
ip_protocol,
from_port,
to_port,
cidr=cidr)
self.assertEqual(ip_protocol, rule['ip_protocol'])
self.assertEqual(from_port, rule['from_port'])
self.assertEqual(to_port, rule['to_port'])
self.assertEqual(cidr, rule['ip_range']['cidr'])
return rule
@test.idempotent_id('6d880615-eec3-4d29-97c5-7a074dde239d')
def test_create_delete_security_group_default_rules(self):
# Create and delete Security Group default rule
ip_protocols = ['tcp', 'udp', 'icmp']
for ip_protocol in ip_protocols:
rule = self._create_security_group_default_rules(ip_protocol)
# Delete Security Group default rule
self.adm_client.delete_security_group_default_rule(rule['id'])
self.assertRaises(lib_exc.NotFound,
self.adm_client.get_security_group_default_rule,
rule['id'])
@test.idempotent_id('4d752e0a-33a1-4c3a-b498-ff8667ca22e5')
def test_create_security_group_default_rule_without_cidr(self):
ip_protocol = 'udp'
from_port = 80
to_port = 80
rule = self.adm_client.create_security_default_group_rule(
ip_protocol,
from_port,
to_port)
self.addCleanup(self.adm_client.delete_security_group_default_rule,
rule['id'])
self.assertNotEqual(0, rule['id'])
self.assertEqual('0.0.0.0/0', rule['ip_range']['cidr'])
@test.idempotent_id('29f2d218-69b0-4a95-8f3d-6bd0ef732b3a')
def test_create_security_group_default_rule_with_blank_cidr(self):
ip_protocol = 'icmp'
from_port = 10
to_port = 10
cidr = ''
rule = self.adm_client.create_security_default_group_rule(
ip_protocol,
from_port,
to_port,
cidr=cidr)
self.addCleanup(self.adm_client.delete_security_group_default_rule,
rule['id'])
self.assertNotEqual(0, rule['id'])
self.assertEqual('0.0.0.0/0', rule['ip_range']['cidr'])
@test.idempotent_id('6e6de55e-9146-4ae0-89f2-3569586e0b9b')
def test_security_group_default_rules_list(self):
ip_protocol = 'tcp'
from_port = 22
to_port = 22
cidr = '10.10.0.0/24'
rule = self._create_security_group_default_rules(ip_protocol,
from_port,
to_port,
cidr)
self.addCleanup(self.adm_client.delete_security_group_default_rule,
rule['id'])
rules = self.adm_client.list_security_group_default_rules()
self.assertNotEqual(0, len(rules))
self.assertIn(rule, rules)
@test.idempotent_id('15cbb349-86b4-4f71-a048-04b7ef3f150b')
def test_default_security_group_default_rule_show(self):
ip_protocol = 'tcp'
from_port = 22
to_port = 22
cidr = '10.10.0.0/24'
rule = self._create_security_group_default_rules(ip_protocol,
from_port,
to_port,
cidr)
self.addCleanup(self.adm_client.delete_security_group_default_rule,
rule['id'])
fetched_rule = self.adm_client.get_security_group_default_rule(
rule['id'])
self.assertEqual(rule, fetched_rule)
|
{
"content_hash": "14634bdcf76693d4296840cf00f1ae29",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 78,
"avg_line_length": 42.23275862068966,
"alnum_prop": 0.5660338844662176,
"repo_name": "yamt/tempest",
"id": "c1fe18cc932e2ac16660e0d47d251441588a66c9",
"size": "5530",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tempest/api/compute/admin/test_security_group_default_rules.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2739641"
},
{
"name": "Shell",
"bytes": "8560"
}
],
"symlink_target": ""
}
|
from django import template
register = template.Library()
@register.filter(name='std_level_prompt')
def std_level_prompt(doc):
"""
Returns the name from the std level names table corresponding
to the object's intended_std_level (with the word RFC appended in some
cases), or a prompt requesting that the intended_std_level be set."""
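# Illustrative only: assuming the std level name for 'inf' is "Informational",
# a draft intended as Informational would render as "Informational RFC", while
# e.g. a Proposed Standard would render its name unchanged.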
prompt = "*** YOU MUST SELECT AN INTENDED STATUS FOR THIS DRAFT AND REGENERATE THIS TEXT ***"
if doc.intended_std_level:
prompt = doc.intended_std_level.name
if doc.intended_std_level_id in ('inf','exp','hist'):
prompt = prompt + " RFC"
return prompt
@register.filter(name='std_level_prompt_with_article')
def std_level_prompt_with_article(doc):
"""
Returns the standard level prompt prefixed with an appropriate article."""
# This is a very crude way to select between "a" and "an", but will
# work for the standards levels in the standards level names table
# Grammar war alert: This will generate "an historic"
article = ""
if doc.intended_std_level:
article = "a"
if doc.intended_std_level.name[0].lower() in "aehiou":
article = "an"
return article+" "+std_level_prompt(doc)
|
{
"content_hash": "b83fe13581bf207eb6858990e7d9c841",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 97,
"avg_line_length": 34.22222222222222,
"alnum_prop": 0.6737012987012987,
"repo_name": "wpjesus/codematch",
"id": "32e8dd0ca8cb2343200e8554526894a93ec685bf",
"size": "1232",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "ietf/doc/templatetags/mail_filters.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "139492"
},
{
"name": "CSS",
"bytes": "733662"
},
{
"name": "Groff",
"bytes": "2349"
},
{
"name": "HTML",
"bytes": "2149789"
},
{
"name": "JavaScript",
"bytes": "1003699"
},
{
"name": "Makefile",
"bytes": "3407"
},
{
"name": "Perl",
"bytes": "17323"
},
{
"name": "PostScript",
"bytes": "35"
},
{
"name": "PowerShell",
"bytes": "468"
},
{
"name": "Python",
"bytes": "4536908"
},
{
"name": "Shell",
"bytes": "74113"
},
{
"name": "TeX",
"bytes": "2556"
}
],
"symlink_target": ""
}
|
from events import *
from api import AnalyticsApi as Api
|
{
"content_hash": "bc726ddfe44435db0c49503e44b7ef0d",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 35,
"avg_line_length": 28,
"alnum_prop": 0.8214285714285714,
"repo_name": "DaveA50/lbry",
"id": "598751034b2219d48f93140e0f7138a889156125",
"size": "56",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lbrynet/analytics/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "3242"
},
{
"name": "Python",
"bytes": "1167874"
},
{
"name": "Shell",
"bytes": "15744"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import responses
import six
import copy
from sentry.shared_integrations.exceptions import IntegrationError
from sentry.models import ExternalIssue
from sentry.utils.http import absolute_uri
from sentry.testutils.factories import DEFAULT_EVENT_DATA
from sentry.testutils.helpers.datetime import iso_format, before_now
from .testutils import GitLabTestCase
class GitlabIssuesTest(GitLabTestCase):
def setUp(self):
super(GitlabIssuesTest, self).setUp()
min_ago = iso_format(before_now(minutes=1))
event = self.store_event(
data={
"event_id": "a" * 32,
"message": "message",
"timestamp": min_ago,
"stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
},
project_id=self.project.id,
)
self.group = event.group
def test_make_external_key(self):
project_name = "getsentry/sentry"
issue_iid = "7"
external_key = "%s#%s" % (project_name, issue_iid)
domain_name = self.installation.model.metadata["domain_name"]
data = {"key": external_key}
assert self.installation.make_external_key(data) == "%s:%s" % (domain_name, external_key)
def test_get_issue_url(self):
issue_id = "example.gitlab.com:project/project#7"
assert (
self.installation.get_issue_url(issue_id)
== "https://example.gitlab.com/project/project/issues/7"
)
@responses.activate
def test_get_create_issue_config(self):
group_description = (
u"Sentry Issue: [%s](%s)\n\n"
"```\nStacktrace (most recent call first):\n\n"
' File "sentry/models/foo.py", line 29, in build_msg\n'
" string_max_length=self.string_max_length)\n\nmessage\n```"
) % (
self.group.qualified_short_id,
absolute_uri(self.group.get_absolute_url(params={"referrer": "gitlab_integration"})),
)
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/groups/%s/projects"
% self.installation.model.metadata["group_id"],
json=[
{"name_with_namespace": "getsentry / sentry", "id": 1},
{"name_with_namespace": "getsentry / hello", "id": 22},
],
)
assert self.installation.get_create_issue_config(self.group, self.user) == [
{
"url": "/extensions/gitlab/search/baz/%d/" % self.installation.model.id,
"name": "project",
"required": True,
"type": "select",
"label": "GitLab Project",
"choices": [(1, u"getsentry / sentry"), (22, u"getsentry / hello")],
"defaultValue": 1,
},
{
"name": "title",
"label": "Title",
"default": self.group.get_latest_event().title,
"type": "string",
"required": True,
},
{
"name": "description",
"label": "Description",
"default": group_description,
"type": "textarea",
"autosize": True,
"maxRows": 10,
},
]
@responses.activate
def test_get_link_issue_config(self):
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/groups/%s/projects"
% self.installation.model.metadata["group_id"],
json=[
{"name_with_namespace": "getsentry / sentry", "id": 1},
{"name_with_namespace": "getsentry / hello", "id": 22},
],
)
autocomplete_url = "/extensions/gitlab/search/baz/%d/" % self.installation.model.id
assert self.installation.get_link_issue_config(self.group) == [
{
"name": "project",
"label": "GitLab Project",
"type": "select",
"default": 1,
"choices": [(1, u"getsentry / sentry"), (22, u"getsentry / hello")],
"url": autocomplete_url,
"updatesForm": True,
"required": True,
},
{
"name": "externalIssue",
"label": "Issue",
"default": "",
"type": "select",
"url": autocomplete_url,
"required": True,
},
{
"name": "comment",
"label": "Comment",
"default": u"Sentry issue: [{issue_id}]({url})".format(
url=absolute_uri(
self.group.get_absolute_url(params={"referrer": "gitlab_integration"})
),
issue_id=self.group.qualified_short_id,
),
"type": "textarea",
"required": False,
"help": ("Leave blank if you don't want to " "add a comment to the GitLab issue."),
},
]
@responses.activate
def test_create_issue(self):
issue_iid = "1"
project_id = "10"
project_name = "getsentry/sentry"
key = "%s#%s" % (project_name, issue_iid)
responses.add(
responses.POST,
u"https://example.gitlab.com/api/v4/projects/%s/issues" % project_id,
json={
"id": 8,
"iid": issue_iid,
"title": "hello",
"description": "This is the description",
"web_url": "https://example.gitlab.com/%s/issues/%s" % (project_name, issue_iid),
},
)
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/projects/%s" % project_id,
json={"path_with_namespace": project_name, "id": 10},
)
form_data = {
"project": project_id,
"title": "hello",
"description": "This is the description",
}
assert self.installation.create_issue(form_data) == {
"key": key,
"description": "This is the description",
"title": "hello",
"url": "https://example.gitlab.com/%s/issues/%s" % (project_name, issue_iid),
"project": project_id,
"metadata": {"display_name": key},
}
@responses.activate
def test_get_issue(self):
project_id = "12"
project_name = "getsentry/sentry"
issue_iid = "13"
key = "%s#%s" % (project_name, issue_iid)
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/projects/%s/issues/%s" % (project_id, issue_iid),
json={
"id": 18,
"iid": issue_iid,
"title": "hello",
"description": "This is the description",
"web_url": "https://example.gitlab.com/%s/issues/%s" % (project_name, issue_iid),
},
)
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/projects/%s" % project_id,
json={"id": project_id, "path_with_namespace": project_name},
)
assert self.installation.get_issue(issue_id="%s#%s" % (project_id, issue_iid), data={}) == {
"key": key,
"description": "This is the description",
"title": "hello",
"url": "https://example.gitlab.com/%s/issues/%s" % (project_name, issue_iid),
"project": project_id,
"metadata": {"display_name": key},
}
@responses.activate
def test_create_issue_default_project_in_group_api_call(self):
group_description = (
u"Sentry Issue: [%s](%s)\n\n"
"```\nStacktrace (most recent call first):\n\n"
' File "sentry/models/foo.py", line 29, in build_msg\n'
" string_max_length=self.string_max_length)\n\nmessage\n```"
) % (
self.group.qualified_short_id,
absolute_uri(self.group.get_absolute_url(params={"referrer": "gitlab_integration"})),
)
project_id = 10
project_name = "This_is / a_project"
org_integration = self.installation.org_integration
org_integration.config["project_issue_defaults"] = {
six.text_type(self.group.project_id): {"project": project_id}
}
org_integration.save()
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/groups/%s/projects"
% self.installation.model.metadata["group_id"],
json=[
{"name_with_namespace": "getsentry / sentry", "id": 1},
{"name_with_namespace": project_name, "id": project_id},
{"name_with_namespace": "getsentry / hello", "id": 22},
],
)
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/projects/%s" % project_id,
json={"path_with_namespace": project_name, "id": project_id},
)
assert self.installation.get_create_issue_config(self.group, self.user) == [
{
"url": "/extensions/gitlab/search/baz/%d/" % self.installation.model.id,
"name": "project",
"required": True,
"choices": [
(1, u"getsentry / sentry"),
(10, u"This_is / a_project"),
(22, u"getsentry / hello"),
],
"defaultValue": project_id,
"type": "select",
"label": "GitLab Project",
},
{
"name": "title",
"label": "Title",
"default": self.group.get_latest_event().title,
"type": "string",
"required": True,
},
{
"name": "description",
"label": "Description",
"default": group_description,
"type": "textarea",
"autosize": True,
"maxRows": 10,
},
]
@responses.activate
def test_create_issue_default_project_not_in_api_call(self):
group_description = (
u"Sentry Issue: [%s](%s)\n\n"
"```\nStacktrace (most recent call first):\n\n"
' File "sentry/models/foo.py", line 29, in build_msg\n'
" string_max_length=self.string_max_length)\n\nmessage\n```"
) % (
self.group.qualified_short_id,
absolute_uri(self.group.get_absolute_url(params={"referrer": "gitlab_integration"})),
)
project_id = 10
project_name = "This_is / a_project"
org_integration = self.installation.org_integration
org_integration.config["project_issue_defaults"] = {
six.text_type(self.group.project_id): {"project": project_id}
}
org_integration.save()
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/groups/%s/projects"
% self.installation.model.metadata["group_id"],
json=[
{"name_with_namespace": "getsentry / sentry", "id": 1},
{"name_with_namespace": "getsentry / hello", "id": 22},
],
)
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/projects/%s" % project_id,
json={"name_with_namespace": project_name, "id": project_id},
)
assert self.installation.get_create_issue_config(self.group, self.user) == [
{
"url": "/extensions/gitlab/search/baz/%d/" % self.installation.model.id,
"name": "project",
"required": True,
"choices": [
(10, u"This_is / a_project"),
(1, u"getsentry / sentry"),
(22, u"getsentry / hello"),
],
"defaultValue": project_id,
"type": "select",
"label": "GitLab Project",
},
{
"name": "title",
"label": "Title",
"default": self.group.get_latest_event().title,
"type": "string",
"required": True,
},
{
"name": "description",
"label": "Description",
"default": group_description,
"type": "textarea",
"autosize": True,
"maxRows": 10,
},
]
@responses.activate
def test_create_issue_no_projects(self):
group_description = (
u"Sentry Issue: [%s](%s)\n\n"
"```\nStacktrace (most recent call first):\n\n"
' File "sentry/models/foo.py", line 29, in build_msg\n'
" string_max_length=self.string_max_length)\n\nmessage\n```"
) % (
self.group.qualified_short_id,
absolute_uri(self.group.get_absolute_url(params={"referrer": "gitlab_integration"})),
)
responses.add(
responses.GET,
u"https://example.gitlab.com/api/v4/groups/%s/projects"
% self.installation.model.metadata["group_id"],
json=[],
)
assert self.installation.get_create_issue_config(self.group, self.user) == [
{
"url": "/extensions/gitlab/search/baz/%d/" % self.installation.model.id,
"name": "project",
"required": True,
"choices": [],
"defaultValue": "",
"type": "select",
"label": "GitLab Project",
},
{
"name": "title",
"label": "Title",
"default": self.group.get_latest_event().title,
"type": "string",
"required": True,
},
{
"name": "description",
"label": "Description",
"default": group_description,
"type": "textarea",
"autosize": True,
"maxRows": 10,
},
]
@responses.activate
def test_after_link_issue(self):
responses.add(
responses.POST,
u"https://example.gitlab.com/api/v4/projects/2/issues/321/notes",
json=[],
)
data = {"externalIssue": "2#321", "comment": "This is not good."}
external_issue = ExternalIssue.objects.create(
organization_id=self.organization.id, integration_id=self.integration.id, key="2#321"
)
self.installation.after_link_issue(external_issue, data=data)
def test_after_link_issue_required_fields(self):
data = {"externalIssue": "2#231", "comment": "This is not good."}
external_issue = ExternalIssue.objects.create(
organization_id=self.organization.id, integration_id=self.integration.id, key="#"
)
with self.assertRaises(IntegrationError):
self.installation.after_link_issue(external_issue, data=data)
@responses.activate
def test_after_link_issue_failure(self):
responses.add(
responses.POST,
u"https://example.gitlab.com/api/v4/projects/2/issues/321/notes",
status=502,
)
data = {"externalIssue": "2#321", "comment": "This is not good."}
external_issue = ExternalIssue.objects.create(
organization_id=self.organization.id, integration_id=self.integration.id, key="2#321"
)
with self.assertRaises(IntegrationError):
self.installation.after_link_issue(external_issue, data=data)
|
{
"content_hash": "d473967974962ed781c2f12d843694b1",
"timestamp": "",
"source": "github",
"line_count": 414,
"max_line_length": 100,
"avg_line_length": 38.51932367149758,
"alnum_prop": 0.497209506490249,
"repo_name": "beeftornado/sentry",
"id": "41570aa7e815b5bc160c0e786411e8b9db5dbf4b",
"size": "15947",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sentry/integrations/gitlab/test_issues.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157195"
},
{
"name": "HTML",
"bytes": "197026"
},
{
"name": "JavaScript",
"bytes": "380379"
},
{
"name": "Makefile",
"bytes": "2832"
},
{
"name": "Python",
"bytes": "6473603"
}
],
"symlink_target": ""
}
|
from typing import Dict
from allennlp.data.fields.field import DataArray, Field
from allennlp.data.vocabulary import Vocabulary
class Instance:
"""
An ``Instance`` is a collection of :class:`~allennlp.data.fields.field.Field` objects,
specifying the inputs and outputs to
some model. We don't make a distinction between inputs and outputs here, though - all
operations are done on all fields, and when we return arrays, we return them as dictionaries
keyed by field name. A model can then decide which fields it wants to use as inputs and which
as outputs.
The ``Fields`` in an ``Instance`` can start out either indexed or un-indexed. During the data
processing pipeline, all fields will end up as ``IndexedFields``, and will then be converted
into padded arrays by a ``DataGenerator``.
Parameters
----------
fields : ``Dict[str, Field]``
The ``Field`` objects that will be used to produce data arrays for this instance.
"""
def __init__(self, fields: Dict[str, Field]) -> None:
self.fields = fields
def count_vocab_items(self, counter: Dict[str, Dict[str, int]]):
"""
Increments counts in the given ``counter`` for all of the vocabulary items in all of the
``Fields`` in this ``Instance``.
"""
for field in self.fields.values():
field.count_vocab_items(counter)
def index_fields(self, vocab: Vocabulary):
"""
Converts all ``UnindexedFields`` in this ``Instance`` to ``IndexedFields``, given the
``Vocabulary``. This `mutates` the current object, it does not return a new ``Instance``.
"""
for field in self.fields.values():
field.index(vocab)
def get_padding_lengths(self) -> Dict[str, Dict[str, int]]:
"""
Returns a dictionary of padding lengths, keyed by field name. Each ``Field`` returns a
mapping from padding keys to actual lengths, and we just key that dictionary by field name.
"""
lengths = {}
for field_name, field in self.fields.items():
lengths[field_name] = field.get_padding_lengths()
return lengths
def as_tensor_dict(self,
padding_lengths: Dict[str, Dict[str, int]] = None,
cuda_device: int = -1,
for_training: bool = True) -> Dict[str, DataArray]:
"""
Pads each ``Field`` in this instance to the lengths given in ``padding_lengths`` (which is
keyed by field name, then by padding key, the same as the return value in
:func:`get_padding_lengths`), returning a dictionary of torch tensors keyed by field name.
If ``padding_lengths`` is omitted, we will call ``self.get_padding_lengths()`` to get the
sizes of the tensors to create.
"""
padding_lengths = padding_lengths or self.get_padding_lengths()
tensors = {}
for field_name, field in self.fields.items():
tensors[field_name] = field.as_tensor(padding_lengths[field_name],
cuda_device=cuda_device,
for_training=for_training)
return tensors
|
{
"content_hash": "316fc9c08df5cc6190f031f5e944daba",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 99,
"avg_line_length": 45.06944444444444,
"alnum_prop": 0.6147919876733436,
"repo_name": "nafitzgerald/allennlp",
"id": "025e09359dff56da30ce06469ddf4f776c61024a",
"size": "3245",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "allennlp/data/instance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "5564"
},
{
"name": "Cuda",
"bytes": "18610"
},
{
"name": "Jupyter Notebook",
"bytes": "36610"
},
{
"name": "Makefile",
"bytes": "1478"
},
{
"name": "Perl",
"bytes": "43067"
},
{
"name": "Python",
"bytes": "1247761"
},
{
"name": "Shell",
"bytes": "13919"
}
],
"symlink_target": ""
}
|
import messagebird
import argparse
from messagebird.conversation_webhook import \
CONVERSATION_WEBHOOK_EVENT_CONVERSATION_CREATED, \
CONVERSATION_WEBHOOK_EVENT_CONVERSATION_UPDATED
parser = argparse.ArgumentParser()
parser.add_argument('--accessKey', help='access key for MessageBird API', type=str, required=True)
parser.add_argument('--webhookId', help='webhook that you want to update', type=str, required=True)
parser.add_argument('--url', help='url for the webhook', type=str)
parser.add_argument('--status', help='Status of the webhook. Can be set to "enabled" or "disabled"', type=str, default='enabled')
args = vars(parser.parse_args())
try:
client = messagebird.Client(args['accessKey'])
update_request = {
'events': [CONVERSATION_WEBHOOK_EVENT_CONVERSATION_CREATED, CONVERSATION_WEBHOOK_EVENT_CONVERSATION_UPDATED],
'url': args['url'],
'status': args['status']
}
webhook = client.conversation_update_webhook(args['webhookId'], update_request)
# Print the object information.
print('The following information was returned as a Webhook object:')
print(webhook)
except messagebird.client.ErrorException as e:
print('An error occurred while requesting a Webhook object:')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
|
{
"content_hash": "437b14dbe57273e8777587b15ad3037c",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 129,
"avg_line_length": 41.02857142857143,
"alnum_prop": 0.7040389972144847,
"repo_name": "messagebird/python-rest-api",
"id": "08084c87afe60a3fdec813ab05791165267e58ba",
"size": "1458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/conversation_update_webhook.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "153235"
}
],
"symlink_target": ""
}
|
def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__,'_imaging.so')
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__()
|
{
"content_hash": "2bc8c8095d99a08d5924029af67b5904",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 69,
"avg_line_length": 39.714285714285715,
"alnum_prop": 0.5935251798561151,
"repo_name": "poojavade/Genomics_Docker",
"id": "f94d4e898ea4fff6bbdac5299891dc2281b9fefb",
"size": "278",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Pillow-2.3.0-py2.7-linux-x86_64.egg/PIL/_imaging.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10509"
},
{
"name": "C",
"bytes": "1265138"
},
{
"name": "C++",
"bytes": "4734960"
},
{
"name": "CSS",
"bytes": "17332"
},
{
"name": "FORTRAN",
"bytes": "10375"
},
{
"name": "GLSL",
"bytes": "493"
},
{
"name": "Groff",
"bytes": "77173"
},
{
"name": "HTML",
"bytes": "395483"
},
{
"name": "Java",
"bytes": "9223"
},
{
"name": "JavaScript",
"bytes": "783663"
},
{
"name": "Jupyter Notebook",
"bytes": "189877"
},
{
"name": "Lua",
"bytes": "28217"
},
{
"name": "Makefile",
"bytes": "77825"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Perl",
"bytes": "244796"
},
{
"name": "Python",
"bytes": "54562861"
},
{
"name": "R",
"bytes": "2568"
},
{
"name": "Shell",
"bytes": "40620"
},
{
"name": "Smarty",
"bytes": "21035"
},
{
"name": "TeX",
"bytes": "55310"
}
],
"symlink_target": ""
}
|
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from ambari_commons.logging_utils import print_info_msg, print_error_msg
from ambari_commons.os_utils import run_os_command
from ambari_server.dbConfiguration import ensure_jdbc_driver_is_installed
from ambari_server.serverConfiguration import configDefaults, \
get_ambari_properties, get_java_exe_path, read_ambari_user, get_db_type
from ambari_server.setupSecurity import generate_env, ensure_can_start_under_current_user
from ambari_server.userInput import get_YN_input
from ambari_server.serverClassPath import ServerClassPath
from ambari_server.serverUtils import is_server_runing
import datetime
import logging
logger = logging.getLogger(__name__)
DB_CLEANUP_CMD = "{0} -cp {1} org.apache.ambari.server.cleanup.CleanupDriver --cluster-name {2} --from-date {3}> " + configDefaults.SERVER_OUT_FILE + " 2>&1"
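# Illustrative only: with hypothetical values for the placeholders this expands
# to roughly
#   <java> -cp <classpath> org.apache.ambari.server.cleanup.CleanupDriver
#       --cluster-name mycluster --from-date 2018-01-01 > <server out file> 2>&1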
#
# Run the db cleanup process
#
def run_db_purge(options):
if validate_args(options):
return 1
status, stateDesc = is_server_runing()
if not options.silent:
db_title = get_db_type(get_ambari_properties()).title
confirmBackup = get_YN_input("Ambari Server configured for {0}. Confirm you have made a backup of the Ambari Server database [y/n]".format(
db_title), True)
if not confirmBackup:
print_info_msg("Ambari Server Database purge aborted")
return 0
if status:
print_error_msg("The database purge historical data cannot proceed while Ambari Server is running. Please shut down Ambari first.")
return 1
confirm = get_YN_input(
"Ambari server is using db type {0}. Cleanable database entries older than {1} will be purged. Proceed [y/n]".format(
db_title, options.purge_from_date), True)
if not confirm:
print_info_msg("Ambari Server Database purge aborted")
return 0
jdk_path = get_java_exe_path()
if jdk_path is None:
print_error_msg("No JDK found, please run the \"setup\" command to install a JDK automatically or install any "
"JDK manually to {0}".format(configDefaults.JDK_INSTALL_DIR));
return 1
ensure_jdbc_driver_is_installed(options, get_ambari_properties())
serverClassPath = ServerClassPath(get_ambari_properties(), options)
class_path = serverClassPath.get_full_ambari_classpath_escaped_for_shell()
ambari_user = read_ambari_user()
current_user = ensure_can_start_under_current_user(ambari_user)
environ = generate_env(options, ambari_user, current_user)
print "Purging historical data from the database ..."
command = DB_CLEANUP_CMD.format(jdk_path, class_path, options.cluster_name, options.purge_from_date)
(retcode, stdout, stderr) = run_os_command(command, env=environ)
print_info_msg("Return code from database cleanup command, retcode = " + str(retcode))
if stdout:
print "Console output from database purge-history command:"
print stdout
print
if stderr:
print "Error output from database purge-history command:"
print stderr
print
if retcode > 0:
print_error_msg("Error encountered while purging the Ambari Server Database. Check the ambari-server.log for details.")
else:
print "Purging historical data completed. Check the ambari-server.log for details."
return retcode
#
# Database purge
#
def db_purge(options):
return run_db_purge(options)
def validate_args(options):
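# Returns 1 when --cluster-name or --from-date is missing or malformed;
# falls through (returning None, which is falsy) when validation passes,
# so run_db_purge treats any truthy result as a validation failure.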
if not options.cluster_name:
print_error_msg("Please provide the --cluster-name argument.")
return 1
if not options.purge_from_date:
print_error_msg("Please provide the --from-date argument.")
return 1
try:
datetime.datetime.strptime(options.purge_from_date, "%Y-%m-%d")
except ValueError as e:
print_error_msg("The --from-date argument has an invalid format. {0}".format(e.args[0]))
return 1
|
{
"content_hash": "f58df44b8c8cb849a89afa14014595f2",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 157,
"avg_line_length": 38.51639344262295,
"alnum_prop": 0.7082357948499681,
"repo_name": "arenadata/ambari",
"id": "26111412f2777eaa2051a6dcff862a1a83b895dd",
"size": "4722",
"binary": false,
"copies": "4",
"ref": "refs/heads/branch-adh-1.6",
"path": "ambari-server/src/main/python/ambari_server/dbCleanup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "46700"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "215907"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "343739"
},
{
"name": "CoffeeScript",
"bytes": "8465"
},
{
"name": "Dockerfile",
"bytes": "6387"
},
{
"name": "EJS",
"bytes": "777"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Gherkin",
"bytes": "990"
},
{
"name": "Groovy",
"bytes": "15882"
},
{
"name": "HTML",
"bytes": "717983"
},
{
"name": "Handlebars",
"bytes": "1819641"
},
{
"name": "Java",
"bytes": "29172298"
},
{
"name": "JavaScript",
"bytes": "18571926"
},
{
"name": "Jinja",
"bytes": "1490416"
},
{
"name": "Less",
"bytes": "412933"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLpgSQL",
"bytes": "287501"
},
{
"name": "PowerShell",
"bytes": "2090340"
},
{
"name": "Python",
"bytes": "18507704"
},
{
"name": "R",
"bytes": "3943"
},
{
"name": "Ruby",
"bytes": "38590"
},
{
"name": "SCSS",
"bytes": "40072"
},
{
"name": "Shell",
"bytes": "924115"
},
{
"name": "Stylus",
"bytes": "820"
},
{
"name": "TSQL",
"bytes": "42351"
},
{
"name": "Vim script",
"bytes": "5813"
},
{
"name": "sed",
"bytes": "2303"
}
],
"symlink_target": ""
}
|
from invocations.docs import docs, www
from invocations.packaging import release
from invoke import Collection
ns = Collection(docs, www, release)
ns.configure({
'packaging': {
'sign': True,
'wheel': True,
'changelog_file': 'sites/www/changelog.rst',
'package': 'fabric',
'version_module': 'version',
},
})
|
{
"content_hash": "8a0b712b1254cf58c320ab1e2f71f83c",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 52,
"avg_line_length": 22.375,
"alnum_prop": 0.6256983240223464,
"repo_name": "cmattoon/fabric",
"id": "72f68f819ab266228f74aa14a25a04eaa7e2fc50",
"size": "358",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tasks.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "469441"
}
],
"symlink_target": ""
}
|
import os
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
from glance.common import crypt
from glance.common import utils
from glance.tests import utils as test_utils
class UtilsTestCase(test_utils.BaseTestCase):
def test_encryption(self):
# Check that original plaintext and unencrypted ciphertext match
# Check keys of the three allowed lengths
key_list = ["1234567890abcdef",
"12345678901234567890abcd",
"1234567890abcdef1234567890ABCDEF"]
plaintext_list = ['']
blocksize = 64
for i in range(3 * blocksize):
plaintext_list.append(os.urandom(i))
for key in key_list:
for plaintext in plaintext_list:
ciphertext = crypt.urlsafe_encrypt(key, plaintext, blocksize)
self.assertNotEqual(ciphertext, plaintext)
text = crypt.urlsafe_decrypt(key, ciphertext)
self.assertEqual(plaintext, text)
def test_empty_metadata_headers(self):
"""Ensure unset metadata is not encoded in HTTP headers"""
metadata = {
'foo': 'bar',
'snafu': None,
'bells': 'whistles',
'unset': None,
'empty': '',
'properties': {
'distro': '',
'arch': None,
'user': 'nobody',
},
}
headers = utils.image_meta_to_http_headers(metadata)
self.assertNotIn('x-image-meta-snafu', headers)
self.assertNotIn('x-image-meta-unset', headers)
self.assertNotIn('x-image-meta-property-arch', headers)
self.assertEqual('bar', headers.get('x-image-meta-foo'))
self.assertEqual('whistles', headers.get('x-image-meta-bells'))
self.assertEqual('', headers.get('x-image-meta-empty'))
self.assertEqual('', headers.get('x-image-meta-property-distro'))
self.assertEqual('nobody', headers.get('x-image-meta-property-user'))
|
{
"content_hash": "fd3db07b7882e569cc30ca1e8d4c6026",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 77,
"avg_line_length": 36.13793103448276,
"alnum_prop": 0.5992366412213741,
"repo_name": "wkoathp/glance",
"id": "744c00a250c8a674707997c5010592d59d627403",
"size": "2737",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "glance/tests/unit/test_misc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3844475"
},
{
"name": "Shell",
"bytes": "7860"
}
],
"symlink_target": ""
}
|
import zstackwoodpecker.test_state as ts_header
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template2", \
path_list=[[TestAction.create_data_volume_from_image, "volume1", "=scsi"],
[TestAction.attach_volume, "vm1", "volume1"],
[TestAction.create_data_volume_from_image, "volume2", "=scsi"],
[TestAction.attach_volume, "vm1", "volume2"],
[TestAction.create_data_volume_from_image, "volume3", "=scsi"],
[TestAction.attach_volume, "vm1", "volume3"],
[TestAction.delete_volume, "volume1"],
[TestAction.create_volume_snapshot, "vm1-root", "snapshot1-1"],
[TestAction.create_volume_snapshot, "vm1-root", "snapshot1-2"],
[TestAction.detach_volume, "volume2"],
[TestAction.create_volume_snapshot, "volume2", "snapshot2-1"],
[TestAction.create_volume_snapshot, "volume2", "snapshot2-2"],
[TestAction.create_volume_snapshot, "volume2", "snapshot2-3"],
[TestAction.create_volume_snapshot, "volume2", "snapshot2-4"],
[TestAction.delete_volume, "volume3"],
[TestAction.batch_delete_volume_snapshot, ["snapshot2-2","snapshot2-3"]],
[TestAction.reboot_vm, "vm1"],
[TestAction.stop_vm, "vm1"],
[TestAction.use_volume_snapshot, "snapshot2-1"],
[TestAction.attach_volume, "vm1", "volume2"],
[TestAction.use_volume_snapshot, "snapshot1-1"],
[TestAction.use_volume_snapshot, "snapshot2-1"],
[TestAction.start_vm, "vm1"],
[TestAction.delete_volume_snapshot, "snapshot2-1"],
[TestAction.reboot_vm, "vm1"]
])
|
{
"content_hash": "a431f53f6437aabd5e885eb4c9bfb538",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 99,
"avg_line_length": 69.46666666666667,
"alnum_prop": 0.4995201535508637,
"repo_name": "zstackio/zstack-woodpecker",
"id": "1f7fca34c41c518fac636a361f9e19844e7ef262",
"size": "2084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integrationtest/vm/multihosts/volumes/paths/path60.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
}
|
import unittest
import joerd.download as download
try:
# Python 2.x
import BaseHTTPServer as http
except ImportError:
# Python 3.x
from http import server as http
import contextlib
from httptestserver import Server
import re
# simple handler which does what most HTTP servers (should) do; responds with
# the whole requested file.
class _SimpleHandler(http.BaseHTTPRequestHandler):
def __init__(self, value, *args):
self.value = value
http.BaseHTTPRequestHandler.__init__(self, *args)
def do_GET(self):
self.send_response(200)
self.send_header('Content-Length', len(self.value))
self.end_headers()
self.wfile.write(self.value)
# handler which emulates the GMTED server / TCP-layer rate-limiter; it drops
# connections after some number of bytes.
class _DroppingHandler(http.BaseHTTPRequestHandler):
def __init__(self, value, max_len_obj, support_range, *args):
self.value = value
self.max_len_obj = max_len_obj
self.support_range = support_range
http.BaseHTTPRequestHandler.__init__(self, *args)
def _parse_range(self, r, max_len):
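# Parses an HTTP Range header such as 'bytes=4-' or 'bytes=4-7' into a
# (start, end) tuple, clamping end to at most max_len bytes past start and
# to the length of the stored value; returns None for a missing or
# malformed header.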
if r is None:
return None
m = re.match('bytes=([0-9]+)-([0-9]*)', r)
if not m:
return None
start = int(m.group(1))
end = int(m.group(2)) if len(m.group(2)) > 0 else None
if end is None:
end = min(start + max_len, len(self.value))
else:
end = min(end, start + max_len, len(self.value))
return (start, end)
def do_GET(self):
max_len = self.max_len_obj.get()
byte_range = self._parse_range(self.headers.get('Range'), max_len)
if byte_range is None or not self.support_range:
self.send_response(200)
if self.support_range:
self.send_header('Accept-Ranges', 'bytes')
self.send_header('Content-Length', len(self.value))
self.end_headers()
self.wfile.write(self.value[0:max_len])
elif byte_range[0] >= len(self.value):
self.send_response(416)
self.send_header('Accept-Ranges', 'bytes')
self.end_headers()
else:
cr = 'bytes %d-%d/%d' % \
(byte_range[0], len(self.value) - 1, len(self.value))
self.send_response(206)
self.send_header('Accept-Ranges', 'bytes')
self.send_header('Content-Length', len(self.value) - byte_range[0])
self.send_header('Content-Range', cr)
self.end_headers()
self.wfile.write(self.value[byte_range[0]:byte_range[1]+1])
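# helper that returns the maximum number of bytes the handler may serve on
# each call; a non-zero increment models a server that serves progressively
# more data on every retry.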
class _MaxLenFunc:
def __init__(self, init_len, incr_len):
self.length = init_len
self.incr = incr_len
def get(self):
l = self.length
self.length = l + self.incr
return l
# guard function to run a test HTTP server on another thread and reap it when
# it goes out of scope.
@contextlib.contextmanager
def _test_http_server(handler):
server = Server('127.0.0.1', 0, 'http', handler)
server.start()
yield server
class TestDownload(unittest.TestCase):
def test_download_simple(self):
# Test that the download function can download a file over HTTP.
value = "Some random string here."
def _handler(*args):
return _SimpleHandler(value, *args)
def _verifier(filelike):
return filelike.read() == value
with _test_http_server(_handler) as server:
with download.get(server.url('/'), dict(
verifier=_verifier, tries=1)) as data:
self.assertEqual(value, data.read())
def test_download_restart(self):
# Test that the download function can handle restarting, and fetching
# a file as a series of smaller byte ranges.
value = "Some random string here."
# The server will only return 4-byte chunks, but it should be possible
# to download the whole file eventually.
max_len = _MaxLenFunc(4, 0)
def _handler(*args):
return _DroppingHandler(value, max_len, True, *args)
def _verifier(filelike):
v = filelike.read() == value
return v
with _test_http_server(_handler) as server:
with download.get(server.url('/'), dict(
verifier=_verifier, tries=(len(value) / 4 + 1))) as data:
self.assertEqual(value, data.read())
def test_download_restart_from_scratch(self):
# Test that the download function can handle restarting from scratch
# if the server doesn't support byte range requests.
value = "Some random string here."
# The server initially doesn't give the whole file, but eventually
# will.
max_len = _MaxLenFunc(4, 4)
def _handler(*args):
return _DroppingHandler(value, max_len, False, *args)
def _verifier(filelike):
v = filelike.read() == value
return v
with _test_http_server(_handler) as server:
with download.get(server.url('/'), dict(
verifier=_verifier, tries=(len(value) / 4 + 1))) as data:
self.assertEqual(value, data.read())
def test_download_limited_retries(self):
# In the case that the server is so thoroughly broken that it is not
# possible to download the file (or would take an inordinate amount of
# time), the download process should cap the maximum number of tries
# to a finite value.
value = "Some random string here."
# The server just gives back the same 4 bytes over and over.
max_len = _MaxLenFunc(4, 0)
def _handler(*args):
return _DroppingHandler(value, max_len, False, *args)
def _verifier(filelike):
v = filelike.read() == value
return v
with _test_http_server(_handler) as server:
with self.assertRaises(download.DownloadFailedError):
with download.get(server.url('/'), dict(verifier=_verifier,
tries=10)) as data:
data.read()
|
{
"content_hash": "19c42b6458fac92e6b7989353d1fb962",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 79,
"avg_line_length": 34.15846994535519,
"alnum_prop": 0.5898256278995361,
"repo_name": "mapzen/joerd",
"id": "6f6791f4914a77d11e2ebb9f86913d92b2bae5e2",
"size": "6251",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_download.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "158948"
}
],
"symlink_target": ""
}
|
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
with open('requirements.txt') as f:
requirements = f.read().splitlines()
class Tox(TestCommand):
user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.tox_args = None
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, because outside the eggs aren't loaded
import tox
import shlex
args = self.tox_args
if args:
args = shlex.split(self.tox_args)
errno = tox.cmdline(args=args)
sys.exit(errno)
setup(
author='dron22',
author_email='info@fastback.io',
cmdclass={'test': Tox},
description='Simple DynDns with AWS Route53',
include_package_data=True,
install_requires=requirements,
license='MIT',
name='r53dyndns',
packages=['r53dyndns'],
scripts=['bin/r53dyndns'],
tests_require=['tox'],
url='https://github.com/dron22/r53dyndns',
version='0.1.0',
zip_safe=False
)
|
{
"content_hash": "77c45bfc2b8391f9b7a6e84f451b8b9c",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 67,
"avg_line_length": 24.918367346938776,
"alnum_prop": 0.6322686322686323,
"repo_name": "dron22/r53dyndns",
"id": "8ed881de110518ae4438c92ec60a9426078757cc",
"size": "1222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7807"
}
],
"symlink_target": ""
}
|
from googleapiclient import discovery
from googleapiclient import errors
from multiprocessing import pool
from oauth2client import client
from oauth2client import file as oauth_file
from oauth2client import service_account
from oauth2client import tools
from protorpc import message_types
from protorpc import messages
from protorpc import protojson
import base64
import hashlib
import httplib2
import json
import logging
import mimetypes
import multiprocessing
import os
import progressbar
import requests
import threading
import traceback
import urllib.parse
# Google API details for a native/installed application for API project
# grow-prod.
CLIENT_ID = '578372381550-jfl3hdlf1q5rgib94pqsctv1kgkflu1a.apps.googleusercontent.com'
CLIENT_SECRET = 'XQKqbwTg88XVpaBNRcm_tYLf' # Not so secret for installed apps.
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
OAUTH_SCOPES = [
'https://www.googleapis.com/auth/plus.me',
'https://www.googleapis.com/auth/userinfo.email',
]
APPENGINE_SERVER_PREFIXES = ('Development/', 'Google App Engine/')
# https://cloud.google.com/appengine/docs/standard/python/how-requests-are-handled
IS_APPENGINE = os.getenv('SERVER_SOFTWARE', '').startswith(
APPENGINE_SERVER_PREFIXES)
try:
from oauth2client.contrib import appengine
except ImportError:
appengine = None
DEFAULT_STORAGE_KEY = 'WebReview Client'
_CLEARED_AUTH_KEYS = {}
requests_logger = logging.getLogger('requests')
requests_logger.setLevel(logging.WARNING)
class Error(Exception):
pass
class Verb(object):
GET = 'GET'
PUT = 'PUT'
DELETE = 'DELETE'
class AuthorMessage(messages.Message):
name = messages.StringField(1)
email = messages.StringField(2)
class CommitMessage(messages.Message):
sha = messages.StringField(1)
author = messages.MessageField(AuthorMessage, 2)
date = message_types.DateTimeField(3)
message = messages.StringField(4)
has_unstaged_changes = messages.BooleanField(5)
branch = messages.StringField(6)
def batch(items, size):
"""Batches a list into a list of lists, with sub-lists sized by a specified
batch size."""
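# e.g. batch([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]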
return [items[x:x + size] for x in range(0, len(items), size)]
def get_storage(key, username):
"""Returns the Storage class compatible with the current environment."""
if IS_APPENGINE and appengine:
return appengine.StorageByKeyName(
appengine.CredentialsModel, username, 'credentials')
file_name = os.path.expanduser('~/.config/webreview/{}_{}'.format(key, username))
dir_name = os.path.dirname(file_name)
if not os.path.exists(dir_name):
os.makedirs(dir_name)
return oauth_file.Storage(file_name)
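# httplib2.Http subclass that injects the 'WebReview-Api-Key' header into every
# outgoing request when an API key has been supplied.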
class HttpWithApiKey(httplib2.Http):
def __init__(self, *args, **kwargs):
self.api_key = kwargs.pop('api_key', None)
super(HttpWithApiKey, self).__init__(*args, **kwargs)
def _request(self, conn, host, absolute_uri, request_uri, method, body, headers,
redirections, cachekey):
if headers is None:
headers = {}
if self.api_key is not None:
headers['WebReview-Api-Key'] = self.api_key
return super(HttpWithApiKey, self)._request(
conn, host, absolute_uri, request_uri, method, body, headers,
redirections, cachekey)
class RpcError(Error):
def __init__(self, status, message=None, data=None, tb=None):
self.status = status
self.message = data['error_message'] if data else message
self.data = data
self.tb = tb
def __repr__(self):
return '{}: {}'.format(self.status, self.message)
def __str__(self):
return '{}: {}'.format(self.status, self.message)
def __getitem__(self, name):
return self.data[name]
class WebReviewRpcError(RpcError):
pass
class GoogleStorageRpcError(RpcError, IOError):
pass
class RenderedDocStub(object):
"""Stub to simulate the rendered document."""
def __init__(self, path=None, content=None):
self.path = path
self.content = content
def read(self):
return self.content
def write(self, content):
self.content = content
return self.content
class WebReview(object):
_pool_size = 10
def __init__(self, project=None, name=None, commit=None, host=None,
secure=False, username='default', api='webreview',
version='v0', api_key=None):
if '/' not in project:
raise ValueError('Project must be in format: <owner>/<project>')
self.owner, self.project = project.split('/')
self.name = name
self.gs = GoogleStorageSigner()
self.lock = threading.Lock()
self.pool = pool.ThreadPool(processes=self._pool_size)
self.commit = commit
root = '{}://{}/_ah/api'.format('https' if secure else 'http', host)
self.api_key = api_key
self._api = api
self._version = version
self._url = '{}/discovery/v1/apis/{}/{}/rest'.format(
root, api, version)
self._service = None
@property
def fileset(self):
commit = None
if self.commit:
commit = json.loads(protojson.encode_message(self.commit))
return {
'name': self.name.lower() if self.name else '',
'commit': commit,
'project': {
'nickname': self.project.lower() if self.project else '',
'owner': {
'nickname': self.owner.lower() if self.owner else '',
},
},
}
def get_service(self, username='default', reauth=False):
http = HttpWithApiKey(api_key=self.api_key)
if self.api_key is None:
credentials = WebReview.get_credentials(
username=username, reauth=reauth)
credentials.authorize(http)
if credentials.access_token_expired:
credentials.refresh(http)
return discovery.build(
self._api,
self._version,
discoveryServiceUrl=self._url,
http=http)
def login(self, username='default', reauth=False):
self._service = self.get_service(username=username, reauth=reauth)
@property
def service(self):
if self._service is not None:
return self._service
self._service = self.get_service()
return self._service
@staticmethod
def _get_flags():
parser = tools.argparser
if os.getenv('INTERACTIVE_AUTH'):
args = []
else:
args = ['--noauth_local_webserver']
flags, _ = parser.parse_known_args(args)
return flags
@staticmethod
def get_credentials(username, reauth=False):
if os.getenv('CLEAR_AUTH') and username not in _CLEARED_AUTH_KEYS:
WebReview.clear_credentials(username)
storage = get_storage(DEFAULT_STORAGE_KEY, username)
flags = WebReview._get_flags()
if os.getenv('AUTH_KEY_FILE'):
key_file = os.path.expanduser(os.getenv('AUTH_KEY_FILE'))
credentials = (service_account.ServiceAccountCredentials.
from_json_keyfile_name(key_file, OAUTH_SCOPES))
else:
credentials = storage.get()
if credentials and not credentials.invalid:
return credentials
flow = client.OAuth2WebServerFlow(
CLIENT_ID, CLIENT_SECRET, OAUTH_SCOPES, redirect_uri=REDIRECT_URI)
credentials = tools.run_flow(flow, storage, flags)
# run_flow changes the logging level, so change it back.
logging.getLogger().setLevel(getattr(logging, 'INFO'))
return credentials
@staticmethod
def clear_credentials(username):
storage = get_storage(DEFAULT_STORAGE_KEY, username)
storage.delete()
_CLEARED_AUTH_KEYS[username] = True
def upload_dir(self, build_dir):
paths_to_rendered_doc = WebReview._get_paths_to_rendered_doc_from_dir(
build_dir)
return self.write(paths_to_rendered_doc)
def get_signed_requests(self, verb, paths_to_rendered_doc):
self.pool = pool.ThreadPool(processes=self._pool_size)
# Batch the request-signing work into groups of 200 paths to avoid
# DeadlineExceededError on the server.
items_to_batch = list(paths_to_rendered_doc.items())
batched_items = batch(items_to_batch, 200)
manager = multiprocessing.Manager()
signed_requests = manager.list()
error_objs = manager.list()
batch_size = len(batched_items)
text = 'Starting: %(value)d/{} (in %(elapsed)s)'
widgets = [progressbar.FormatLabel(text.format(batch_size))]
bar = None
if batch_size > 2:
bar = progressbar.ProgressBar(widgets=widgets, maxval=batch_size)
bar.start()
def _execute_request_signing_request(reqs, errs, service, item):
try:
batched_paths_to_contents = dict(item)
req = self.gs.create_sign_requests_request(
verb, self.fileset, batched_paths_to_contents)
try:
resp = service.sign_requests(body=req).execute()
except errors.HttpError as e:
errs += [(e.resp.status, e._get_reason().strip(), None)]
return
if bar:
bar.update(bar.value + 1)
reqs += resp['signed_requests']
except Exception as err:
# Return traceback as a list of strings - complex objects fail.
tb = traceback.format_tb(err.__traceback__)
errs += [('Error creating signed request', str(err), tb)]
for item in batched_items:
service = self.get_service()
args = (signed_requests, error_objs, service, item)
self.pool.apply_async(_execute_request_signing_request, args=args)
self.pool.close()
self.pool.join()
if error_objs:
status, reason, tb = error_objs[0]
raise WebReviewRpcError(status, reason, tb=tb)
if bar:
bar.finish()
return signed_requests
def delete(self, paths):
paths_to_rendered_doc = dict(
[(path, RenderedDocStub()) for path in paths])
signed_requests = self.get_signed_requests(
Verb.DELETE, paths_to_rendered_doc)
return self._execute_signed_requests(signed_requests, paths_to_rendered_doc)
def read(self, paths):
paths_to_rendered_doc = dict(
[(path, RenderedDocStub()) for path in paths])
signed_requests = self.get_signed_requests(
Verb.GET, paths_to_rendered_doc)
return self._execute_signed_requests(signed_requests, paths_to_rendered_doc)
def write(self, paths_to_rendered_doc):
signed_requests = self.get_signed_requests(
Verb.PUT, paths_to_rendered_doc)
return self._execute_signed_requests(signed_requests, paths_to_rendered_doc)
def finalize(self):
try:
req = {'fileset': self.fileset}
return self.service.finalize(body=req).execute()
except errors.HttpError as e:
raise WebReviewRpcError(e.resp.status, e._get_reason().strip())
def _execute(self, req, path, content, bar, resps, errors):
error = None
resp = None
try:
resp = self.gs.execute_signed_request(req, content)
except Exception as err:
error = err
with self.lock:
if resp is not None:
resps[path] = resp
if error is not None:
errors[path] = error
if bar is not None:
bar.update(bar.value + 1)
def _execute_signed_requests(self, signed_requests, paths_to_rendered_doc):
if not signed_requests:
raise ValueError('No requests to sign.')
self.pool = pool.ThreadPool(processes=self._pool_size)
resps = {}
errors = {}
num_files = len(signed_requests)
text = 'Working: %(value)d/{} (in %(elapsed)s)'
widgets = [progressbar.FormatLabel(text.format(num_files))]
if num_files > 1:
bar = progressbar.ProgressBar(widgets=widgets, maxval=num_files)
bar.start()
for req in signed_requests:
path = req['path']
path_raw = urllib.parse.unquote(path)
content = paths_to_rendered_doc[path_raw].read()
if isinstance(content, str):
content = content.encode('utf-8')
args = (req, path, content, bar, resps, errors)
self.pool.apply_async(self._execute, args=args)
self.pool.close()
self.pool.join()
bar.finish()
else:
req = signed_requests[0]
path = req['path']
path_raw = urllib.parse.unquote(path)
content = paths_to_rendered_doc[path_raw].read()
if isinstance(content, str):
content = content.encode('utf-8')
self._execute(req, path, content, None, resps, errors)
return resps, errors
@classmethod
def _get_paths_to_rendered_doc_from_dir(cls, build_dir):
paths_to_rendered_doc = {}
for pre, _, files in os.walk(build_dir):
for f in files:
path = os.path.join(pre, f)
with open(path) as fp:
path = path.replace(build_dir, '')
if not path.startswith('/'):
path = '/{}'.format(path)
content = fp.read()
if isinstance(content, str):
content = content.encode('utf-8')
paths_to_rendered_doc[path] = RenderedDocStub(content=content)
return paths_to_rendered_doc
class GoogleStorageSigner(object):
@staticmethod
def create_unsigned_request(verb, path, content=None):
req = {
'path': urllib.parse.quote(path),
'verb': verb,
}
if verb == Verb.PUT:
if path.endswith('/'):
mimetype = 'text/html'
else:
mimetype = mimetypes.guess_type(path)[0]
mimetype = mimetype or 'application/octet-stream'
try:
content = content.encode('utf-8')
except AttributeError:
try:
content = content.decode('utf-8')
except (AttributeError, UnicodeDecodeError):
pass
md5_digest = base64.b64encode(
hashlib.md5(content).digest()).decode("utf-8")
req['headers'] = {}
req['headers']['content_length'] = str(len(content))
req['headers']['content_md5'] = md5_digest
req['headers']['content_type'] = mimetype
return req
def create_sign_requests_request(self, verb, fileset, paths_to_rendered_doc):
unsigned_requests = []
for path, rendered_doc in paths_to_rendered_doc.items():
req = self.create_unsigned_request(verb, path, rendered_doc.read())
unsigned_requests.append(req)
return {
'fileset': fileset,
'unsigned_requests': unsigned_requests,
}
@staticmethod
def execute_signed_request(signed_request, content=None, retry=0):
req = signed_request
params = {
'GoogleAccessId': req['params']['google_access_id'],
'Signature': req['params']['signature'],
'Expires': req['params']['expires'],
}
if signed_request['verb'] == Verb.PUT:
headers = {
'Content-Type': req['headers']['content_type'],
'Content-MD5': req['headers']['content_md5'],
'Content-Length': req['headers']['content_length'],
}
resp = requests.put(req['url'], params=params, headers=headers,
data=content)
elif signed_request['verb'] == Verb.GET:
resp = requests.get(req['url'], params=params)
elif signed_request['verb'] == Verb.DELETE:
resp = requests.delete(req['url'], params=params)
# GCS may intermittently respond with 50X errors. Retry up to three times
# when encountering 50Xs.
if retry < 3 and resp.status_code in [502, 503, 504]:
return GoogleStorageSigner.execute_signed_request(signed_request,
content=content, retry=retry + 1)
if not (resp.status_code >= 200 and resp.status_code < 205):
raise GoogleStorageRpcError(resp.status_code, message=resp.content)
return resp.content
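# Illustrative sketch (not part of the original module): the Content-MD5 header
# assembled in create_unsigned_request() is the base64 of the raw MD5 digest of
# the body (not the hex digest), which lets Google Cloud Storage verify the
# uploaded content, e.g.:
#
#   >>> base64.b64encode(hashlib.md5(b'hello world').digest()).decode('utf-8')
#   'XrY7u+Ae7tCTyyK7j1rNww=='
#
# execute_signed_request() then replays those headers verbatim on the signed PUT
# and retries 502/503/504 responses up to three times.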
|
{
"content_hash": "a4c4a4c512cdf0b9e14d323f69afbfdd",
"timestamp": "",
"source": "github",
"line_count": 463,
"max_line_length": 95,
"avg_line_length": 36.49244060475162,
"alnum_prop": 0.5915601325757576,
"repo_name": "grow/webreview-client",
"id": "ea1f457913f71c6d28064f767f1d62b1c71c1319",
"size": "16896",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webreview/webreview.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1129"
},
{
"name": "Python",
"bytes": "21345"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
setup(
name='wagtail-streamfieldtools',
packages=find_packages(),
version='0.3',
author=u'Jonathan Ellenberger',
author_email='jonathan_ellenberger@wgbh.org',
url='http://github.com/WGBH/wagtail-streamfieldtools/',
license='MIT License, see LICENSE',
description="A suite of tools that extends Wagtail's already-awesome "
"StreamField to make it even more flexible/versatile/useful!",
long_description=open('README.rst').read(),
zip_safe=False,
install_requires=['wagtail>=1.2'],
classifiers=[
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 3 - Alpha'
]
)
|
{
"content_hash": "8386d83ec1506e1a83c33aa3cd28ace9",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 78,
"avg_line_length": 37.354838709677416,
"alnum_prop": 0.6217616580310881,
"repo_name": "WGBH/wagtail-streamfieldtools",
"id": "9d6ed7b27b910ada4173f6f4ad16a733eb36061b",
"size": "1182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "608"
},
{
"name": "Python",
"bytes": "34853"
}
],
"symlink_target": ""
}
|
import argparse
import configparser
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-max_batch_size', '--max_batch_size', type=int, required=True, metavar='NUMBER',
help='batch size')
parser.add_argument('-max_input_length', '--max_input_length', type=int, required=True, metavar='NUMBER',
help='max input length')
args = parser.parse_args()
args_dict = vars(args)
batch_size = args_dict["max_batch_size"]
max_input_length = args_dict["max_input_length"]
path = f"../examples/cpp/multi_gpu_gpt/start_ids.csv"
with open(path, 'w') as f:
ids = ""
for i in range(batch_size):
for j in range(max_input_length):
if j == 0:
ids = f"{ids}198"
else:
ids = f"{ids}, 198"
ids = f"{ids}\n"
        f.write(ids)
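# Example (illustrative): invoking this script with --max_batch_size 2 and
# --max_input_length 3 writes two identical rows to start_ids.csv, one per
# batch entry, each holding max_input_length copies of token id 198:
#   198, 198, 198
#   198, 198, 198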
|
{
"content_hash": "db743d2aa1a8d75635f2ad4f9e9ce3d2",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 109,
"avg_line_length": 35.92307692307692,
"alnum_prop": 0.5331905781584583,
"repo_name": "NVIDIA/FasterTransformer",
"id": "ce4c56741983f0988853c64967fc54114c40aab5",
"size": "1550",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "examples/pytorch/gpt/utils/generate_start_ids.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2444"
},
{
"name": "C++",
"bytes": "3361167"
},
{
"name": "CMake",
"bytes": "117845"
},
{
"name": "Cuda",
"bytes": "1734491"
},
{
"name": "HCL",
"bytes": "1482"
},
{
"name": "Python",
"bytes": "73804"
},
{
"name": "Shell",
"bytes": "46724"
}
],
"symlink_target": ""
}
|
import numpy as np
import re
import itertools
from collections import Counter
import time
import gc
import gzip
from random import random
import sys
from scipy import misc
import imgaug as ia
from imgaug import augmenters as iaa
from imgaug import parameters as iap
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as pyplot
reload(sys)
sys.setdefaultencoding("utf-8")
class InputHelper(object):
def getfilenames(self, line, base_filepath, mapping_dict, max_document_length):
temp = []
line = line.strip().split(" ")
# Store paths of all images in the sequence
for i in range(1, len(line), 1):
if i < max_document_length:
#temp.append(base_filepath + mapping_dict[line[0]] + '/Image' + line[i].zfill(5) + '.jpg')
temp.append(base_filepath + mapping_dict[line[0]] + '/' + line[i] + '.png')
        # append black images if the sequence length is less than max_document_length
while len(temp) < max_document_length:
temp.append(base_filepath + 'black_image.png')
return temp
def getTsvTestData(self, base_filepath, max_document_length, filename, label):
print("Loading training data from " + base_filepath)
x1=[]
x2=[]
y=[]
video_lengths = []
        # load all the mapping dictionaries
mapping_dict = {}
print(base_filepath+'mapping_file')
for line_no,line in enumerate(open(base_filepath + 'mapping_file')):
mapping_dict['F' + str(line_no+1)] = line.strip()
        # Load sample sequence pairs from the given file
train_data=[]
l_neg = []
for line in open(filename):
line=line.split('/', 1)[0]
if (len(line) > 0 and line[0] == 'F'):
l_neg.append(line.strip())
# negative samples from file
num_negative_samples = len(l_neg)
for i in range(0,num_negative_samples,2):
#if random() < 0.2:
#print(num_negative_samples, i)
x1.append(self.getfilenames(l_neg[i], base_filepath, mapping_dict, max_document_length))
x2.append(self.getfilenames(l_neg[i+1], base_filepath, mapping_dict, max_document_length))
y.append(label)#np.array([0,1]))
temp_length = len(l_neg[i].strip().split(" "))
video_lengths.append(max_document_length if temp_length > max_document_length else temp_length)
#l_neg = len(x1) - len(l_pos)//2
return np.asarray(x1),np.asarray(x2),np.asarray(y), np.asarray(video_lengths)
def batch_iter(self, x1, x2, y, video_lengths, batch_size, num_epochs, conv_model_spec, shuffle=False, is_train=False):
"""
Generates a batch iterator for a dataset.
"""
data_size = len(y)
temp = int(data_size/batch_size)
num_batches_per_epoch = temp+1 if (data_size%batch_size) else temp
for epoch in range(num_epochs):
# Shuffle the data at each epoch
if shuffle:
shuffle_indices = np.random.permutation(np.arange(data_size))
x1_shuffled=x1[shuffle_indices]
x2_shuffled=x2[shuffle_indices]
y_shuffled=y[shuffle_indices]
video_lengths_shuffled = video_lengths[shuffle_indices]
else:
x1_shuffled=x1
x2_shuffled=x2
y_shuffled=y
video_lengths_shuffled = video_lengths
for batch_num in range(num_batches_per_epoch):
start_index = batch_num * batch_size
end_index = min((batch_num + 1) * batch_size, data_size)
processed_imgs = self.load_preprocess_images(x1_shuffled[start_index:end_index], x2_shuffled[start_index:end_index], conv_model_spec, epoch ,is_train)
yield( processed_imgs[0], processed_imgs[1], y_shuffled[start_index:end_index], video_lengths_shuffled[start_index:end_index])
def normalize_input(self, img, conv_model_spec):
img = img.astype(dtype=np.float32)
img = img[:, :, [2, 1, 0]] # swap channel from RGB to BGR
img = img - conv_model_spec[0]
return img
def load_preprocess_images(self, side1_paths, side2_paths, conv_model_spec, epoch, is_train=False):
batch1_seq, batch2_seq = [], []
for side1_img_paths, side2_img_paths in zip(side1_paths, side2_paths):
for side1_img_path,side2_img_path in zip(side1_img_paths, side2_img_paths):
img_org = misc.imread(side1_img_path)
img_resized = misc.imresize(np.asarray(img_org), conv_model_spec[1])
img_normalized = self.normalize_input(img_resized, conv_model_spec)
batch1_seq.append(img_normalized)
img_org = misc.imread(side2_img_path)
img_resized = misc.imresize(np.asarray(img_org), conv_model_spec[1])
img_normalized = self.normalize_input(img_resized, conv_model_spec)
batch2_seq.append(img_normalized)
temp = [np.asarray(batch1_seq), np.asarray(batch2_seq)]
return temp
    # Data Preparation
def getTestDataSet(self, data_path, max_document_length, filename, label):
x1,x2,y,video_lengths = self.getTsvTestData(data_path, max_document_length, filename, label)
gc.collect()
return x1,x2, y,video_lengths
def save_plot(val1, val2, xlabel, ylabel, title, axis, legend,path):
pyplot.figure()
pyplot.plot(val1, '*r--', val2, '^b-')
pyplot.xlabel(xlabel)
pyplot.ylabel(ylabel)
pyplot.title(title)
pyplot.axis(axis)
pyplot.legend(legend)
pyplot.savefig(path+'.pdf')
pyplot.clf()
def compute_distance(distance, loss):
d = np.copy(distance)
if loss == "AAAI":
d[distance>=0.5]=1
d[distance<0.5]=0
elif loss == "contrastive":
d[distance>0.5]=0
d[distance<=0.5]=1
else:
raise ValueError("Unkown loss function {%s}".format(loss))
return d
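# Worked example (illustrative addition, not part of the original module): the
# two loss conventions above label a distance array in opposite directions.
if __name__ == '__main__':
    demo = np.array([0.2, 0.5, 0.9])
    print(compute_distance(demo, "AAAI"))         # [0. 1. 1.] -> distance >= 0.5 maps to 1
    print(compute_distance(demo, "contrastive"))  # [1. 1. 0.] -> distance <= 0.5 maps to 1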
|
{
"content_hash": "3ebc928f11174a563f91e5317407ac83",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 166,
"avg_line_length": 37.15337423312884,
"alnum_prop": 0.6058454425363277,
"repo_name": "halwai/cnn-rnn-siamese-video-similarity",
"id": "a49852466f8dcb328171116dfb8e774093016ec8",
"size": "6056",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eval_helper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "72986"
},
{
"name": "Shell",
"bytes": "485"
}
],
"symlink_target": ""
}
|
import multiprocessing
import os
import signal
import time
import atexit
import threading
import config
import logsupport as L
from controlevents import CEvent, PostEvent, ConsoleEvent
KeepAlive = multiprocessing.Event()
FailsafeInterval = 60
def DevPrint(msg):
with open('/home/pi/Console/.HistoryBuffer/hlog', 'a') as f:
f.write('{}: {}\n'.format(time.time(), msg))
f.flush()
def TempThreadList():
"""
	This routine is just for working cleanly with the PyCharm IDE. If a system launched from
	PyCharm is left running and the PC controlling it goes to sleep, the console gets killed,
	but only partially, leaving zombies and threads running. This code makes sure everything
	gets killed so as not to leave connections to the ISY, which would eventually force it to
	its limit without manual intervention.
"""
time.sleep(10)
while True:
multiprocessing.active_children() # clean any zombie failsafe
# for x in L:
# DevPrint('Process {}: alive: {} pid: {} daemon: {}'.format(x.name, x.is_alive(), x.pid, x.daemon))
threadlist = threading.enumerate()
for thd in threadlist:
# DevPrint('Threadlist: {} alive: {} ident: {} daemon: {} \n'.format(thd.name, thd.is_alive(), thd.ident, thd.daemon))
if thd.name == 'MainThread' and not thd.is_alive():
DevPrint('Main Thread died')
os.kill(os.getpid(), signal.SIGINT) # kill myself
# DevPrint('=================End')
time.sleep(30)
def NoEventInjector():
L.Logs.Log('Starting watchdog activity injector')
while config.Running:
# noinspection PyBroadException
try:
now = time.time()
L.Logs.Log('Inject: {}'.format(now), severity=L.ConsoleDetailHigh)
PostEvent(ConsoleEvent(CEvent.FailSafePing, inject=now))
time.sleep(FailsafeInterval / 2)
except Exception as E:
time.sleep(FailsafeInterval / 2)
DevPrint('Inject Exception {}'.format(repr(E)))
# spurious exceptions during shutdown
DevPrint('Injector exiting')
# noinspection PyProtectedMember,PyUnusedLocal
def EndWatchDog(signum, frame):
DevPrint('Watchdog ending on shutdown {}'.format(signum))
os._exit(0)
# noinspection PyProtectedMember
def WatchdogDying(signum, frame):
try:
if signum == signal.SIGTERM:
DevPrint('Watchdog saw SIGTERM - must be from systemd')
# console should have also seen this - give it time to shut down
time.sleep(30) # we should see a USR1 from console
os._exit(0)
else:
DevPrint('Watchdog dying signum: {} frame: {}'.format(signum, frame))
# noinspection PyBroadException
try:
os.kill(config.sysStore.Console_pid, signal.SIGUSR1)
except:
pass # probably main console already gone
time.sleep(3)
# noinspection PyBroadException
try:
				os.kill(config.sysStore.Console_pid, signal.SIGKILL) # with prejudice
except:
pass # probably already gone
os._exit(0)
except Exception as E:
DevPrint('Exception in WatchdogDying: {}'.format(E))
time.sleep(1)
os._exit(0)
def failsafedeath():
DevPrint('Failsafe exit hook')
DevPrint('failsafedeath {} watching {} at {}'.format(os.getpid(), config.sysStore.Console_pid, time.time()))
os.kill(config.sysStore.Console_pid, signal.SIGUSR1)
time.sleep(3)
	os.kill(config.sysStore.Console_pid, signal.SIGKILL) # with prejudice
# noinspection PyUnusedLocal
def IgnoreHUP(signum, frame):
DevPrint('Watchdog got HUP - ignoring')
def MasterWatchDog():
signal.signal(signal.SIGTERM, WatchdogDying) # don't want the sig handlers from the main console
signal.signal(signal.SIGINT, EndWatchDog)
signal.signal(signal.SIGUSR1, EndWatchDog)
signal.signal(signal.SIGHUP, IgnoreHUP)
# failsafehooks.hook()
atexit.register(failsafedeath)
DevPrint('Master Watchdog Started {} for console pid: {}'.format(os.getpid(), config.sysStore.Console_pid))
runningok = True
while runningok:
while KeepAlive.wait(FailsafeInterval):
KeepAlive.clear()
time.sleep(FailsafeInterval)
runningok = False # no keepalive seen for failsafe interval - try to restart
DevPrint('No keepalive in failsafe interval')
DevPrint('Watchdog loop exit: {}'.format(time.time()))
# noinspection PyBroadException
try:
os.kill(config.sysStore.Console_pid, 0)
except:
DevPrint('Normal watchdog exit')
return
DevPrint('Failsafe interrupt {}'.format(config.sysStore.Console_pid))
os.kill(config.sysStore.Console_pid, signal.SIGUSR1)
time.sleep(3) # wait for exit to complete
try:
os.kill(config.sysStore.Console_pid, 0) # check if console exited - raises exception if it is gone
DevPrint("Failsafe watchdog interrupt didn't reset - killing {}".format(config.sysStore.Console_pid))
os.kill(config.sysStore.Console_pid, signal.SIGKILL)
DevPrint("Failsafe exiting after kill attempt")
except Exception as E:
print('Failsafe exiting')
DevPrint("Failsafe successfully ended console (pid: {}), failsafe (pid: {}) exiting (Exc: {})".format(
config.sysStore.Console_pid,
os.getpid(),repr(E)))
DevPrint('Watchdog exiting')
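# Console-side sketch (hypothetical usage, not part of this module): the main
# console process is expected to signal liveness by setting the shared KeepAlive
# event at least once per FailsafeInterval, e.g. from its event loop:
#
#   import failsafe
#   failsafe.KeepAlive.set()   # resets the watchdog's failsafe timer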
|
{
"content_hash": "e26aaaa88d114b899612575b29e51d14",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 121,
"avg_line_length": 34.61805555555556,
"alnum_prop": 0.724974924774323,
"repo_name": "kevinkahn/softconsole",
"id": "b16758002fff5931ce173ce6050346f3725a40c4",
"size": "4985",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "failsafe.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Euphoria",
"bytes": "267"
},
{
"name": "Python",
"bytes": "839903"
},
{
"name": "Shell",
"bytes": "101927"
}
],
"symlink_target": ""
}
|
import types
import unittest
try:
from io import StringIO
except ImportError:
from StringIO import StringIO
from mock import patch, MagicMock
from binstar_client import errors
from conda_env.specs import binstar
from conda_env.specs.binstar import BinstarSpec
from conda_env.env import Environment
class TestBinstarSpec(unittest.TestCase):
def test_has_can_handle_method(self):
spec = BinstarSpec()
self.assertTrue(hasattr(spec, 'can_handle'))
self.assertIsInstance(spec.can_handle, types.MethodType)
def test_name_not_present(self):
spec = BinstarSpec(filename='filename')
self.assertEqual(spec.can_handle(), False)
self.assertEqual(spec.msg, "Can't process without a name")
def test_invalid_name(self):
spec = BinstarSpec(name='invalid-name')
self.assertEqual(spec.can_handle(), False)
self.assertEqual(spec.msg, "Invalid name, try the format: user/package")
def test_package_not_exist(self):
with patch('conda_env.specs.binstar.get_binstar') as get_binstar_mock:
package = MagicMock(side_effect=errors.NotFound('msg'))
binstar = MagicMock(package=package)
get_binstar_mock.return_value = binstar
spec = BinstarSpec(name='darth/no-exist')
self.assertEqual(spec.package, None)
self.assertEqual(spec.can_handle(), False)
def test_package_without_environment_file(self):
with patch('conda_env.specs.binstar.get_binstar') as get_binstar_mock:
package = MagicMock(return_value={'files': []})
binstar = MagicMock(package=package)
get_binstar_mock.return_value = binstar
spec = BinstarSpec('darth/no-env-file')
self.assertEqual(spec.can_handle(), False)
def test_download_environment(self):
fake_package = {
'files': [{'type': 'env', 'version': '1', 'basename': 'environment.yml'}]
}
yml = StringIO()
yml.write(u"name: env")
yml.seek(0)
fake_req = MagicMock(raw=yml)
with patch('conda_env.specs.binstar.get_binstar') as get_binstar_mock:
package = MagicMock(return_value=fake_package)
downloader = MagicMock(return_value=fake_req)
binstar = MagicMock(package=package, download=downloader)
get_binstar_mock.return_value = binstar
spec = BinstarSpec(name='darth/env-file')
self.assertIsInstance(spec.environment, Environment)
def test_environment_version_sorting(self):
fake_package = {
'files': [
{'type': 'env', 'version': '0.1.1', 'basename': 'environment.yml'},
{'type': 'env', 'version': '0.1a.2', 'basename': 'environment.yml'},
{'type': 'env', 'version': '0.2.0', 'basename': 'environment.yml'},
]
}
yml = StringIO()
yml.write(u"name: env")
yml.seek(0)
fake_req = MagicMock(raw=yml)
with patch('conda_env.specs.binstar.get_binstar') as get_binstar_mock:
package = MagicMock(return_value=fake_package)
downloader = MagicMock(return_value=fake_req)
binstar = MagicMock(package=package, download=downloader)
get_binstar_mock.return_value = binstar
spec = BinstarSpec(name='darth/env-file')
spec.environment
downloader.assert_called_with('darth', 'env-file', '0.2.0', 'environment.yml')
def test_binstar_not_installed(self):
spec = BinstarSpec(name='user/package')
spec.binstar = None
self.assertFalse(spec.can_handle())
self.assertEqual(spec.msg, 'Please install binstar')
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "8e705d151b588c6bdf709bef46d79008",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 90,
"avg_line_length": 39.333333333333336,
"alnum_prop": 0.6226165254237288,
"repo_name": "dan-blanchard/conda-env",
"id": "f42917cd277074595bd7836943482316af1ab25d",
"size": "3776",
"binary": false,
"copies": "6",
"ref": "refs/heads/develop",
"path": "tests/specs/test_binstar.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3386"
},
{
"name": "Python",
"bytes": "61503"
},
{
"name": "Shell",
"bytes": "4746"
}
],
"symlink_target": ""
}
|
import os
import shutil
import tempfile
from xml.etree import ElementTree
from devil.utils import cmd_helper
from pylib import constants
DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
def Dump(apk_path):
"""Dumps class and method information from a APK into a dict via dexdump.
Args:
apk_path: An absolute path to an APK file to dump.
Returns:
A dict in the following format:
{
<package_name>: {
'classes': {
<class_name>: {
'methods': [<method_1>, <method_2>]
}
}
}
}
"""
# TODO(mikecase): Support multi-dex
try:
dexfile_dir = tempfile.mkdtemp()
# Python zipfile module is unable to unzip APKs.
cmd_helper.RunCmd(['unzip', apk_path, 'classes.dex'], cwd=dexfile_dir)
dexfile = os.path.join(dexfile_dir, 'classes.dex')
output_xml = cmd_helper.GetCmdOutput([DEXDUMP_PATH, '-l', 'xml', dexfile])
return _ParseRootNode(ElementTree.fromstring(output_xml))
finally:
shutil.rmtree(dexfile_dir)
def _ParseRootNode(root):
"""Parses the XML output of dexdump. This output is in the following format.
This is a subset of the information contained within dexdump output.
<api>
<package name="foo.bar">
<class name="Class" extends="foo.bar.SuperClass">
<field name="Field">
</field>
<constructor name="Method">
<parameter name="Param" type="int">
</parameter>
</constructor>
<method name="Method">
<parameter name="Param" type="int">
</parameter>
</method>
</class>
</package>
</api>
"""
results = {}
for child in root:
if child.tag == 'package':
package_name = child.attrib['name']
parsed_node = _ParsePackageNode(child)
if package_name in results:
results[package_name]['classes'].update(parsed_node['classes'])
else:
results[package_name] = parsed_node
return results
def _ParsePackageNode(package_node):
"""Parses a <package> node from the dexdump xml output.
Returns:
A dict in the format:
{
'classes': {
<class_1>: {
'methods': [<method_1>, <method_2>]
},
<class_2>: {
'methods': [<method_1>, <method_2>]
},
}
}
"""
classes = {}
for child in package_node:
if child.tag == 'class':
classes[child.attrib['name']] = _ParseClassNode(child)
return {'classes': classes}
def _ParseClassNode(class_node):
"""Parses a <class> node from the dexdump xml output.
Returns:
A dict in the format:
{
'methods': [<method_1>, <method_2>]
}
"""
methods = []
for child in class_node:
if child.tag == 'method':
methods.append(child.attrib['name'])
return {'methods': methods, 'superclass': class_node.attrib['extends']}
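# Illustrative sketch (hypothetical input, not part of the original module) of
# the element tree _ParseRootNode() consumes and the dict it returns:
#
#   >>> xml = ('<api><package name="foo.bar">'
#   ...        '<class name="C" extends="java.lang.Object">'
#   ...        '<method name="m"/></class></package></api>')
#   >>> _ParseRootNode(ElementTree.fromstring(xml))
#   {'foo.bar': {'classes': {'C': {'methods': ['m'], 'superclass': 'java.lang.Object'}}}}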
|
{
"content_hash": "fe823de4342d76baaa9223bd2a1ce9ac",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 78,
"avg_line_length": 26.09009009009009,
"alnum_prop": 0.5939226519337016,
"repo_name": "endlessm/chromium-browser",
"id": "2bb11ce198ec5447993060371eed4d03f673dcbf",
"size": "3059",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "build/android/pylib/utils/dexdump.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from frappe.tests.utils import FrappeTestCase
class TestGender(FrappeTestCase):
pass
|
{
"content_hash": "a19d289948c7372f1797dfbabbf4ba65",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 45,
"avg_line_length": 17.6,
"alnum_prop": 0.8295454545454546,
"repo_name": "frappe/frappe",
"id": "1b428521b70eb53f92b8f1ac7cc5f718afb56f79",
"size": "175",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "frappe/contacts/doctype/gender/test_gender.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "65093"
},
{
"name": "HTML",
"bytes": "250850"
},
{
"name": "JavaScript",
"bytes": "2523337"
},
{
"name": "Less",
"bytes": "10921"
},
{
"name": "Python",
"bytes": "3618097"
},
{
"name": "SCSS",
"bytes": "261690"
},
{
"name": "Vue",
"bytes": "98456"
}
],
"symlink_target": ""
}
|
import re
db_tag = db.plugin_tagging_tag
db_link = db.plugin_tagging_link
def index():
table_name=request.args(0)
record_id=request.args(1)
if not auth.user_id:
return ''
if table_name!='0' and not (table_name in db.tables and record_id):
raise HTTP(404)
form = SQLFORM.factory(Field('tag_name'))
if request.vars.tag_name:
for item in request.vars.tag_name.split(','):
            tag_name = re.compile(r'\s+').sub(' ', item).strip()
if not tag_name[-1:]=='/': tag_name+='/'
tag_exists = tag = db(db_tag.name==tag_name).select().first()
if not tag_exists:
tag = db_tag.insert(name=tag_name, links=1)
link = db(db_link.tag==tag.id)\
(db_link.table_name==table_name)\
(db_link.record_id==record_id).select().first()
if not link:
db_link.insert(tag=tag.id,
table_name=table_name,record_id=record_id)
if tag_exists:
tag.update_record(links=tag.links+1)
for key in request.vars:
if key[:6]=='delete':
link_id=key[6:]
link=db_link[link_id]
del db_link[link_id]
db_tag[link.tag] = dict(links=db_tag[link.tag].links-1)
links = db(db_link.table_name==table_name)\
(db_link.record_id==record_id).select()\
.sort(lambda row: row.tag.name.upper())
return dict(links=links, form=form)
def tag_cloud():
tags = db(db_tag.links>0).select()
if tags:
mc = max([tag.links for tag in tags])
return DIV(_class='plugin_tagging_tag_cloud',
*[SPAN(tag.name[:-1]+' ',_style='font-size:%sem' \
% (0.8+1.0*tag.links/mc)) for tag in tags])
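# Sizing sketch (illustrative arithmetic only): with font-size 0.8 + links/mc em,
# the most-linked tag renders at 1.8em, a tag with half the maximum links at
# 1.3em, and rarely used tags approach 0.8em.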
|
{
"content_hash": "30cecc5cde39f20ff52b2d31c6d14d18",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 73,
"avg_line_length": 38.851063829787236,
"alnum_prop": 0.5273822562979189,
"repo_name": "OpenTreeOfLife/opentree",
"id": "8608137f7f55c633b640d9afcab32b8d859a45a5",
"size": "1826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapp/controllers/plugin_tagging.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "91586"
},
{
"name": "Go",
"bytes": "6808"
},
{
"name": "HTML",
"bytes": "728798"
},
{
"name": "JavaScript",
"bytes": "2145214"
},
{
"name": "Less",
"bytes": "159886"
},
{
"name": "Makefile",
"bytes": "613"
},
{
"name": "PHP",
"bytes": "52477"
},
{
"name": "Python",
"bytes": "750874"
},
{
"name": "Shell",
"bytes": "4890"
}
],
"symlink_target": ""
}
|
""" patrol_smach.py - Version 1.0 2013-04-12
Control a robot to patrol a square area using SMACH
Created for the Pi Robot Project: http://www.pirobot.org
Copyright (c) 2013 Patrick Goebel. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
    http://www.gnu.org/licenses/gpl.html
"""
import rospy
from smach import State, StateMachine
from smach_ros import SimpleActionState, IntrospectionServer
from geometry_msgs.msg import Twist
from rbx2_tasks.task_setup import *
class Patrol():
def __init__(self):
rospy.init_node('patrol_smach', anonymous=False)
# Set the shutdown function (stop the robot)
rospy.on_shutdown(self.shutdown)
# Initialize a number of parameters and variables
setup_task_environment(self)
# Track success rate of getting to the goal locations
self.n_succeeded = 0
self.n_aborted = 0
self.n_preempted = 0
        # A list to hold the navigation waypoints
nav_states = list()
# Turn the waypoints into SMACH states
for waypoint in self.waypoints:
nav_goal = MoveBaseGoal()
nav_goal.target_pose.header.frame_id = 'base_footprint'
nav_goal.target_pose.pose = waypoint
move_base_state = SimpleActionState('move_base', MoveBaseAction, goal=nav_goal, result_cb=self.move_base_result_cb,
exec_timeout=rospy.Duration(10.0),
server_wait_timeout=rospy.Duration(10.0))
nav_states.append(move_base_state)
# Initialize the patrol state machine
self.sm_patrol = StateMachine(outcomes=['succeeded','aborted','preempted'])
# Add the states to the state machine with the appropriate transitions
with self.sm_patrol:
StateMachine.add('NAV_STATE_0', nav_states[0], transitions={'succeeded':'NAV_STATE_1','aborted':'NAV_STATE_1','preempted':'NAV_STATE_1'})
StateMachine.add('NAV_STATE_1', nav_states[1], transitions={'succeeded':'NAV_STATE_2','aborted':'NAV_STATE_2','preempted':'NAV_STATE_2'})
StateMachine.add('NAV_STATE_2', nav_states[2], transitions={'succeeded':'NAV_STATE_3','aborted':'NAV_STATE_3','preempted':'NAV_STATE_3'})
StateMachine.add('NAV_STATE_3', nav_states[3], transitions={'succeeded':'NAV_STATE_4','aborted':'NAV_STATE_4','preempted':'NAV_STATE_4'})
StateMachine.add('NAV_STATE_4', nav_states[0], transitions={'succeeded':'','aborted':'','preempted':''})
# Create and start the SMACH introspection server
intro_server = IntrospectionServer('patrol', self.sm_patrol, '/SM_ROOT')
intro_server.start()
# Execute the state machine for the specified number of patrols
while (self.n_patrols == -1 or self.patrol_count < self.n_patrols) and not rospy.is_shutdown():
sm_outcome = self.sm_patrol.execute()
self.patrol_count += 1
rospy.loginfo("FINISHED PATROL LOOP: " + str(self.patrol_count))
rospy.loginfo('State Machine Outcome: ' + str(sm_outcome))
intro_server.stop()
def move_base_result_cb(self, userdata, status, result):
if status == actionlib.GoalStatus.SUCCEEDED:
self.n_succeeded += 1
elif status == actionlib.GoalStatus.ABORTED:
self.n_aborted += 1
elif status == actionlib.GoalStatus.PREEMPTED:
self.n_preempted += 1
try:
rospy.loginfo("Success rate: " + str(100.0 * self.n_succeeded / (self.n_succeeded + self.n_aborted + self.n_preempted)))
except:
pass
def shutdown(self):
rospy.loginfo("Stopping the robot...")
self.sm_patrol.request_preempt()
self.cmd_vel_pub.publish(Twist())
rospy.sleep(1)
if __name__ == '__main__':
try:
Patrol()
except rospy.ROSInterruptException:
rospy.loginfo("SMACH test finished.")
|
{
"content_hash": "eba459ae5ac18a173368b62a6bf9678a",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 149,
"avg_line_length": 43.21495327102804,
"alnum_prop": 0.6182958477508651,
"repo_name": "Aharobot/inmoov_ros",
"id": "36a31f841954ee707ebc4b79baaf23693477fb26",
"size": "4647",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "robbie_test/nodes/robbie_smach.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Arduino",
"bytes": "86350"
},
{
"name": "C",
"bytes": "1520"
},
{
"name": "C++",
"bytes": "6521"
},
{
"name": "CMake",
"bytes": "2200"
},
{
"name": "Processing",
"bytes": "1678"
},
{
"name": "Python",
"bytes": "547951"
},
{
"name": "Shell",
"bytes": "147"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import datetime
import django
from django.db import models
from django.template import Library
from django.template.loader import get_template
from django.utils.safestring import mark_safe
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.translation import ugettext as _
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.admin.templatetags.admin_list import (
ResultList, result_headers,
)
from django.contrib.admin.utils import (
display_for_field, display_for_value, lookup_field,
)
from ..views import PAGE_VAR, SEARCH_VAR
register = Library()
def items_for_result(view, result):
"""
Generates the actual list of data.
"""
model_admin = view.model_admin
for field_name in view.list_display:
empty_value_display = model_admin.get_empty_value_display()
row_classes = ['field-%s' % field_name]
try:
f, attr, value = lookup_field(field_name, result, model_admin)
except ObjectDoesNotExist:
result_repr = empty_value_display
else:
empty_value_display = getattr(attr, 'empty_value_display', empty_value_display)
if f is None or f.auto_created:
allow_tags = getattr(attr, 'allow_tags', False)
boolean = getattr(attr, 'boolean', False)
if boolean or not value:
allow_tags = True
if django.VERSION >= (1, 9):
result_repr = display_for_value(value, empty_value_display, boolean)
else:
result_repr = display_for_value(value, boolean)
# Strip HTML tags in the resulting text, except if the
# function has an "allow_tags" attribute set to True.
if allow_tags:
result_repr = mark_safe(result_repr)
if isinstance(value, (datetime.date, datetime.time)):
row_classes.append('nowrap')
else:
if isinstance(f, models.ManyToOneRel):
field_val = getattr(result, f.name)
if field_val is None:
result_repr = empty_value_display
else:
result_repr = field_val
else:
if django.VERSION >= (1, 9):
result_repr = display_for_field(value, f, empty_value_display)
else:
result_repr = display_for_field(value, f)
if isinstance(f, (models.DateField, models.TimeField, models.ForeignKey)):
row_classes.append('nowrap')
if force_text(result_repr) == '':
result_repr = mark_safe(' ')
row_classes.extend(model_admin.get_extra_class_names_for_field_col(field_name, result))
row_attributes_dict = model_admin.get_extra_attrs_for_field_col(field_name, result)
        row_attributes_dict['class'] = ' '.join(row_classes)
row_attributes = ''.join(' %s="%s"' % (key, val) for key, val in row_attributes_dict.items())
row_attributes_safe = mark_safe(row_attributes)
yield format_html('<td{}>{}</td>', row_attributes_safe, result_repr)
def results(view, object_list):
for item in object_list:
yield ResultList(None, items_for_result(view, item))
@register.inclusion_tag("wagtailmodeladmin/includes/result_list.html",
takes_context=True)
def result_list(context):
"""
Displays the headers and data list together
"""
view = context['view']
object_list = context['object_list']
headers = list(result_headers(view))
num_sorted_fields = 0
for h in headers:
if h['sortable'] and h['sorted']:
num_sorted_fields += 1
context.update({
'result_headers': headers,
'num_sorted_fields': num_sorted_fields,
'results': list(results(view, object_list))})
return context
@register.simple_tag
def pagination_link_previous(current_page, view):
if current_page.has_previous():
previous_page_number0 = current_page.previous_page_number() - 1
return format_html(
'<li class="prev"><a href="%s" class="icon icon-arrow-left">%s</a></li>' %
(view.get_query_string({PAGE_VAR: previous_page_number0}), _('Previous'))
)
return ''
@register.simple_tag
def pagination_link_next(current_page, view):
if current_page.has_next():
next_page_number0 = current_page.next_page_number() - 1
return format_html(
'<li class="next"><a href="%s" class="icon icon-arrow-right-after">%s</a></li>' %
(view.get_query_string({PAGE_VAR: next_page_number0}), _('Next'))
)
return ''
@register.inclusion_tag("wagtailmodeladmin/includes/search_form.html",
takes_context=True)
def search_form(context):
context.update({'search_var': SEARCH_VAR})
return context
@register.simple_tag
def admin_list_filter(view, spec):
template_name = spec.template
if template_name == 'admin/filter.html':
template_name = 'wagtailmodeladmin/includes/filter.html'
tpl = get_template(template_name)
return tpl.render({
'title': spec.title,
'choices': list(spec.choices(view)),
'spec': spec,
})
@register.inclusion_tag("wagtailmodeladmin/includes/result_row.html",
takes_context=True)
def result_row_display(context, index=0):
obj = context['object_list'][index]
view = context['view']
context.update({
'obj': obj,
'action_buttons': view.get_buttons_for_obj(obj),
})
return context
@register.inclusion_tag("wagtailmodeladmin/includes/result_row_value.html",
takes_context=True)
def result_row_value_display(context, index=0):
add_action_buttons = False
item = context['item']
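    # Each 'item' is a full '<td ...>...</td>' cell from items_for_result(); the
    # last five characters are the closing '</td>' tag, held back below so the
    # template can presumably append action buttons inside the cell before
    # re-closing it.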
closing_tag = mark_safe(item[-5:])
request = context['request']
modeladmin = context['view'].model_admin
field_name = modeladmin.get_list_display(request)[index]
if field_name == modeladmin.get_list_display_add_buttons(request):
add_action_buttons = True
item = mark_safe(item[0:-5])
context.update({
'item': item,
'add_action_buttons': add_action_buttons,
'closing_tag': closing_tag,
})
return context
@register.filter
def get_content_type_for_obj(obj):
"""
Return the model name/"content type" as a string
e.g BlogPage, NewsListingPage.
Can be used with "slugify" to create CSS-friendly classnames
Usage: {{ self|content_type|slugify }}
"""
return obj.__class__._meta.verbose_name
|
{
"content_hash": "914afdff4b1948592d6458d975a90a5b",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 101,
"avg_line_length": 36.170212765957444,
"alnum_prop": 0.610735294117647,
"repo_name": "ababic/wagtailmodeladmin",
"id": "213a8f2e7db534924fb90276e35e283f0f1e5f12",
"size": "6800",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "wagtailmodeladmin/templatetags/wagtailmodeladmin_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2647"
},
{
"name": "HTML",
"bytes": "14218"
},
{
"name": "Python",
"bytes": "91261"
}
],
"symlink_target": ""
}
|
"""This script outputs the package name specified in the pubspec.yaml"""
import argparse
import os
import sys
# TODO(johnmccutchan): Use the yaml package to parse.
def PackageName(line):
assert line.startswith("name:")
return line.split(":")[1].strip()
def main(pubspec_file):
source_file = open(pubspec_file, "r")
for line in source_file:
if line.startswith("name:"):
print(PackageName(line))
return 0
source_file.close()
# Couldn't find it.
return -1
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="This script outputs the package name specified in the"
"pubspec.yaml")
parser.add_argument("--pubspec",
dest="pubspec_file",
metavar="<pubspec-file>",
type=str,
required=True,
help="Path to pubspec file")
args = parser.parse_args()
sys.exit(main(args.pubspec_file))
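# Example (illustrative): given a pubspec.yaml containing the line
# "name: sky_engine", PackageName('name: sky_engine\n') returns 'sky_engine',
# which main() prints before exiting with status 0.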
|
{
"content_hash": "d77cd466fbfbd9d582f7830904dfa252",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 73,
"avg_line_length": 29.363636363636363,
"alnum_prop": 0.6016511867905057,
"repo_name": "jamesr/sky_engine",
"id": "db8c9e93193edba6a88f78e496cb01fcb1d66c41",
"size": "1156",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "build/dart/tools/dart_package_name.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "2706"
},
{
"name": "C",
"bytes": "304169"
},
{
"name": "C++",
"bytes": "22856719"
},
{
"name": "Dart",
"bytes": "1299416"
},
{
"name": "Groff",
"bytes": "29030"
},
{
"name": "Java",
"bytes": "769231"
},
{
"name": "JavaScript",
"bytes": "27365"
},
{
"name": "Makefile",
"bytes": "402"
},
{
"name": "Objective-C",
"bytes": "136889"
},
{
"name": "Objective-C++",
"bytes": "431695"
},
{
"name": "Python",
"bytes": "2890060"
},
{
"name": "Shell",
"bytes": "173354"
},
{
"name": "Yacc",
"bytes": "31141"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
}
|
import numpy as np
from hyperion.model import AnalyticalYSOModel
from hyperion.util.constants import rsun, lsun, au, msun, yr, c
# Initialize the model
m = AnalyticalYSOModel()
# Read in stellar spectrum
wav, fnu = np.loadtxt('kt04000g+3.5z-2.0.ascii', unpack=True)
nu = c / (wav * 1.e-4)
# Set the stellar parameters
m.star.radius = 2.09 * rsun
m.star.spectrum = (nu, fnu)
m.star.luminosity = lsun
m.star.mass = 0.5 * msun
# Add a flared disk
disk = m.add_flared_disk()
disk.mass = 0.01 * msun
disk.rmin = 7 * m.star.radius
disk.rmax = 200 * au
disk.r_0 = m.star.radius
disk.h_0 = 0.01 * disk.r_0
disk.p = -1.0
disk.beta = 1.25
disk.dust = 'kmh_lite.hdf5'
# Add an Ulrich envelope
envelope = m.add_ulrich_envelope()
envelope.rc = disk.rmax
envelope.mdot = 5.e-6 * msun / yr
envelope.rmin = 7 * m.star.radius
envelope.rmax = 5000 * au
envelope.dust = 'kmh_lite.hdf5'
# Add a bipolar cavity
cavity = envelope.add_bipolar_cavity()
cavity.power = 1.5
cavity.theta_0 = 20
cavity.r_0 = envelope.rmax
cavity.rho_0 = 5e4 * 3.32e-24
cavity.rho_exp = 0.
cavity.dust = 'kmh_lite.hdf5'
# Use raytracing to improve s/n of thermal/source emission
m.set_raytracing(True)
# Use the modified random walk
m.set_mrw(True, gamma=2.)
# Set up grid
m.set_spherical_polar_grid_auto(399, 199, 1)
# Set up SED
sed = m.add_peeled_images(sed=True, image=False)
sed.set_viewing_angles(np.linspace(0., 90., 10), np.repeat(45., 10))
sed.set_wavelength_range(150, 0.02, 2000.)
# Set number of photons
m.set_n_photons(initial=1e6, imaging=1e6,
raytracing_sources=1e4, raytracing_dust=1e6)
# Set number of temperature iterations and convergence criterion
m.set_n_initial_iterations(10)
m.set_convergence(True, percentile=99.0, absolute=2.0, relative=1.1)
# Write out file
m.write('class1_example.rtin')
m.run('class1_example.rtout', mpi=True)
|
{
"content_hash": "bd1ca6e7da0e579b7a8f86fcd1b1d177",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 68,
"avg_line_length": 25.91549295774648,
"alnum_prop": 0.7076086956521739,
"repo_name": "bluescarni/hyperion",
"id": "219f3e20906e23408c2dc83b8406c6da0dafa425",
"size": "1840",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/tutorials/scripts/class1_example_setup.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "50946"
},
{
"name": "CSS",
"bytes": "97"
},
{
"name": "FORTRAN",
"bytes": "555248"
},
{
"name": "Python",
"bytes": "886196"
}
],
"symlink_target": ""
}
|
"""
Instrument to report system (CPU, memory, network) and
process (CPU, memory, garbage collection) metrics. By default, the
following metrics are configured:
.. code:: python
{
"system.cpu.time": ["idle", "user", "system", "irq"],
"system.cpu.utilization": ["idle", "user", "system", "irq"],
"system.memory.usage": ["used", "free", "cached"],
"system.memory.utilization": ["used", "free", "cached"],
"system.swap.usage": ["used", "free"],
"system.swap.utilization": ["used", "free"],
"system.disk.io": ["read", "write"],
"system.disk.operations": ["read", "write"],
"system.disk.time": ["read", "write"],
"system.network.dropped.packets": ["transmit", "receive"],
"system.network.packets": ["transmit", "receive"],
"system.network.errors": ["transmit", "receive"],
"system.network.io": ["transmit", "receive"],
"system.network.connections": ["family", "type"],
"system.thread_count": None
"runtime.memory": ["rss", "vms"],
"runtime.cpu.time": ["user", "system"],
}
Usage
-----
.. code:: python
from opentelemetry.metrics import set_meter_provider
from opentelemetry.instrumentation.system_metrics import SystemMetricsInstrumentor
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import ConsoleMetricExporter, PeriodicExportingMetricReader
exporter = ConsoleMetricExporter()
set_meter_provider(MeterProvider([PeriodicExportingMetricReader(exporter)]))
SystemMetricsInstrumentor().instrument()
# metrics are collected asynchronously
input("...")
# to configure custom metrics
configuration = {
"system.memory.usage": ["used", "free", "cached"],
"system.cpu.time": ["idle", "user", "system", "irq"],
"system.network.io": ["transmit", "receive"],
"runtime.memory": ["rss", "vms"],
"runtime.cpu.time": ["user", "system"],
}
SystemMetricsInstrumentor(config=configuration).instrument()
API
---
"""
import gc
import os
import threading
from platform import python_implementation
from typing import Collection, Dict, Iterable, List, Optional
import psutil
# FIXME Remove this pylint disabling line when the GitHub issue is cleared
# pylint: disable=no-name-in-module
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.system_metrics.package import _instruments
from opentelemetry.instrumentation.system_metrics.version import __version__
from opentelemetry.metrics import CallbackOptions, Observation, get_meter
from opentelemetry.sdk.util import get_dict_as_key
_DEFAULT_CONFIG = {
"system.cpu.time": ["idle", "user", "system", "irq"],
"system.cpu.utilization": ["idle", "user", "system", "irq"],
"system.memory.usage": ["used", "free", "cached"],
"system.memory.utilization": ["used", "free", "cached"],
"system.swap.usage": ["used", "free"],
"system.swap.utilization": ["used", "free"],
"system.disk.io": ["read", "write"],
"system.disk.operations": ["read", "write"],
"system.disk.time": ["read", "write"],
"system.network.dropped.packets": ["transmit", "receive"],
"system.network.packets": ["transmit", "receive"],
"system.network.errors": ["transmit", "receive"],
"system.network.io": ["transmit", "receive"],
"system.network.connections": ["family", "type"],
"system.thread_count": None,
"runtime.memory": ["rss", "vms"],
"runtime.cpu.time": ["user", "system"],
"runtime.gc_count": None,
}
class SystemMetricsInstrumentor(BaseInstrumentor):
def __init__(
self,
labels: Optional[Dict[str, str]] = None,
config: Optional[Dict[str, List[str]]] = None,
):
super().__init__()
if config is None:
self._config = _DEFAULT_CONFIG
else:
self._config = config
self._labels = {} if labels is None else labels
self._meter = None
self._python_implementation = python_implementation().lower()
self._proc = psutil.Process(os.getpid())
self._system_cpu_time_labels = self._labels.copy()
self._system_cpu_utilization_labels = self._labels.copy()
self._system_memory_usage_labels = self._labels.copy()
self._system_memory_utilization_labels = self._labels.copy()
self._system_swap_usage_labels = self._labels.copy()
self._system_swap_utilization_labels = self._labels.copy()
self._system_disk_io_labels = self._labels.copy()
self._system_disk_operations_labels = self._labels.copy()
self._system_disk_time_labels = self._labels.copy()
self._system_disk_merged_labels = self._labels.copy()
self._system_network_dropped_packets_labels = self._labels.copy()
self._system_network_packets_labels = self._labels.copy()
self._system_network_errors_labels = self._labels.copy()
self._system_network_io_labels = self._labels.copy()
self._system_network_connections_labels = self._labels.copy()
self._system_thread_count_labels = self._labels.copy()
self._runtime_memory_labels = self._labels.copy()
self._runtime_cpu_time_labels = self._labels.copy()
self._runtime_gc_count_labels = self._labels.copy()
def instrumentation_dependencies(self) -> Collection[str]:
return _instruments
def _instrument(self, **kwargs):
# pylint: disable=too-many-branches
meter_provider = kwargs.get("meter_provider")
self._meter = get_meter(
__name__,
__version__,
meter_provider,
)
if "system.cpu.time" in self._config:
self._meter.create_observable_counter(
name="system.cpu.time",
callbacks=[self._get_system_cpu_time],
description="System CPU time",
unit="seconds",
)
if "system.cpu.utilization" in self._config:
self._meter.create_observable_gauge(
name="system.cpu.utilization",
callbacks=[self._get_system_cpu_utilization],
description="System CPU utilization",
unit="1",
)
if "system.memory.usage" in self._config:
self._meter.create_observable_gauge(
name="system.memory.usage",
callbacks=[self._get_system_memory_usage],
description="System memory usage",
unit="bytes",
)
if "system.memory.utilization" in self._config:
self._meter.create_observable_gauge(
name="system.memory.utilization",
callbacks=[self._get_system_memory_utilization],
description="System memory utilization",
unit="1",
)
if "system.swap.usage" in self._config:
self._meter.create_observable_gauge(
name="system.swap.usage",
callbacks=[self._get_system_swap_usage],
description="System swap usage",
unit="pages",
)
if "system.swap.utilization" in self._config:
self._meter.create_observable_gauge(
name="system.swap.utilization",
callbacks=[self._get_system_swap_utilization],
description="System swap utilization",
unit="1",
)
# TODO Add _get_system_swap_page_faults
# self._meter.create_observable_counter(
# name="system.swap.page_faults",
# callbacks=[self._get_system_swap_page_faults],
# description="System swap page faults",
# unit="faults",
# value_type=int,
# )
# TODO Add _get_system_swap_page_operations
# self._meter.create_observable_counter(
# name="system.swap.page_operations",
# callbacks=self._get_system_swap_page_operations,
# description="System swap page operations",
# unit="operations",
# value_type=int,
# )
if "system.disk.io" in self._config:
self._meter.create_observable_counter(
name="system.disk.io",
callbacks=[self._get_system_disk_io],
description="System disk IO",
unit="bytes",
)
if "system.disk.operations" in self._config:
self._meter.create_observable_counter(
name="system.disk.operations",
callbacks=[self._get_system_disk_operations],
description="System disk operations",
unit="operations",
)
if "system.disk.time" in self._config:
self._meter.create_observable_counter(
name="system.disk.time",
callbacks=[self._get_system_disk_time],
description="System disk time",
unit="seconds",
)
# TODO Add _get_system_filesystem_usage
# self.accumulator.register_valueobserver(
# callback=self._get_system_filesystem_usage,
# name="system.filesystem.usage",
# description="System filesystem usage",
# unit="bytes",
# value_type=int,
# )
# TODO Add _get_system_filesystem_utilization
# self._meter.create_observable_gauge(
# callback=self._get_system_filesystem_utilization,
# name="system.filesystem.utilization",
# description="System filesystem utilization",
# unit="1",
# value_type=float,
# )
# TODO Filesystem information can be obtained with os.statvfs in Unix-like
# OSs, how to do the same in Windows?
if "system.network.dropped.packets" in self._config:
self._meter.create_observable_counter(
name="system.network.dropped_packets",
callbacks=[self._get_system_network_dropped_packets],
description="System network dropped_packets",
unit="packets",
)
if "system.network.packets" in self._config:
self._meter.create_observable_counter(
name="system.network.packets",
callbacks=[self._get_system_network_packets],
description="System network packets",
unit="packets",
)
if "system.network.errors" in self._config:
self._meter.create_observable_counter(
name="system.network.errors",
callbacks=[self._get_system_network_errors],
description="System network errors",
unit="errors",
)
if "system.network.io" in self._config:
self._meter.create_observable_counter(
name="system.network.io",
callbacks=[self._get_system_network_io],
description="System network io",
unit="bytes",
)
if "system.network.connections" in self._config:
self._meter.create_observable_up_down_counter(
name="system.network.connections",
callbacks=[self._get_system_network_connections],
description="System network connections",
unit="connections",
)
if "system.thread_count" in self._config:
self._meter.create_observable_gauge(
name="system.thread_count",
callbacks=[self._get_system_thread_count],
description="System active threads count",
)
if "runtime.memory" in self._config:
self._meter.create_observable_counter(
name=f"runtime.{self._python_implementation}.memory",
callbacks=[self._get_runtime_memory],
description=f"Runtime {self._python_implementation} memory",
unit="bytes",
)
if "runtime.cpu.time" in self._config:
self._meter.create_observable_counter(
name=f"runtime.{self._python_implementation}.cpu_time",
callbacks=[self._get_runtime_cpu_time],
description=f"Runtime {self._python_implementation} CPU time",
unit="seconds",
)
if "runtime.gc_count" in self._config:
self._meter.create_observable_counter(
name=f"runtime.{self._python_implementation}.gc_count",
callbacks=[self._get_runtime_gc_count],
description=f"Runtime {self._python_implementation} GC count",
unit="bytes",
)
def _uninstrument(self, **__):
pass
def _get_system_cpu_time(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for system CPU time"""
for cpu, times in enumerate(psutil.cpu_times(percpu=True)):
for metric in self._config["system.cpu.time"]:
if hasattr(times, metric):
self._system_cpu_time_labels["state"] = metric
self._system_cpu_time_labels["cpu"] = cpu + 1
yield Observation(
getattr(times, metric),
self._system_cpu_time_labels.copy(),
)
def _get_system_cpu_utilization(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for system CPU utilization"""
for cpu, times_percent in enumerate(
psutil.cpu_times_percent(percpu=True)
):
for metric in self._config["system.cpu.utilization"]:
if hasattr(times_percent, metric):
self._system_cpu_utilization_labels["state"] = metric
self._system_cpu_utilization_labels["cpu"] = cpu + 1
yield Observation(
getattr(times_percent, metric) / 100,
self._system_cpu_utilization_labels.copy(),
)
def _get_system_memory_usage(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for memory usage"""
virtual_memory = psutil.virtual_memory()
for metric in self._config["system.memory.usage"]:
self._system_memory_usage_labels["state"] = metric
if hasattr(virtual_memory, metric):
yield Observation(
getattr(virtual_memory, metric),
self._system_memory_usage_labels.copy(),
)
def _get_system_memory_utilization(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for memory utilization"""
system_memory = psutil.virtual_memory()
for metric in self._config["system.memory.utilization"]:
self._system_memory_utilization_labels["state"] = metric
if hasattr(system_memory, metric):
yield Observation(
getattr(system_memory, metric) / system_memory.total,
self._system_memory_utilization_labels.copy(),
)
def _get_system_swap_usage(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for swap usage"""
system_swap = psutil.swap_memory()
for metric in self._config["system.swap.usage"]:
self._system_swap_usage_labels["state"] = metric
if hasattr(system_swap, metric):
yield Observation(
getattr(system_swap, metric),
self._system_swap_usage_labels.copy(),
)
def _get_system_swap_utilization(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for swap utilization"""
system_swap = psutil.swap_memory()
for metric in self._config["system.swap.utilization"]:
if hasattr(system_swap, metric):
self._system_swap_utilization_labels["state"] = metric
yield Observation(
getattr(system_swap, metric) / system_swap.total
if system_swap.total
else 0,
self._system_swap_utilization_labels.copy(),
)
def _get_system_disk_io(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for disk IO"""
for device, counters in psutil.disk_io_counters(perdisk=True).items():
for metric in self._config["system.disk.io"]:
if hasattr(counters, f"{metric}_bytes"):
self._system_disk_io_labels["device"] = device
self._system_disk_io_labels["direction"] = metric
yield Observation(
getattr(counters, f"{metric}_bytes"),
self._system_disk_io_labels.copy(),
)
def _get_system_disk_operations(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for disk operations"""
for device, counters in psutil.disk_io_counters(perdisk=True).items():
for metric in self._config["system.disk.operations"]:
if hasattr(counters, f"{metric}_count"):
self._system_disk_operations_labels["device"] = device
self._system_disk_operations_labels["direction"] = metric
yield Observation(
getattr(counters, f"{metric}_count"),
self._system_disk_operations_labels.copy(),
)
def _get_system_disk_time(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for disk time"""
for device, counters in psutil.disk_io_counters(perdisk=True).items():
for metric in self._config["system.disk.time"]:
if hasattr(counters, f"{metric}_time"):
self._system_disk_time_labels["device"] = device
self._system_disk_time_labels["direction"] = metric
yield Observation(
getattr(counters, f"{metric}_time") / 1000,
self._system_disk_time_labels.copy(),
)
def _get_system_disk_merged(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for disk merged operations"""
# FIXME The units in the spec is 1, it seems like it should be
# operations or the value type should be Double
for device, counters in psutil.disk_io_counters(perdisk=True).items():
for metric in self._config["system.disk.time"]:
if hasattr(counters, f"{metric}_merged_count"):
self._system_disk_merged_labels["device"] = device
self._system_disk_merged_labels["direction"] = metric
yield Observation(
getattr(counters, f"{metric}_merged_count"),
self._system_disk_merged_labels.copy(),
)
def _get_system_network_dropped_packets(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for network dropped packets"""
for device, counters in psutil.net_io_counters(pernic=True).items():
for metric in self._config["system.network.dropped.packets"]:
in_out = {"receive": "in", "transmit": "out"}[metric]
if hasattr(counters, f"drop{in_out}"):
self._system_network_dropped_packets_labels[
"device"
] = device
self._system_network_dropped_packets_labels[
"direction"
] = metric
yield Observation(
getattr(counters, f"drop{in_out}"),
self._system_network_dropped_packets_labels.copy(),
)
def _get_system_network_packets(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for network packets"""
for device, counters in psutil.net_io_counters(pernic=True).items():
for metric in self._config["system.network.dropped.packets"]:
recv_sent = {"receive": "recv", "transmit": "sent"}[metric]
if hasattr(counters, f"packets_{recv_sent}"):
self._system_network_packets_labels["device"] = device
self._system_network_packets_labels["direction"] = metric
yield Observation(
getattr(counters, f"packets_{recv_sent}"),
self._system_network_packets_labels.copy(),
)
def _get_system_network_errors(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for network errors"""
for device, counters in psutil.net_io_counters(pernic=True).items():
for metric in self._config["system.network.errors"]:
in_out = {"receive": "in", "transmit": "out"}[metric]
if hasattr(counters, f"err{in_out}"):
self._system_network_errors_labels["device"] = device
self._system_network_errors_labels["direction"] = metric
yield Observation(
getattr(counters, f"err{in_out}"),
self._system_network_errors_labels.copy(),
)
def _get_system_network_io(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for network IO"""
for device, counters in psutil.net_io_counters(pernic=True).items():
for metric in self._config["system.network.dropped.packets"]:
recv_sent = {"receive": "recv", "transmit": "sent"}[metric]
if hasattr(counters, f"bytes_{recv_sent}"):
self._system_network_io_labels["device"] = device
self._system_network_io_labels["direction"] = metric
yield Observation(
getattr(counters, f"bytes_{recv_sent}"),
self._system_network_io_labels.copy(),
)
def _get_system_network_connections(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for network connections"""
# TODO How to find the device identifier for a particular
# connection?
connection_counters = {}
for net_connection in psutil.net_connections():
for metric in self._config["system.network.connections"]:
self._system_network_connections_labels["protocol"] = {
1: "tcp",
2: "udp",
}[net_connection.type.value]
self._system_network_connections_labels[
"state"
] = net_connection.status
self._system_network_connections_labels[metric] = getattr(
net_connection, metric
)
connection_counters_key = get_dict_as_key(
self._system_network_connections_labels
)
if connection_counters_key in connection_counters:
connection_counters[connection_counters_key]["counter"] += 1
else:
connection_counters[connection_counters_key] = {
"counter": 1,
"labels": self._system_network_connections_labels.copy(),
}
for connection_counter in connection_counters.values():
yield Observation(
connection_counter["counter"],
connection_counter["labels"],
)
def _get_system_thread_count(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for active thread count"""
yield Observation(
threading.active_count(), self._system_thread_count_labels
)
def _get_runtime_memory(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for runtime memory"""
proc_memory = self._proc.memory_info()
for metric in self._config["runtime.memory"]:
if hasattr(proc_memory, metric):
self._runtime_memory_labels["type"] = metric
yield Observation(
getattr(proc_memory, metric),
self._runtime_memory_labels.copy(),
)
def _get_runtime_cpu_time(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for runtime CPU time"""
proc_cpu = self._proc.cpu_times()
for metric in self._config["runtime.cpu.time"]:
if hasattr(proc_cpu, metric):
self._runtime_cpu_time_labels["type"] = metric
yield Observation(
getattr(proc_cpu, metric),
self._runtime_cpu_time_labels.copy(),
)
def _get_runtime_gc_count(
self, options: CallbackOptions
) -> Iterable[Observation]:
"""Observer callback for garbage collection"""
for index, count in enumerate(gc.get_count()):
self._runtime_gc_count_labels["count"] = str(index)
yield Observation(count, self._runtime_gc_count_labels.copy())
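The observer callbacks above only produce data once they are registered on asynchronous instruments. What follows is a minimal, hedged sketch of that wiring using the public OpenTelemetry metrics API; the meter name, instrument name and description are illustrative assumptions rather than values taken from this instrumentation.
import psutil
from opentelemetry import metrics
from opentelemetry.metrics import CallbackOptions, Observation
def disk_io_callback(options: CallbackOptions):
    # Yield one Observation per device and direction, mirroring the pattern
    # used by the observer callbacks in the module above.
    for device, counters in psutil.disk_io_counters(perdisk=True).items():
        for direction in ("read", "write"):
            if hasattr(counters, f"{direction}_bytes"):
                yield Observation(
                    getattr(counters, f"{direction}_bytes"),
                    {"device": device, "direction": direction},
                )
meter = metrics.get_meter("demo.meter")  # hypothetical meter name
meter.create_observable_counter(
    name="system.disk.io.demo",  # hypothetical instrument name
    callbacks=[disk_io_callback],
    unit="By",
    description="Disk bytes transferred (illustrative)",
)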
|
{
"content_hash": "0248c40c46601713f5078fd231162c20",
"timestamp": "",
"source": "github",
"line_count": 635,
"max_line_length": 101,
"avg_line_length": 40.67874015748031,
"alnum_prop": 0.5633153962293368,
"repo_name": "open-telemetry/opentelemetry-python-contrib",
"id": "420bad5d66d57d31bab07e9d569297e54530b3de",
"size": "26415",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "instrumentation/opentelemetry-instrumentation-system-metrics/src/opentelemetry/instrumentation/system_metrics/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "211"
},
{
"name": "HTML",
"bytes": "118"
},
{
"name": "Python",
"bytes": "1960979"
},
{
"name": "Shell",
"bytes": "7256"
}
],
"symlink_target": ""
}
|
from rest_framework import serializers
from django.contrib.auth.hashers import make_password
from rest_framework.exceptions import ValidationError
from django.contrib.auth import get_user_model
from common.serializers import JobSerializer
from job_auth.serializers import UserSerializer
from .models import (
Employer,
EmployerJobs
)
class EmployerSerializer(serializers.ModelSerializer):
user = UserSerializer(required=True)
employer_location = serializers.CharField(read_only=True)
def create(self, validated_data):
try:
user = get_user_model().objects.get(
username=validated_data['user']['username'])
except get_user_model().DoesNotExist:
validated_data['user']['password'] = make_password(
validated_data['user']['password'])
user = get_user_model().objects.create(**validated_data['user'])
validated_data['user'] = user
return super(EmployerSerializer, self).create(validated_data)
class Meta:
model = Employer
class EmployerJobsSerializer(serializers.ModelSerializer):
job = JobSerializer(required=True)
employer_details = serializers.SerializerMethodField()
def create(self, validated_data):
job = self.initial_data['job']
job_serializer = JobSerializer(data=job)
if job_serializer.is_valid():
job_obj = job_serializer.save()
validated_data['job'] = job_obj
return super(EmployerJobsSerializer, self).create(validated_data)
else:
raise ValidationError(job_serializer.errors)
def get_employer_details(self, obj):
return EmployerSerializer(obj.employer).data
class Meta:
model = EmployerJobs
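A brief, hedged usage sketch of the get-or-create behaviour in EmployerSerializer.create; the payload below is invented, and any additional Employer fields are assumptions since the Employer model itself is not shown in this file.
payload = {"user": {"username": "jane", "password": "s3cret"}}  # hypothetical input
serializer = EmployerSerializer(data=payload)
if serializer.is_valid():
    # Reuses an existing user with this username, or creates one with a hashed
    # password, and attaches it to the new Employer instance.
    employer = serializer.save()
else:
    print(serializer.errors)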
|
{
"content_hash": "cce0c07f4d06e83e894dd230427d7b62",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 77,
"avg_line_length": 35.08,
"alnum_prop": 0.6841505131128849,
"repo_name": "liyocee/job_hunt",
"id": "b7f9b3b5cbf1e3ee157843d8fb54663f6849f16f",
"size": "1754",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/employer/serializers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "32420"
},
{
"name": "HTML",
"bytes": "13185"
},
{
"name": "JavaScript",
"bytes": "42477"
},
{
"name": "Python",
"bytes": "28358"
}
],
"symlink_target": ""
}
|
from datetime import date
from dateutil.easter import easter
from dateutil.relativedelta import relativedelta as rd, FR
from holidays.constants import JAN, APR, MAY, AUG, DEC
from holidays.constants import SUN
from holidays.holiday_base import HolidayBase
class Netherlands(HolidayBase):
country = "NL"
def __init__(self, **kwargs):
# http://www.iamsterdam.com/en/plan-your-trip/practical-info/public-holidays
HolidayBase.__init__(self, **kwargs)
def _populate(self, year):
        # New Year's Day
self[date(year, JAN, 1)] = "Nieuwjaarsdag"
easter_date = easter(year)
# Easter
self[easter_date] = "Eerste paasdag"
# Good friday
self[easter_date + rd(weekday=FR(-1))] = "Goede Vrijdag"
# Second easter day
self[easter_date + rd(days=1)] = "Tweede paasdag"
# Ascension day
self[easter_date + rd(days=39)] = "Hemelvaart"
# Pentecost
self[easter_date + rd(days=49)] = "Eerste Pinksterdag"
# Pentecost monday
self[easter_date + rd(days=50)] = "Tweede Pinksterdag"
# First christmas
self[date(year, DEC, 25)] = "Eerste Kerstdag"
# Second christmas
self[date(year, DEC, 26)] = "Tweede Kerstdag"
# Liberation day
if year >= 1945 and year % 5 == 0:
self[date(year, MAY, 5)] = "Bevrijdingsdag"
        # King's day
if year >= 2014:
kings_day = date(year, APR, 27)
if kings_day.weekday() == SUN:
kings_day = kings_day - rd(days=1)
self[kings_day] = "Koningsdag"
# Queen's day
if 1891 <= year <= 2013:
queens_day = date(year, APR, 30)
if year <= 1948:
queens_day = date(year, AUG, 31)
if queens_day.weekday() == SUN:
if year < 1980:
queens_day = queens_day + rd(days=1)
else:
queens_day = queens_day - rd(days=1)
self[queens_day] = "Koninginnedag"
class NL(Netherlands):
pass
class NLD(Netherlands):
pass
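A short, hedged usage sketch for the Netherlands class above, following the usual holidays-package pattern of membership tests against a populated instance; the year 2020 is an arbitrary choice.
from datetime import date
nl = Netherlands(years=[2020])
print(date(2020, 4, 27) in nl)     # True: Koningsdag (27 April 2020 is a Monday)
print(nl.get(date(2020, 5, 5)))    # "Bevrijdingsdag" (2020 is divisible by 5)
print(nl.get(date(2020, 12, 25)))  # "Eerste Kerstdag"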
|
{
"content_hash": "58019212aab278029355ced927ae56b3",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 84,
"avg_line_length": 26.65,
"alnum_prop": 0.5605065666041276,
"repo_name": "ryanss/holidays.py",
"id": "cc76b418e467d11cd25c526bd82b56a4cbf97e56",
"size": "2634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "holidays/countries/netherlands.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "214061"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import decimal
import json
from django.contrib.auth import logout
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy, reverse
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.views.generic import DetailView
from django.views.generic import FormView
from django.views.generic import TemplateView
from django.views.generic import View
from braces.views import CsrfExemptMixin
from braces.views import FormValidMessageMixin
from braces.views import LoginRequiredMixin
from braces.views import SelectRelatedMixin
import stripe
from .forms import PlanForm, CancelSubscriptionForm
from .mixins import PaymentsContextMixin, SubscriptionMixin
from .models import CurrentSubscription
from .models import Customer
from .models import Event
from .models import EventProcessingException
from .settings import PLAN_LIST
from .settings import CANCELLATION_AT_PERIOD_END
from .settings import PRORATION_POLICY_FOR_UPGRADES
from .settings import PY3
from .settings import User
from .sync import sync_customer
class ChangeCardView(LoginRequiredMixin, PaymentsContextMixin, DetailView):
# TODO - needs tests
# Needs a form
# Not done yet
template_name = "djstripe/change_card.html"
def get_object(self):
if hasattr(self, "customer"):
return self.customer
self.customer, created = Customer.get_or_create(self.request.user)
return self.customer
def post(self, request, *args, **kwargs):
customer = self.get_object()
try:
send_invoice = customer.card_fingerprint == ""
customer.update_card(
request.POST.get("stripe_token")
)
if send_invoice:
customer.send_invoice()
customer.retry_unpaid_invoices()
except stripe.CardError as e:
messages.info(request, "Stripe Error")
return render(
request,
self.template_name,
{
"customer": self.get_object(),
"stripe_error": e.message
}
)
messages.info(request, "Your card is now updated.")
return redirect(self.get_post_success_url())
def get_post_success_url(self):
""" Makes it easier to do custom dj-stripe integrations. """
return reverse("djstripe:account")
class CancelSubscriptionView(LoginRequiredMixin, SubscriptionMixin, FormView):
# TODO - needs tests
template_name = "djstripe/cancel_subscription.html"
form_class = CancelSubscriptionForm
success_url = reverse_lazy("djstripe:account")
def form_valid(self, form):
customer, created = Customer.get_or_create(self.request.user)
current_subscription = customer.cancel_subscription(at_period_end=CANCELLATION_AT_PERIOD_END)
if current_subscription.status == current_subscription.STATUS_CANCELLED:
# If no pro-rate, they get kicked right out.
messages.info(self.request, "Your subscription is now cancelled.")
# logout the user
logout(self.request)
return redirect("home")
else:
# If pro-rate, they get some time to stay.
messages.info(self.request, "Your subscription status is now '{a}' until '{b}'".format(
a=current_subscription.status, b=current_subscription.current_period_end
)
)
return super(CancelSubscriptionView, self).form_valid(form)
class WebHook(CsrfExemptMixin, View):
def post(self, request, *args, **kwargs):
if PY3:
# Handles Python 3 conversion of bytes to str
body = request.body.decode(encoding="UTF-8")
else:
# Handles Python 2
body = request.body
data = json.loads(body)
if Event.objects.filter(stripe_id=data["id"]).exists():
EventProcessingException.objects.create(
data=data,
message="Duplicate event record",
traceback=""
)
else:
event = Event.objects.create(
stripe_id=data["id"],
kind=data["type"],
livemode=data["livemode"],
webhook_message=data
)
event.validate()
event.process()
return HttpResponse()
class HistoryView(LoginRequiredMixin, SelectRelatedMixin, DetailView):
# TODO - needs tests
template_name = "djstripe/history.html"
model = Customer
select_related = ["invoice"]
def get_object(self):
customer, created = Customer.get_or_create(self.request.user)
return customer
class SyncHistoryView(CsrfExemptMixin, LoginRequiredMixin, View):
template_name = "djstripe/includes/_history_table.html"
# TODO - needs tests
def post(self, request, *args, **kwargs):
return render(
request,
self.template_name,
{"customer": sync_customer(request.user)}
)
class AccountView(LoginRequiredMixin, SelectRelatedMixin, TemplateView):
# TODO - needs tests
template_name = "djstripe/account.html"
def get_context_data(self, *args, **kwargs):
context = super(AccountView, self).get_context_data(**kwargs)
customer, created = Customer.get_or_create(self.request.user)
context['customer'] = customer
try:
context['subscription'] = customer.current_subscription
except CurrentSubscription.DoesNotExist:
context['subscription'] = None
context['plans'] = PLAN_LIST
return context
################## Subscription views
class SubscribeFormView(
LoginRequiredMixin,
FormValidMessageMixin,
SubscriptionMixin,
FormView):
# TODO - needs tests
form_class = PlanForm
template_name = "djstripe/subscribe_form.html"
success_url = reverse_lazy("djstripe:history")
form_valid_message = "You are now subscribed!"
def post(self, request, *args, **kwargs):
"""
Handles POST requests, instantiating a form instance with the passed
        POST variables and then checking it for validity.
"""
form_class = self.get_form_class()
form = self.get_form(form_class)
if form.is_valid():
try:
customer, created = Customer.get_or_create(self.request.user)
customer.update_card(self.request.POST.get("stripe_token"))
customer.subscribe(form.cleaned_data["plan"])
except stripe.StripeError as e:
# add form error here
self.error = e.args[0]
return self.form_invalid(form)
# redirect to confirmation page
return self.form_valid(form)
else:
return self.form_invalid(form)
class ChangePlanView(LoginRequiredMixin,
FormValidMessageMixin,
SubscriptionMixin,
FormView):
form_class = PlanForm
template_name = "djstripe/subscribe_form.html"
success_url = reverse_lazy("djstripe:history")
form_valid_message = "You've just changed your plan!"
def post(self, request, *args, **kwargs):
form = PlanForm(request.POST)
customer = request.user.customer
if form.is_valid():
try:
"""
When a customer upgrades their plan, and PRORATION_POLICY_FOR_UPGRADES is set to True,
                then we force the proration of their current plan and use it towards the upgraded plan,
no matter what PRORATION_POLICY is set to.
"""
if PRORATION_POLICY_FOR_UPGRADES:
current_subscription_amount = customer.current_subscription.amount
selected_plan_name = form.cleaned_data["plan"]
selected_plan = next(
(plan for plan in PLAN_LIST if plan["plan"] == selected_plan_name)
)
selected_plan_price = selected_plan["price"]
if not isinstance(selected_plan["price"], decimal.Decimal):
selected_plan_price = selected_plan["price"] / decimal.Decimal("100")
""" Is it an upgrade """
if selected_plan_price > current_subscription_amount:
customer.subscribe(selected_plan_name, prorate=True)
else:
customer.subscribe(selected_plan_name)
else:
customer.subscribe(form.cleaned_data["plan"])
except stripe.StripeError as e:
self.error = e.message
return self.form_invalid(form)
except Exception as e:
raise e
return self.form_valid(form)
else:
return self.form_invalid(form)
######### Web services
class CheckAvailableUserAttributeView(View):
def get(self, request, *args, **kwargs):
attr_name = self.kwargs['attr_name']
not_available = User.objects.filter(
**{attr_name: request.GET.get("v", "")}
).exists()
return HttpResponse(json.dumps(not not_available))
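The upgrade branch in ChangePlanView turns on normalising the configured plan price, often stored as an integer number of cents, into a Decimal before comparing it with the current subscription amount. Below is a small worked sketch of that arithmetic; the plan dictionary and the amounts are invented for illustration.
import decimal
current_subscription_amount = decimal.Decimal("9.99")
selected_plan = {"plan": "pro", "price": 1999}  # hypothetical plan, price in cents
selected_plan_price = selected_plan["price"]
if not isinstance(selected_plan_price, decimal.Decimal):
    selected_plan_price = selected_plan_price / decimal.Decimal("100")  # -> Decimal("19.99")
is_upgrade = selected_plan_price > current_subscription_amount
print(selected_plan_price, is_upgrade)  # 19.99 True, so subscribe(plan, prorate=True)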
|
{
"content_hash": "374f8b732d413a3b4c13c16de80118cb",
"timestamp": "",
"source": "github",
"line_count": 259,
"max_line_length": 102,
"avg_line_length": 36.285714285714285,
"alnum_prop": 0.6120451159821239,
"repo_name": "mwarkentin/dj-stripe",
"id": "ecf4b3084a2ff422765e0f8ea1c6ae87ca266969",
"size": "9422",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "djstripe/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "7003"
},
{
"name": "Python",
"bytes": "204019"
},
{
"name": "Shell",
"bytes": "6466"
}
],
"symlink_target": ""
}
|
from django.conf.urls import url
import views
from rest_framework.urlpatterns import format_suffix_patterns
urlpatterns = [
url(r'^project/', views.ListCreateProject.as_view()),
]
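format_suffix_patterns is imported above but never applied. If suffix-based content negotiation (for example /project.json) is the intent, the conventional step would be the single line below; this is an assumption about intent, not part of the original file.
urlpatterns = format_suffix_patterns(urlpatterns)  # hypothetical wiring, enables .json/.api suffixes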
|
{
"content_hash": "eeacebacc57f9724987d444253b90d8d",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 61,
"avg_line_length": 23.25,
"alnum_prop": 0.7634408602150538,
"repo_name": "vinayraghavan/timetracker",
"id": "ac6132e466a6f7f4c89282c2b4be7f48c6edaf33",
"size": "186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tracker_api/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2196"
},
{
"name": "Python",
"bytes": "10130"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import ApiKey
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationApiKeySettingsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationApiKeySettingsPermissionTest, self).setUp()
key = ApiKey.objects.create(organization=self.organization)
self.path = reverse('sentry-organization-api-key-settings', args=[
self.organization.slug, key.id
])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_member_cannot_load(self):
self.assert_member_cannot_access(self.path)
def test_owner_can_load(self):
self.assert_owner_can_access(self.path)
class OrganizationApiKeySettingsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
key = ApiKey.objects.create(organization=organization)
path = reverse('sentry-organization-api-key-settings', args=[
organization.slug, key.id,
])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-api-key-settings.html')
assert resp.context['organization'] == organization
assert resp.context['key'] == key
def test_not_found(self):
organization = self.create_organization(name='foo', owner=self.user)
path = reverse('sentry-organization-api-key-settings', args=[
organization.slug, 99999,
])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 404
|
{
"content_hash": "53a40a5799e8bc3d5e8831d7f3bd87d6",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 82,
"avg_line_length": 30.694915254237287,
"alnum_prop": 0.6797349530646052,
"repo_name": "mitsuhiko/sentry",
"id": "e02faa6220bc255ad7e789e642d0630b08d52bd8",
"size": "1811",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sentry/web/frontend/test_organization_api_key_settings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "171113"
},
{
"name": "Python",
"bytes": "877258"
}
],
"symlink_target": ""
}
|
__author__ = 'Luke Merrett'
from random import randint
from Helpers import datehelper
class Age:
__minimum_potential_lifespan_in_seconds = 86400 # 1 day
__total_potential_lifespan_in_seconds = 31536000 # 1 year
__birth_date = None
__lifespan_in_seconds = None
def __init__(self):
self.__birth_date = datehelper.todays_date()
self.__lifespan_in_seconds = randint(
self.__minimum_potential_lifespan_in_seconds,
self.__total_potential_lifespan_in_seconds)
def current_age_in_seconds(self):
"""
Gets the current age of the pet in seconds
:return: The total age of the pet in seconds
"""
        return datehelper.get_total_seconds_difference(datehelper.todays_date(), self.__birth_date)
def current_age_string(self):
"""
Gets the age of the pet in a human readable string
:return: A human readable form of the pets age
"""
current_age = datehelper.get_difference_as_relative_delta(datehelper.todays_date(), self.__birth_date)
return "Your pet is currently %d years %d months %d days %d hours %d minutes old" % (
current_age.years,
current_age.months,
current_age.days,
current_age.hours,
current_age.minutes)
def has_pet_reached_its_lifespan(self):
"""
        Returns a value indicating whether the pet has reached its current lifespan
:return: True if the pet is dead, false if the pet is still kicking around.
"""
time_of_death = datehelper.add_seconds_to_date(self.__birth_date, self.__lifespan_in_seconds)
return datehelper.is_date_earlier_than_today(time_of_death)
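A small, hedged usage sketch for the Age class; it assumes the Helpers.datehelper module referenced above is importable (its implementation is not included here), and the printed values depend on the randomly chosen lifespan.
pet_age = Age()
print(pet_age.current_age_in_seconds())        # 0 on the day the pet is created
print(pet_age.current_age_string())            # "Your pet is currently 0 years ... old"
print(pet_age.has_pet_reached_its_lifespan())  # False until the random lifespan elapses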
|
{
"content_hash": "9ebe91ae12d8912dc1cea5683c612c78",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 110,
"avg_line_length": 38.37777777777778,
"alnum_prop": 0.6340474811812391,
"repo_name": "lukemerrett/PythonPet",
"id": "2b8ad00e20df56c866da38bc46e43233ef979afd",
"size": "1727",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PetObjects/age.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6864"
}
],
"symlink_target": ""
}
|
"""Miscellaneous inheritance-related tests, many very old.
These are generally tests derived from specific user issues.
"""
from sqlalchemy import exists
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import select
from sqlalchemy import Sequence
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import Unicode
from sqlalchemy import util
from sqlalchemy.orm import class_mapper
from sqlalchemy.orm import clear_mappers
from sqlalchemy.orm import column_property
from sqlalchemy.orm import contains_eager
from sqlalchemy.orm import create_session
from sqlalchemy.orm import join
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import mapper
from sqlalchemy.orm import polymorphic_union
from sqlalchemy.orm import Query
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import with_polymorphic
from sqlalchemy.orm.interfaces import MANYTOONE
from sqlalchemy.testing import AssertsExecutionResults
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import function_named
class AttrSettable(object):
def __init__(self, **kwargs):
[setattr(self, k, v) for k, v in kwargs.items()]
def __repr__(self):
return self.__class__.__name__ + "(%s)" % (hex(id(self)))
class RelationshipTest1(fixtures.MappedTest):
"""test self-referential relationships on polymorphic mappers"""
@classmethod
def define_tables(cls, metadata):
global people, managers
people = Table(
"people",
metadata,
Column(
"person_id",
Integer,
Sequence("person_id_seq", optional=True),
primary_key=True,
),
Column(
"manager_id",
Integer,
ForeignKey(
"managers.person_id", use_alter=True, name="mpid_fq"
),
),
Column("name", String(50)),
Column("type", String(30)),
)
managers = Table(
"managers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("status", String(30)),
Column("manager_name", String(50)),
)
def teardown(self):
people.update(values={people.c.manager_id: None}).execute()
super(RelationshipTest1, self).teardown()
def test_parent_refs_descendant(self):
class Person(AttrSettable):
pass
class Manager(Person):
pass
mapper(
Person,
people,
properties={
"manager": relationship(
Manager,
primaryjoin=(people.c.manager_id == managers.c.person_id),
uselist=False,
post_update=True,
)
},
)
mapper(
Manager,
managers,
inherits=Person,
inherit_condition=people.c.person_id == managers.c.person_id,
)
eq_(
class_mapper(Person).get_property("manager").synchronize_pairs,
[(managers.c.person_id, people.c.manager_id)],
)
session = create_session()
p = Person(name="some person")
m = Manager(name="some manager")
p.manager = m
session.add(p)
session.flush()
session.expunge_all()
p = session.query(Person).get(p.person_id)
m = session.query(Manager).get(m.person_id)
assert p.manager is m
def test_descendant_refs_parent(self):
class Person(AttrSettable):
pass
class Manager(Person):
pass
mapper(Person, people)
mapper(
Manager,
managers,
inherits=Person,
inherit_condition=people.c.person_id == managers.c.person_id,
properties={
"employee": relationship(
Person,
primaryjoin=(people.c.manager_id == managers.c.person_id),
foreign_keys=[people.c.manager_id],
uselist=False,
post_update=True,
)
},
)
session = create_session()
p = Person(name="some person")
m = Manager(name="some manager")
m.employee = p
session.add(m)
session.flush()
session.expunge_all()
p = session.query(Person).get(p.person_id)
m = session.query(Manager).get(m.person_id)
assert m.employee is p
class RelationshipTest2(fixtures.MappedTest):
"""test self-referential relationships on polymorphic mappers"""
@classmethod
def define_tables(cls, metadata):
global people, managers, data
people = Table(
"people",
metadata,
Column(
"person_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(50)),
Column("type", String(30)),
)
managers = Table(
"managers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("manager_id", Integer, ForeignKey("people.person_id")),
Column("status", String(30)),
)
data = Table(
"data",
metadata,
Column(
"person_id",
Integer,
ForeignKey("managers.person_id"),
primary_key=True,
),
Column("data", String(30)),
)
def test_relationshiponsubclass_j1_nodata(self):
self._do_test("join1", False)
def test_relationshiponsubclass_j2_nodata(self):
self._do_test("join2", False)
def test_relationshiponsubclass_j1_data(self):
self._do_test("join1", True)
def test_relationshiponsubclass_j2_data(self):
self._do_test("join2", True)
def test_relationshiponsubclass_j3_nodata(self):
self._do_test("join3", False)
def test_relationshiponsubclass_j3_data(self):
self._do_test("join3", True)
def _do_test(self, jointype="join1", usedata=False):
class Person(AttrSettable):
pass
class Manager(Person):
pass
if jointype == "join1":
poly_union = polymorphic_union(
{
"person": people.select(
people.c.type == "person"
).subquery(),
"manager": join(
people,
managers,
people.c.person_id == managers.c.person_id,
),
},
None,
)
polymorphic_on = poly_union.c.type
elif jointype == "join2":
poly_union = polymorphic_union(
{
"person": people.select(
people.c.type == "person"
).subquery(),
"manager": managers.join(
people, people.c.person_id == managers.c.person_id
),
},
None,
)
polymorphic_on = poly_union.c.type
elif jointype == "join3":
poly_union = None
polymorphic_on = people.c.type
if usedata:
class Data(object):
def __init__(self, data):
self.data = data
mapper(Data, data)
mapper(
Person,
people,
with_polymorphic=("*", poly_union),
polymorphic_identity="person",
polymorphic_on=polymorphic_on,
)
if usedata:
mapper(
Manager,
managers,
inherits=Person,
inherit_condition=people.c.person_id == managers.c.person_id,
polymorphic_identity="manager",
properties={
"colleague": relationship(
Person,
primaryjoin=managers.c.manager_id
== people.c.person_id,
lazy="select",
uselist=False,
),
"data": relationship(Data, uselist=False),
},
)
else:
mapper(
Manager,
managers,
inherits=Person,
inherit_condition=people.c.person_id == managers.c.person_id,
polymorphic_identity="manager",
properties={
"colleague": relationship(
Person,
primaryjoin=managers.c.manager_id
== people.c.person_id,
lazy="select",
uselist=False,
)
},
)
sess = create_session()
p = Person(name="person1")
m = Manager(name="manager1")
m.colleague = p
if usedata:
m.data = Data("ms data")
sess.add(m)
sess.flush()
sess.expunge_all()
p = sess.query(Person).get(p.person_id)
m = sess.query(Manager).get(m.person_id)
assert m.colleague is p
if usedata:
assert m.data.data == "ms data"
class RelationshipTest3(fixtures.MappedTest):
"""test self-referential relationships on polymorphic mappers"""
@classmethod
def define_tables(cls, metadata):
global people, managers, data
people = Table(
"people",
metadata,
Column(
"person_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("colleague_id", Integer, ForeignKey("people.person_id")),
Column("name", String(50)),
Column("type", String(30)),
)
managers = Table(
"managers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("status", String(30)),
)
data = Table(
"data",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("data", String(30)),
)
def _generate_test(jointype="join1", usedata=False):
def _do_test(self):
class Person(AttrSettable):
pass
class Manager(Person):
pass
if usedata:
class Data(object):
def __init__(self, data):
self.data = data
if jointype == "join1":
poly_union = polymorphic_union(
{
"manager": managers.join(
people, people.c.person_id == managers.c.person_id
),
"person": people.select(
people.c.type == "person"
).subquery(),
},
None,
)
elif jointype == "join2":
poly_union = polymorphic_union(
{
"manager": join(
people,
managers,
people.c.person_id == managers.c.person_id,
),
"person": people.select(
people.c.type == "person"
).subquery(),
},
None,
)
elif jointype == "join3":
poly_union = people.outerjoin(managers)
elif jointype == "join4":
poly_union = None
if usedata:
mapper(Data, data)
if usedata:
mapper(
Person,
people,
with_polymorphic=("*", poly_union),
polymorphic_identity="person",
polymorphic_on=people.c.type,
properties={
"colleagues": relationship(
Person,
primaryjoin=people.c.colleague_id
== people.c.person_id,
remote_side=people.c.colleague_id,
uselist=True,
),
"data": relationship(Data, uselist=False),
},
)
else:
mapper(
Person,
people,
with_polymorphic=("*", poly_union),
polymorphic_identity="person",
polymorphic_on=people.c.type,
properties={
"colleagues": relationship(
Person,
primaryjoin=people.c.colleague_id
== people.c.person_id,
remote_side=people.c.colleague_id,
uselist=True,
)
},
)
mapper(
Manager,
managers,
inherits=Person,
inherit_condition=people.c.person_id == managers.c.person_id,
polymorphic_identity="manager",
)
sess = create_session()
p = Person(name="person1")
p2 = Person(name="person2")
p3 = Person(name="person3")
m = Manager(name="manager1")
p.colleagues.append(p2)
m.colleagues.append(p3)
if usedata:
p.data = Data("ps data")
m.data = Data("ms data")
sess.add(m)
sess.add(p)
sess.flush()
sess.expunge_all()
p = sess.query(Person).get(p.person_id)
p2 = sess.query(Person).get(p2.person_id)
p3 = sess.query(Person).get(p3.person_id)
m = sess.query(Person).get(m.person_id)
assert len(p.colleagues) == 1
assert p.colleagues == [p2]
assert m.colleagues == [p3]
if usedata:
assert p.data.data == "ps data"
assert m.data.data == "ms data"
do_test = function_named(
_do_test,
"test_relationship_on_base_class_%s_%s"
        % (jointype, usedata and "data" or "nodata"),
)
return do_test
for jointype in ["join1", "join2", "join3", "join4"]:
for data in (True, False):
_fn = _generate_test(jointype, data)
setattr(RelationshipTest3, _fn.__name__, _fn)
del _fn
class RelationshipTest4(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global people, engineers, managers, cars
people = Table(
"people",
metadata,
Column(
"person_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(50)),
)
engineers = Table(
"engineers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("status", String(30)),
)
managers = Table(
"managers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("longer_status", String(70)),
)
cars = Table(
"cars",
metadata,
Column(
"car_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("owner", Integer, ForeignKey("people.person_id")),
)
def test_many_to_one_polymorphic(self):
"""in this test, the polymorphic union is between two subclasses, but
does not include the base table by itself in the union. however, the
primaryjoin condition is going to be against the base table, and its a
many-to-one relationship (unlike the test in polymorph.py) so the
column in the base table is explicit. Can the ClauseAdapter figure out
how to alias the primaryjoin to the polymorphic union ?"""
# class definitions
class Person(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Ordinary person %s" % self.name
class Engineer(Person):
def __repr__(self):
return "Engineer %s, status %s" % (self.name, self.status)
class Manager(Person):
def __repr__(self):
return "Manager %s, status %s" % (
self.name,
self.longer_status,
)
class Car(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Car number %d" % self.car_id
# create a union that represents both types of joins.
employee_join = polymorphic_union(
{
"engineer": people.join(engineers),
"manager": people.join(managers),
},
"type",
"employee_join",
)
person_mapper = mapper(
Person,
people,
with_polymorphic=("*", employee_join),
polymorphic_on=employee_join.c.type,
polymorphic_identity="person",
)
mapper(
Engineer,
engineers,
inherits=person_mapper,
polymorphic_identity="engineer",
)
mapper(
Manager,
managers,
inherits=person_mapper,
polymorphic_identity="manager",
)
mapper(Car, cars, properties={"employee": relationship(person_mapper)})
session = create_session()
        # creating four managers named M1 to M4
for i in range(1, 5):
session.add(Manager(name="M%d" % i, longer_status="YYYYYYYYY"))
        # creating four engineers named E1 to E4
for i in range(1, 5):
session.add(Engineer(name="E%d" % i, status="X"))
session.flush()
engineer4 = (
session.query(Engineer).filter(Engineer.name == "E4").first()
)
manager3 = session.query(Manager).filter(Manager.name == "M3").first()
car1 = Car(employee=engineer4)
session.add(car1)
car2 = Car(employee=manager3)
session.add(car2)
session.flush()
session.expunge_all()
def go():
testcar = (
session.query(Car)
.options(joinedload("employee"))
.get(car1.car_id)
)
assert str(testcar.employee) == "Engineer E4, status X"
self.assert_sql_count(testing.db, go, 1)
car1 = session.query(Car).get(car1.car_id)
usingGet = session.query(person_mapper).get(car1.owner)
usingProperty = car1.employee
assert str(engineer4) == "Engineer E4, status X"
assert str(usingGet) == "Engineer E4, status X"
assert str(usingProperty) == "Engineer E4, status X"
session.expunge_all()
# and now for the lightning round, eager !
def go():
testcar = (
session.query(Car)
.options(joinedload("employee"))
.get(car1.car_id)
)
assert str(testcar.employee) == "Engineer E4, status X"
self.assert_sql_count(testing.db, go, 1)
session.expunge_all()
s = session.query(Car)
c = s.join("employee").filter(Person.name == "E4")[0]
assert c.car_id == car1.car_id
class RelationshipTest5(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global people, engineers, managers, cars
people = Table(
"people",
metadata,
Column(
"person_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(50)),
Column("type", String(50)),
)
engineers = Table(
"engineers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("status", String(30)),
)
managers = Table(
"managers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("longer_status", String(70)),
)
cars = Table(
"cars",
metadata,
Column(
"car_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("owner", Integer, ForeignKey("people.person_id")),
)
def test_eager_empty(self):
"""test parent object with child relationship to an inheriting mapper,
using eager loads, works when there are no child objects present"""
class Person(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Ordinary person %s" % self.name
class Engineer(Person):
def __repr__(self):
return "Engineer %s, status %s" % (self.name, self.status)
class Manager(Person):
def __repr__(self):
return "Manager %s, status %s" % (
self.name,
self.longer_status,
)
class Car(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "Car number %d" % self.car_id
person_mapper = mapper(
Person,
people,
polymorphic_on=people.c.type,
polymorphic_identity="person",
)
mapper(
Engineer,
engineers,
inherits=person_mapper,
polymorphic_identity="engineer",
)
manager_mapper = mapper(
Manager,
managers,
inherits=person_mapper,
polymorphic_identity="manager",
)
mapper(
Car,
cars,
properties={
"manager": relationship(manager_mapper, lazy="joined")
},
)
sess = create_session()
car1 = Car()
car2 = Car()
car2.manager = Manager()
sess.add(car1)
sess.add(car2)
sess.flush()
sess.expunge_all()
carlist = sess.query(Car).all()
assert carlist[0].manager is None
assert carlist[1].manager.person_id == car2.manager.person_id
class RelationshipTest6(fixtures.MappedTest):
"""test self-referential relationships on a single joined-table
inheritance mapper"""
@classmethod
def define_tables(cls, metadata):
global people, managers, data
people = Table(
"people",
metadata,
Column(
"person_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(50)),
)
managers = Table(
"managers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("colleague_id", Integer, ForeignKey("managers.person_id")),
Column("status", String(30)),
)
def test_basic(self):
class Person(AttrSettable):
pass
class Manager(Person):
pass
mapper(Person, people)
mapper(
Manager,
managers,
inherits=Person,
inherit_condition=people.c.person_id == managers.c.person_id,
properties={
"colleague": relationship(
Manager,
primaryjoin=managers.c.colleague_id
== managers.c.person_id,
lazy="select",
uselist=False,
)
},
)
sess = create_session()
m = Manager(name="manager1")
m2 = Manager(name="manager2")
m.colleague = m2
sess.add(m)
sess.flush()
sess.expunge_all()
m = sess.query(Manager).get(m.person_id)
m2 = sess.query(Manager).get(m2.person_id)
assert m.colleague is m2
class RelationshipTest7(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global people, engineers, managers, cars, offroad_cars
cars = Table(
"cars",
metadata,
Column(
"car_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(30)),
)
offroad_cars = Table(
"offroad_cars",
metadata,
Column(
"car_id",
Integer,
ForeignKey("cars.car_id"),
nullable=False,
primary_key=True,
),
)
people = Table(
"people",
metadata,
Column(
"person_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column(
"car_id", Integer, ForeignKey("cars.car_id"), nullable=False
),
Column("name", String(50)),
)
engineers = Table(
"engineers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("field", String(30)),
)
managers = Table(
"managers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("category", String(70)),
)
def test_manytoone_lazyload(self):
"""test that lazy load clause to a polymorphic child mapper generates
correctly [ticket:493]"""
class PersistentObject(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
class Status(PersistentObject):
def __repr__(self):
return "Status %s" % self.name
class Person(PersistentObject):
def __repr__(self):
return "Ordinary person %s" % self.name
class Engineer(Person):
def __repr__(self):
return "Engineer %s, field %s" % (self.name, self.field)
class Manager(Person):
def __repr__(self):
return "Manager %s, category %s" % (self.name, self.category)
class Car(PersistentObject):
def __repr__(self):
return "Car number %d, name %s" % (self.car_id, self.name)
class Offraod_Car(Car):
def __repr__(self):
return "Offroad Car number %d, name %s" % (
self.car_id,
self.name,
)
employee_join = polymorphic_union(
{
"engineer": people.join(engineers),
"manager": people.join(managers),
},
"type",
"employee_join",
)
car_join = polymorphic_union(
{
"car": cars.outerjoin(offroad_cars)
.select(offroad_cars.c.car_id == None)
.reduce_columns()
.subquery(),
"offroad": cars.join(offroad_cars),
},
"type",
"car_join",
)
car_mapper = mapper(
Car,
cars,
with_polymorphic=("*", car_join),
polymorphic_on=car_join.c.type,
polymorphic_identity="car",
)
mapper(
Offraod_Car,
offroad_cars,
inherits=car_mapper,
polymorphic_identity="offroad",
)
person_mapper = mapper(
Person,
people,
with_polymorphic=("*", employee_join),
polymorphic_on=employee_join.c.type,
polymorphic_identity="person",
properties={"car": relationship(car_mapper)},
)
mapper(
Engineer,
engineers,
inherits=person_mapper,
polymorphic_identity="engineer",
)
mapper(
Manager,
managers,
inherits=person_mapper,
polymorphic_identity="manager",
)
session = create_session()
for i in range(1, 4):
if i % 2:
car = Car()
else:
car = Offraod_Car()
session.add(Manager(name="M%d" % i, category="YYYYYYYYY", car=car))
session.add(Engineer(name="E%d" % i, field="X", car=car))
session.flush()
session.expunge_all()
r = session.query(Person).all()
for p in r:
assert p.car_id == p.car.car_id
class RelationshipTest8(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global taggable, users
taggable = Table(
"taggable",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("type", String(30)),
Column("owner_id", Integer, ForeignKey("taggable.id")),
)
users = Table(
"users",
metadata,
Column("id", Integer, ForeignKey("taggable.id"), primary_key=True),
Column("data", String(50)),
)
def test_selfref_onjoined(self):
class Taggable(fixtures.ComparableEntity):
pass
class User(Taggable):
pass
mapper(
Taggable,
taggable,
polymorphic_on=taggable.c.type,
polymorphic_identity="taggable",
properties={
"owner": relationship(
User,
primaryjoin=taggable.c.owner_id == taggable.c.id,
remote_side=taggable.c.id,
)
},
)
mapper(
User,
users,
inherits=Taggable,
polymorphic_identity="user",
inherit_condition=users.c.id == taggable.c.id,
)
u1 = User(data="u1")
t1 = Taggable(owner=u1)
sess = create_session()
sess.add(t1)
sess.flush()
sess.expunge_all()
eq_(
sess.query(Taggable).order_by(Taggable.id).all(),
[User(data="u1"), Taggable(owner=User(data="u1"))],
)
class GenerativeTest(fixtures.TestBase, AssertsExecutionResults):
@classmethod
def setup_class(cls):
# cars---owned by--- people (abstract) --- has a --- status
# | ^ ^ |
# | | | |
# | engineers managers |
# | |
# +--------------------------------------- has a ------+
global metadata, status, people, engineers, managers, cars
metadata = MetaData(testing.db)
# table definitions
status = Table(
"status",
metadata,
Column(
"status_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(20)),
)
people = Table(
"people",
metadata,
Column(
"person_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column(
"status_id",
Integer,
ForeignKey("status.status_id"),
nullable=False,
),
Column("name", String(50)),
)
engineers = Table(
"engineers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("field", String(30)),
)
managers = Table(
"managers",
metadata,
Column(
"person_id",
Integer,
ForeignKey("people.person_id"),
primary_key=True,
),
Column("category", String(70)),
)
cars = Table(
"cars",
metadata,
Column(
"car_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column(
"status_id",
Integer,
ForeignKey("status.status_id"),
nullable=False,
),
Column(
"owner",
Integer,
ForeignKey("people.person_id"),
nullable=False,
),
)
metadata.create_all()
@classmethod
def teardown_class(cls):
metadata.drop_all()
def teardown(self):
clear_mappers()
for t in reversed(metadata.sorted_tables):
t.delete().execute()
def test_join_to(self):
# class definitions
class PersistentObject(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
class Status(PersistentObject):
def __repr__(self):
return "Status %s" % self.name
class Person(PersistentObject):
def __repr__(self):
return "Ordinary person %s" % self.name
class Engineer(Person):
def __repr__(self):
return "Engineer %s, field %s, status %s" % (
self.name,
self.field,
self.status,
)
class Manager(Person):
def __repr__(self):
return "Manager %s, category %s, status %s" % (
self.name,
self.category,
self.status,
)
class Car(PersistentObject):
def __repr__(self):
return "Car number %d" % self.car_id
# create a union that represents both types of joins.
employee_join = polymorphic_union(
{
"engineer": people.join(engineers),
"manager": people.join(managers),
},
"type",
"employee_join",
)
status_mapper = mapper(Status, status)
person_mapper = mapper(
Person,
people,
with_polymorphic=("*", employee_join),
polymorphic_on=employee_join.c.type,
polymorphic_identity="person",
properties={"status": relationship(status_mapper)},
)
engineer_mapper = mapper(
Engineer,
engineers,
inherits=person_mapper,
polymorphic_identity="engineer",
)
mapper(
Manager,
managers,
inherits=person_mapper,
polymorphic_identity="manager",
)
mapper(
Car,
cars,
properties={
"employee": relationship(person_mapper),
"status": relationship(status_mapper),
},
)
session = create_session()
active = Status(name="active")
dead = Status(name="dead")
session.add(active)
session.add(dead)
session.flush()
# TODO: we haven't created assertions for all
# the data combinations created here
        # creating four managers named M1 to M4
        # and four engineers named E1 to E4
        # M4 and E4 are dead
for i in range(1, 5):
if i < 4:
st = active
else:
st = dead
session.add(
Manager(name="M%d" % i, category="YYYYYYYYY", status=st)
)
session.add(Engineer(name="E%d" % i, field="X", status=st))
session.flush()
# get E4
engineer4 = session.query(engineer_mapper).filter_by(name="E4").one()
# create 2 cars for E4, one active and one dead
car1 = Car(employee=engineer4, status=active)
car2 = Car(employee=engineer4, status=dead)
session.add(car1)
session.add(car2)
session.flush()
# this particular adapt used to cause a recursion overflow;
# added here for testing
Query(Person)._adapt_clause(employee_join, False, False)
r = (
session.query(Person)
.filter(Person.name.like("%2"))
.join("status")
.filter_by(name="active")
.order_by(Person.person_id)
)
eq_(
str(list(r)),
"[Manager M2, category YYYYYYYYY, status "
"Status active, Engineer E2, field X, "
"status Status active]",
)
r = (
session.query(Engineer)
.join("status")
.filter(
Person.name.in_(["E2", "E3", "E4", "M4", "M2", "M1"])
& (status.c.name == "active")
)
.order_by(Person.name)
)
eq_(
str(list(r)),
"[Engineer E2, field X, status Status "
"active, Engineer E3, field X, status "
"Status active]",
)
r = session.query(Person).filter(
exists([1], Car.owner == Person.person_id)
)
eq_(str(list(r)), "[Engineer E4, field X, status Status dead]")
class MultiLevelTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global table_Employee, table_Engineer, table_Manager
table_Employee = Table(
"Employee",
metadata,
Column("name", type_=String(100)),
Column(
"id",
primary_key=True,
type_=Integer,
test_needs_autoincrement=True,
),
Column("atype", type_=String(100)),
)
table_Engineer = Table(
"Engineer",
metadata,
Column("machine", type_=String(100)),
Column("id", Integer, ForeignKey("Employee.id"), primary_key=True),
)
table_Manager = Table(
"Manager",
metadata,
Column("duties", type_=String(100)),
Column("id", Integer, ForeignKey("Engineer.id"), primary_key=True),
)
def test_threelevels(self):
class Employee(object):
def set(me, **kargs):
for k, v in kargs.items():
setattr(me, k, v)
return me
def __str__(me):
return str(me.__class__.__name__) + ":" + str(me.name)
__repr__ = __str__
class Engineer(Employee):
pass
class Manager(Engineer):
pass
pu_Employee = polymorphic_union(
{
"Manager": table_Employee.join(table_Engineer).join(
table_Manager
),
"Engineer": select(
[table_Employee, table_Engineer.c.machine],
table_Employee.c.atype == "Engineer",
from_obj=[table_Employee.join(table_Engineer)],
).subquery(),
"Employee": table_Employee.select(
table_Employee.c.atype == "Employee"
).subquery(),
},
None,
"pu_employee",
)
mapper_Employee = mapper(
Employee,
table_Employee,
polymorphic_identity="Employee",
polymorphic_on=pu_Employee.c.atype,
with_polymorphic=("*", pu_Employee),
)
pu_Engineer = polymorphic_union(
{
"Manager": table_Employee.join(table_Engineer).join(
table_Manager
),
"Engineer": select(
[table_Employee, table_Engineer.c.machine],
table_Employee.c.atype == "Engineer",
from_obj=[table_Employee.join(table_Engineer)],
).subquery(),
},
None,
"pu_engineer",
)
mapper_Engineer = mapper(
Engineer,
table_Engineer,
inherit_condition=table_Engineer.c.id == table_Employee.c.id,
inherits=mapper_Employee,
polymorphic_identity="Engineer",
polymorphic_on=pu_Engineer.c.atype,
with_polymorphic=("*", pu_Engineer),
)
mapper(
Manager,
table_Manager,
inherit_condition=table_Manager.c.id == table_Engineer.c.id,
inherits=mapper_Engineer,
polymorphic_identity="Manager",
)
a = Employee().set(name="one")
        b = Engineer().set(name="two", machine="any")
c = Manager().set(name="head", machine="fast", duties="many")
session = create_session()
session.add(a)
session.add(b)
session.add(c)
session.flush()
assert set(session.query(Employee).all()) == set([a, b, c])
assert set(session.query(Engineer).all()) == set([b, c])
assert session.query(Manager).all() == [c]
class ManyToManyPolyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global base_item_table, item_table
global base_item_collection_table, collection_table
base_item_table = Table(
"base_item",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("child_name", String(255), default=None),
)
item_table = Table(
"item",
metadata,
Column(
"id", Integer, ForeignKey("base_item.id"), primary_key=True
),
Column("dummy", Integer, default=0),
)
base_item_collection_table = Table(
"base_item_collection",
metadata,
Column("item_id", Integer, ForeignKey("base_item.id")),
Column("collection_id", Integer, ForeignKey("collection.id")),
)
collection_table = Table(
"collection",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("name", Unicode(255)),
)
def test_pjoin_compile(self):
"""test that remote_side columns in the secondary join table
aren't attempted to be matched to the target polymorphic
selectable"""
class BaseItem(object):
pass
class Item(BaseItem):
pass
class Collection(object):
pass
item_join = polymorphic_union(
{
"BaseItem": base_item_table.select(
base_item_table.c.child_name == "BaseItem"
).subquery(),
"Item": base_item_table.join(item_table),
},
None,
"item_join",
)
mapper(
BaseItem,
base_item_table,
with_polymorphic=("*", item_join),
polymorphic_on=base_item_table.c.child_name,
polymorphic_identity="BaseItem",
properties=dict(
collections=relationship(
Collection,
secondary=base_item_collection_table,
backref="items",
)
),
)
mapper(
Item, item_table, inherits=BaseItem, polymorphic_identity="Item"
)
mapper(Collection, collection_table)
class_mapper(BaseItem)
class CustomPKTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global t1, t2
t1 = Table(
"t1",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("type", String(30), nullable=False),
Column("data", String(30)),
)
# note that the primary key column in t2 is named differently
t2 = Table(
"t2",
metadata,
Column("t2id", Integer, ForeignKey("t1.id"), primary_key=True),
Column("t2data", String(30)),
)
def test_custompk(self):
"""test that the primary_key attribute is propagated to the
polymorphic mapper"""
class T1(object):
pass
class T2(T1):
pass
# create a polymorphic union with the select against the base table
# first. with the join being second, the alias of the union will
# pick up two "primary key" columns. technically the alias should have
# a 2-col pk in any case but the leading select has a NULL for the
# "t2id" column
d = util.OrderedDict()
d["t1"] = t1.select(t1.c.type == "t1").subquery()
d["t2"] = t1.join(t2)
pjoin = polymorphic_union(d, None, "pjoin")
mapper(
T1,
t1,
polymorphic_on=t1.c.type,
polymorphic_identity="t1",
with_polymorphic=("*", pjoin),
primary_key=[pjoin.c.id],
)
mapper(T2, t2, inherits=T1, polymorphic_identity="t2")
ot1 = T1()
ot2 = T2()
sess = create_session()
sess.add(ot1)
sess.add(ot2)
sess.flush()
sess.expunge_all()
# query using get(), using only one value.
# this requires the select_table mapper
# has the same single-col primary key.
assert sess.query(T1).get(ot1.id).id == ot1.id
ot1 = sess.query(T1).get(ot1.id)
ot1.data = "hi"
sess.flush()
def test_pk_collapses(self):
"""test that a composite primary key attribute formed by a join
is "collapsed" into its minimal columns"""
class T1(object):
pass
class T2(T1):
pass
# create a polymorphic union with the select against the base table
# first. with the join being second, the alias of the union will
# pick up two "primary key" columns. technically the alias should have
# a 2-col pk in any case but the leading select has a NULL for the
# "t2id" column
d = util.OrderedDict()
d["t1"] = t1.select(t1.c.type == "t1").subquery()
d["t2"] = t1.join(t2)
pjoin = polymorphic_union(d, None, "pjoin")
mapper(
T1,
t1,
polymorphic_on=t1.c.type,
polymorphic_identity="t1",
with_polymorphic=("*", pjoin),
)
mapper(T2, t2, inherits=T1, polymorphic_identity="t2")
assert len(class_mapper(T1).primary_key) == 1
ot1 = T1()
ot2 = T2()
sess = create_session()
sess.add(ot1)
sess.add(ot2)
sess.flush()
sess.expunge_all()
# query using get(), using only one value. this requires the
# select_table mapper
# has the same single-col primary key.
assert sess.query(T1).get(ot1.id).id == ot1.id
ot1 = sess.query(T1).get(ot1.id)
ot1.data = "hi"
sess.flush()
class InheritingEagerTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global people, employees, tags, peopleTags
people = Table(
"people",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("_type", String(30), nullable=False),
)
employees = Table(
"employees",
metadata,
Column("id", Integer, ForeignKey("people.id"), primary_key=True),
)
tags = Table(
"tags",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("label", String(50), nullable=False),
)
peopleTags = Table(
"peopleTags",
metadata,
Column("person_id", Integer, ForeignKey("people.id")),
Column("tag_id", Integer, ForeignKey("tags.id")),
)
def test_basic(self):
"""test that Query uses the full set of mapper._eager_loaders
when generating SQL"""
class Person(fixtures.ComparableEntity):
pass
class Employee(Person):
def __init__(self, name="bob"):
self.name = name
class Tag(fixtures.ComparableEntity):
def __init__(self, label):
self.label = label
mapper(
Person,
people,
polymorphic_on=people.c._type,
polymorphic_identity="person",
properties={
"tags": relationship(
Tag, secondary=peopleTags, backref="people", lazy="joined"
)
},
)
mapper(
Employee,
employees,
inherits=Person,
polymorphic_identity="employee",
)
mapper(Tag, tags)
session = create_session()
bob = Employee()
session.add(bob)
tag = Tag("crazy")
bob.tags.append(tag)
tag = Tag("funny")
bob.tags.append(tag)
session.flush()
session.expunge_all()
# query from Employee with limit, query needs to apply eager limiting
# subquery
instance = session.query(Employee).filter_by(id=1).limit(1).first()
assert len(instance.tags) == 2
class MissingPolymorphicOnTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table(
"tablea",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("adata", String(50)),
)
Table(
"tableb",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("aid", Integer, ForeignKey("tablea.id")),
Column("data", String(50)),
)
Table(
"tablec",
metadata,
Column("id", Integer, ForeignKey("tablea.id"), primary_key=True),
Column("cdata", String(50)),
)
Table(
"tabled",
metadata,
Column("id", Integer, ForeignKey("tablec.id"), primary_key=True),
Column("ddata", String(50)),
)
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
class B(cls.Comparable):
pass
class C(A):
pass
class D(C):
pass
def test_polyon_col_setsup(self):
tablea, tableb, tablec, tabled = (
self.tables.tablea,
self.tables.tableb,
self.tables.tablec,
self.tables.tabled,
)
A, B, C, D = (
self.classes.A,
self.classes.B,
self.classes.C,
self.classes.D,
)
poly_select = select(
[tablea, tableb.c.data.label("discriminator")],
from_obj=tablea.join(tableb),
).alias("poly")
mapper(B, tableb)
mapper(
A,
tablea,
with_polymorphic=("*", poly_select),
polymorphic_on=poly_select.c.discriminator,
properties={"b": relationship(B, uselist=False)},
)
mapper(C, tablec, inherits=A, polymorphic_identity="c")
mapper(D, tabled, inherits=C, polymorphic_identity="d")
c = C(cdata="c1", adata="a1", b=B(data="c"))
d = D(cdata="c2", adata="a2", ddata="d2", b=B(data="d"))
sess = create_session()
sess.add(c)
sess.add(d)
sess.flush()
sess.expunge_all()
eq_(
sess.query(A).all(),
[C(cdata="c1", adata="a1"), D(cdata="c2", adata="a2", ddata="d2")],
)
class JoinedInhAdjacencyTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table(
"people",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("type", String(30)),
)
Table(
"users",
metadata,
Column("id", Integer, ForeignKey("people.id"), primary_key=True),
Column("supervisor_id", Integer, ForeignKey("people.id")),
)
Table(
"dudes",
metadata,
Column("id", Integer, ForeignKey("users.id"), primary_key=True),
)
@classmethod
def setup_classes(cls):
class Person(cls.Comparable):
pass
class User(Person):
pass
class Dude(User):
pass
def _roundtrip(self):
User = self.classes.User
sess = Session()
u1 = User()
u2 = User()
u2.supervisor = u1
sess.add_all([u1, u2])
sess.commit()
assert u2.supervisor is u1
def _dude_roundtrip(self):
Dude, User = self.classes.Dude, self.classes.User
sess = Session()
u1 = User()
d1 = Dude()
d1.supervisor = u1
sess.add_all([u1, d1])
sess.commit()
assert d1.supervisor is u1
def test_joined_to_base(self):
people, users = self.tables.people, self.tables.users
Person, User = self.classes.Person, self.classes.User
mapper(
Person,
people,
polymorphic_on=people.c.type,
polymorphic_identity="person",
)
mapper(
User,
users,
inherits=Person,
polymorphic_identity="user",
inherit_condition=(users.c.id == people.c.id),
properties={
"supervisor": relationship(
Person, primaryjoin=users.c.supervisor_id == people.c.id
)
},
)
assert User.supervisor.property.direction is MANYTOONE
self._roundtrip()
def test_joined_to_same_subclass(self):
people, users = self.tables.people, self.tables.users
Person, User = self.classes.Person, self.classes.User
mapper(
Person,
people,
polymorphic_on=people.c.type,
polymorphic_identity="person",
)
mapper(
User,
users,
inherits=Person,
polymorphic_identity="user",
inherit_condition=(users.c.id == people.c.id),
properties={
"supervisor": relationship(
User,
primaryjoin=users.c.supervisor_id == people.c.id,
remote_side=people.c.id,
foreign_keys=[users.c.supervisor_id],
)
},
)
assert User.supervisor.property.direction is MANYTOONE
self._roundtrip()
def test_joined_subclass_to_superclass(self):
people, users, dudes = (
self.tables.people,
self.tables.users,
self.tables.dudes,
)
Person, User, Dude = (
self.classes.Person,
self.classes.User,
self.classes.Dude,
)
mapper(
Person,
people,
polymorphic_on=people.c.type,
polymorphic_identity="person",
)
mapper(
User,
users,
inherits=Person,
polymorphic_identity="user",
inherit_condition=(users.c.id == people.c.id),
)
mapper(
Dude,
dudes,
inherits=User,
polymorphic_identity="dude",
inherit_condition=(dudes.c.id == users.c.id),
properties={
"supervisor": relationship(
User,
primaryjoin=users.c.supervisor_id == people.c.id,
remote_side=people.c.id,
foreign_keys=[users.c.supervisor_id],
)
},
)
assert Dude.supervisor.property.direction is MANYTOONE
self._dude_roundtrip()
class Ticket2419Test(fixtures.DeclarativeMappedTest):
"""Test [ticket:2419]'s test case."""
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
class A(Base):
__tablename__ = "a"
id = Column(
Integer, primary_key=True, test_needs_autoincrement=True
)
class B(Base):
__tablename__ = "b"
id = Column(
Integer, primary_key=True, test_needs_autoincrement=True
)
ds = relationship("D")
es = relationship("E")
class C(A):
__tablename__ = "c"
id = Column(Integer, ForeignKey("a.id"), primary_key=True)
b_id = Column(Integer, ForeignKey("b.id"))
b = relationship("B", primaryjoin=b_id == B.id)
class D(Base):
__tablename__ = "d"
id = Column(
Integer, primary_key=True, test_needs_autoincrement=True
)
b_id = Column(Integer, ForeignKey("b.id"))
class E(Base):
__tablename__ = "e"
id = Column(
Integer, primary_key=True, test_needs_autoincrement=True
)
b_id = Column(Integer, ForeignKey("b.id"))
@testing.fails_on(
["oracle", "mssql"],
"Oracle / SQL server engines can't handle this, "
"not clear if there's an expression-level bug on our "
"end though",
)
def test_join_w_eager_w_any(self):
B, C, D = (self.classes.B, self.classes.C, self.classes.D)
s = Session(testing.db)
b = B(ds=[D()])
s.add_all([C(b=b)])
s.commit()
q = s.query(B, B.ds.any(D.id == 1)).options(joinedload("es"))
q = q.join(C, C.b_id == B.id)
q = q.limit(5)
eq_(q.all(), [(b, True)])
class ColSubclassTest(
fixtures.DeclarativeMappedTest, testing.AssertsCompiledSQL
):
"""Test [ticket:2918]'s test case."""
run_create_tables = run_deletes = None
__dialect__ = "default"
@classmethod
def setup_classes(cls):
from sqlalchemy.schema import Column
Base = cls.DeclarativeBasic
class A(Base):
__tablename__ = "a"
id = Column(Integer, primary_key=True)
class MySpecialColumn(Column):
pass
class B(A):
__tablename__ = "b"
id = Column(ForeignKey("a.id"), primary_key=True)
x = MySpecialColumn(String)
def test_polymorphic_adaptation(self):
A, B = self.classes.A, self.classes.B
s = Session()
self.assert_compile(
s.query(A).join(B).filter(B.x == "test"),
"SELECT a.id AS a_id FROM a JOIN "
"(a AS a_1 JOIN b AS b_1 ON a_1.id = b_1.id) "
"ON a.id = b_1.id WHERE b_1.x = :x_1",
)
class CorrelateExceptWPolyAdaptTest(
fixtures.DeclarativeMappedTest, testing.AssertsCompiledSQL
):
# test [ticket:4537]'s test case.
run_create_tables = run_deletes = None
run_setup_classes = run_setup_mappers = run_define_tables = "each"
__dialect__ = "default"
def _fixture(self, use_correlate_except):
Base = self.DeclarativeBasic
class Superclass(Base):
__tablename__ = "s1"
id = Column(Integer, primary_key=True)
common_id = Column(ForeignKey("c.id"))
common_relationship = relationship(
"Common", uselist=False, innerjoin=True, lazy="noload"
)
discriminator_field = Column(String)
__mapper_args__ = {
"polymorphic_identity": "superclass",
"polymorphic_on": discriminator_field,
}
class Subclass(Superclass):
__tablename__ = "s2"
id = Column(ForeignKey("s1.id"), primary_key=True)
__mapper_args__ = {"polymorphic_identity": "subclass"}
class Common(Base):
__tablename__ = "c"
id = Column(Integer, primary_key=True)
if use_correlate_except:
num_superclass = column_property(
select([func.count(Superclass.id)])
.where(Superclass.common_id == id)
.correlate_except(Superclass)
.scalar_subquery()
)
if not use_correlate_except:
Common.num_superclass = column_property(
select([func.count(Superclass.id)])
.where(Superclass.common_id == Common.id)
.correlate(Common)
.scalar_subquery()
)
return Common, Superclass
def test_poly_query_on_correlate(self):
Common, Superclass = self._fixture(False)
poly = with_polymorphic(Superclass, "*")
s = Session()
q = (
s.query(poly)
.options(contains_eager(poly.common_relationship))
.join(poly.common_relationship)
.filter(Common.id == 1)
)
        # note the order of c.id, subquery changes based on whether we
        # used correlate or correlate_except; this is only with the
        # patch in place. Not sure why this happens.
self.assert_compile(
q,
"SELECT c.id AS c_id, (SELECT count(s1.id) AS count_1 "
"FROM s1 LEFT OUTER JOIN s2 ON s1.id = s2.id "
"WHERE s1.common_id = c.id) AS anon_1, "
"s1.id AS s1_id, "
"s1.common_id AS s1_common_id, "
"s1.discriminator_field AS s1_discriminator_field, "
"s2.id AS s2_id FROM s1 "
"LEFT OUTER JOIN s2 ON s1.id = s2.id "
"JOIN c ON c.id = s1.common_id WHERE c.id = :id_1",
)
def test_poly_query_on_correlate_except(self):
Common, Superclass = self._fixture(True)
poly = with_polymorphic(Superclass, "*")
s = Session()
q = (
s.query(poly)
.options(contains_eager(poly.common_relationship))
.join(poly.common_relationship)
.filter(Common.id == 1)
)
# c.id, subquery are reversed.
self.assert_compile(
q,
"SELECT (SELECT count(s1.id) AS count_1 "
"FROM s1 LEFT OUTER JOIN s2 ON s1.id = s2.id "
"WHERE s1.common_id = c.id) AS anon_1, "
"c.id AS c_id, s1.id AS s1_id, "
"s1.common_id AS s1_common_id, "
"s1.discriminator_field AS s1_discriminator_field, "
"s2.id AS s2_id FROM s1 "
"LEFT OUTER JOIN s2 ON s1.id = s2.id "
"JOIN c ON c.id = s1.common_id WHERE c.id = :id_1",
)
|
{
"content_hash": "b8073fb1de798cc7d3861605000489ea",
"timestamp": "",
"source": "github",
"line_count": 2252,
"max_line_length": 79,
"avg_line_length": 29.32948490230906,
"alnum_prop": 0.48281604844814535,
"repo_name": "wujuguang/sqlalchemy",
"id": "2f8677f8bf41cc59a590f63108a8686bc9e0ad28",
"size": "66050",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/orm/inheritance/test_assorted_poly.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "45930"
},
{
"name": "Python",
"bytes": "11287383"
}
],
"symlink_target": ""
}
|
from mega import *
|
{
"content_hash": "73b91f6a6aa336df8d43f4864e975f55",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 18,
"avg_line_length": 19,
"alnum_prop": 0.7368421052631579,
"repo_name": "BiuroCo/mega",
"id": "aadf12eabee56b1b1aa06612dfe4e5ec35298300",
"size": "61",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "bindings/python/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7267"
},
{
"name": "C",
"bytes": "1218890"
},
{
"name": "C++",
"bytes": "1961834"
},
{
"name": "Java",
"bytes": "219582"
},
{
"name": "Makefile",
"bytes": "1959"
},
{
"name": "Objective-C",
"bytes": "212166"
},
{
"name": "Objective-C++",
"bytes": "107918"
},
{
"name": "PHP",
"bytes": "26335"
},
{
"name": "Python",
"bytes": "112828"
},
{
"name": "QMake",
"bytes": "7098"
},
{
"name": "Shell",
"bytes": "35699"
},
{
"name": "VimL",
"bytes": "794"
}
],
"symlink_target": ""
}
|
"""Support for the MySQL database via the MySQL Connector/Python adapter.
MySQL Connector/Python is available at:
https://launchpad.net/myconnpy
Connecting
-----------
Connect string format::
mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
"""
import re
from sqlalchemy.dialects.mysql.base import (MySQLDialect,
MySQLExecutionContext, MySQLCompiler, MySQLIdentifierPreparer,
BIT)
from sqlalchemy.engine import base as engine_base, default
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import exc, log, schema, sql, types as sqltypes, util
from sqlalchemy import processors
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
def get_lastrowid(self):
return self.cursor.lastrowid
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod(self, binary, **kw):
return self.process(binary.left) + " %% " + self.process(binary.right)
def post_process_text(self, text):
return text.replace('%', '%%')
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value.replace("%", "%%")
class _myconnpyBIT(BIT):
def result_processor(self, dialect, coltype):
"""MySQL-connector already converts mysql bits, so."""
return None
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = 'mysqlconnector'
supports_unicode_statements = True
supports_unicode_binds = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
supports_native_decimal = True
default_paramstyle = 'format'
execution_ctx_cls = MySQLExecutionContext_mysqlconnector
statement_compiler = MySQLCompiler_mysqlconnector
preparer = MySQLIdentifierPreparer_mysqlconnector
colspecs = util.update_copy(
MySQLDialect.colspecs,
{
BIT: _myconnpyBIT,
}
)
@classmethod
def dbapi(cls):
from mysql import connector
return connector
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
opts.update(url.query)
util.coerce_kw_type(opts, 'buffered', bool)
util.coerce_kw_type(opts, 'raise_on_warnings', bool)
opts['buffered'] = True
opts['raise_on_warnings'] = True
# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.
if self.dbapi is not None:
try:
from mysql.connector.constants import ClientFlag
client_flags = opts.get('client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
except:
pass
return [[], opts]
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
from mysql.connector.constants import ClientFlag
dbapi_con.set_client_flag(ClientFlag.FOUND_ROWS)
version = dbapi_con.get_server_version()
return tuple(version)
def _detect_charset(self, connection):
return connection.connection.get_characterset_info()
def _extract_error_code(self, exception):
return exception.errno
def is_disconnect(self, e):
errnos = (2006, 2013, 2014, 2045, 2055, 2048)
        exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError)
if isinstance(e, exceptions):
return e.errno in errnos
else:
return False
def _compat_fetchall(self, rp, charset=None):
return rp.fetchall()
def _compat_fetchone(self, rp, charset=None):
return rp.fetchone()
dialect = MySQLDialect_mysqlconnector
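# A minimal usage sketch of the connect-string format documented in the module
# docstring, assuming SQLAlchemy's create_engine and placeholder credentials,
# host and database name:
#
#     from sqlalchemy import create_engine
#     engine = create_engine("mysql+mysqlconnector://scott:tiger@localhost:3306/test")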
|
{
"content_hash": "2c7291bdfb12f0f40ecf36c40754c864",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 81,
"avg_line_length": 29.697674418604652,
"alnum_prop": 0.6671887235708692,
"repo_name": "simplegeo/sqlalchemy",
"id": "bd9c9b8e28d9c184f3afd636179b37c55f153db9",
"size": "3831",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "lib/sqlalchemy/dialects/mysql/mysqlconnector.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "30110"
},
{
"name": "JavaScript",
"bytes": "26336"
},
{
"name": "Python",
"bytes": "5012225"
}
],
"symlink_target": ""
}
|
"""
Python Script for adding 1 & 2 finger multitouch gestures to implement
a right click option with Touchscreens in the Ubuntu unity environment.
This is implemented with the evdev Python library on an ELAN touchscreen.
Currently implements 2 types of right click options:
1 finger long touch: Timeout of 1.5 seconds, movement cancels action
2 finger tap: movement cancels action
"""
from evdev import InputDevice, ecodes, UInput, list_devices
from pymouse import PyMouse
from threading import Timer
import subprocess
import argparse
class TrackedEvent(object):
"""
Class for multitouch event tracking.
Track position, movement, slots used (total number of fingers in gesture),
timing of long presses, and event completion.
"""
def __init__(self, dev, abilities, var_x, var_y,
use_pymouse=False, long_press_workaround=False):
""" Initialize tracking attributes. """
self.dev = dev
self.abilities = abilities
self.long_press_workaround = long_press_workaround
self.vars = {'ABS_X': var_x, 'ABS_Y': var_y}
self.position = {'ABS_X': None, 'ABS_Y': None}
self.fingers = 0
self.total_event_fingers = 0
self.discard = 0
self.moved = 0
self.track_start = None
self.click_delay = 1.5
self.long_pressed = False
if use_pymouse:
self.mouse = PyMouse()
else:
self.mouse = None
def add_finger(self):
""" Add a detected finger. """
self.fingers += 1
self.total_event_fingers = self.fingers
def remove_fingers(self):
""" Remove detected finger upon release. """
if self.fingers == 1:
print('Total Fingers used: ', self.total_event_fingers)
self.fingers -= 1
if (self.fingers == 0 and
self.total_event_fingers == 2 and
self.moved == 0):
self.total_event_fingers = 0
self._initiate_right_click()
elif (self.fingers == 0 and
self.total_event_fingers == 1 and
self.moved == 0):
self.total_event_fingers = 0
try:
self.track_start.cancel()
self.track_start.join()
            except AttributeError:  # capture NoneType track_start
pass
if self.long_pressed and not self.long_press_workaround:
self._initiate_right_click()
if self.fingers == 0:
self.discard = 1
def position_event(self, event_code, value):
""" tracks position to track movement of fingers """
if self.position[event_code] is None:
self.position[event_code] = value
else:
if abs(self.position[event_code] - value) > self.vars[event_code]:
self._moved_event()
if (self.fingers == 1 and self.position['ABS_X'] and
self.position['ABS_Y'] and self.track_start is None):
self._trackit()
def _trackit(self):
""" start timing for long press """
self.track_start = Timer(self.click_delay, self._long_press)
self.track_start.start()
print('tracking started!!!')
def _long_press(self):
if self.fingers == 1 and self.moved == 0:
self.long_pressed = True
if self.long_press_workaround:
subprocess.call(['xinput', '--disable', self.dev.name])
subprocess.call(['xinput', '--enable', self.dev.name])
self._initiate_right_click()
def _moved_event(self):
""" movement detected. """
self.moved = 1
def _initiate_right_click(self):
""" Internal method for initiating a right click at touch point. """
if self.mouse is None:
with UInput(self.abilities) as ui:
ui.write(ecodes.EV_ABS, ecodes.ABS_X, 0)
ui.write(ecodes.EV_ABS, ecodes.ABS_Y, 0)
ui.write(ecodes.EV_KEY, ecodes.BTN_RIGHT, 1)
ui.write(ecodes.EV_KEY, ecodes.BTN_RIGHT, 0)
ui.syn()
else:
x, y = self.mouse.position()
self.mouse.click(x, y, 2)
def initiate_gesture_find(use_pymouse=False, long_press_workaround=False):
"""
This function will scan all input devices until it finds an
ELAN touchscreen. It will then enter a loop to monitor this device
without blocking its usage by the system.
"""
for device in list_devices():
dev = InputDevice(device)
if (dev.name == 'ELAN Touchscreen') or \
(dev.name == 'Atmel Atmel maXTouch Digitizer'):
break
Abs_events = {}
abilities = {ecodes.EV_ABS: [ecodes.ABS_X, ecodes.ABS_Y],
ecodes.EV_KEY: (ecodes.BTN_LEFT, ecodes.BTN_RIGHT,
ecodes.BTN_TOUCH)}
# Assuming QHD screen on my Yoga 2 Pro as default for resolution measures
res_x = 13 # touch unit resolution # units/mm in x direction
res_y = 13 # touch unit resolution # units/mm in y direction
# would be weird if above resolutions differed, but will treat generically
codes = dev.capabilities()
for code in codes:
if code == 3:
for type_code in codes[code]:
human_code = ecodes.ABS[type_code[0]]
if human_code == 'ABS_X':
vals = type_code[1]
abilities[ecodes.EV_ABS][0] = (ecodes.ABS_X, vals)
res_x = vals[-1]
elif human_code == 'ABS_Y':
vals = type_code[1]
abilities[ecodes.EV_ABS][1] = (ecodes.ABS_Y, vals)
res_y = vals[-1]
Abs_events[type_code[0]] = human_code
# Average index finger width is 16-20 mm, assume 20 mm
# touch resolution noise assumed at 10% (5% radius), so 1.0 mm by default
    # this seemed reasonable from my own trial tests
var_x = 1.0 * res_x # variablity in movement allowed in x direction
var_y = 1.0 * res_y # variablity in movement allowed in y direction
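    # A worked example of the tolerance above, assuming the default 1.0 mm of
    # allowed jitter: with res_x == res_y == 13 units/mm this gives
    # var_x == var_y == 13 touch units; a higher-resolution digitizer scales
    # the tolerance proportionally.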
MT_event = None
for event in dev.read_loop():
if event.type == ecodes.EV_ABS:
if MT_event is None:
MT_event = TrackedEvent(dev, abilities, var_x, var_y,
use_pymouse, long_press_workaround)
event_code = Abs_events[event.code]
if event_code == 'ABS_X' or event_code == 'ABS_Y':
MT_event.position_event(event_code, event.value)
elif event_code == 'ABS_MT_TRACKING_ID':
if event.value == -1:
MT_event.remove_fingers()
if MT_event.discard == 1:
MT_event = None
else:
MT_event.add_finger()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Implements right click options on Linux Systems via Touchscreen')
parser.add_argument(
"--use_pymouse",
help="Uses PyMouse for initiating clicks instead of UInput",
action="store_true")
parser.add_argument(
"--long_press_workaround",
help="Uses xinupt to disable/enable touchscreen to raise context menu during press",
action="store_true")
args = parser.parse_args()
initiate_gesture_find(args.use_pymouse, args.long_press_workaround)
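# A minimal invocation sketch, assuming the script is saved as
# Python_Touchscreen_RightClick.py; both flags map onto the argparse options above:
#
#     python Python_Touchscreen_RightClick.py
#     python Python_Touchscreen_RightClick.py --use_pymouse --long_press_workaround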
|
{
"content_hash": "9e98705572440ff9afb5ab4d00103eef",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 92,
"avg_line_length": 38.93717277486911,
"alnum_prop": 0.5745596342611268,
"repo_name": "Zyell/Python-Touchscreen-RightClick",
"id": "83c1f99dc1ccd9b59585cf4a25402037b83a6353",
"size": "7437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python_Touchscreen_RightClick.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7437"
}
],
"symlink_target": ""
}
|
"""Tests for templates module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.contrib.py2tf.pyct import compiler
from tensorflow.contrib.py2tf.pyct import templates
from tensorflow.python.platform import test
class TemplatesTest(test.TestCase):
def test_replace_variable(self):
template = """
def test_fn(a):
a += 1
a = 2 * a + 1
return b
"""
node = templates.replace(template, a='b')[0]
result = compiler.ast_to_object(node)
self.assertEquals(7, result.test_fn(2))
def test_replace_function_name(self):
template = """
def fname(a):
a += 1
a = 2 * a + 1
return a
"""
node = templates.replace(template, fname='test_fn')[0]
result = compiler.ast_to_object(node)
self.assertEquals(7, result.test_fn(2))
def test_code_block(self):
template = """
def test_fn(a):
block
return a
"""
node = templates.replace(
template,
block=[
gast.Assign([
gast.Name('a', None, None)
], gast.BinOp(gast.Name('a', None, None), gast.Add(), gast.Num(1))),
] * 2)[0]
result = compiler.ast_to_object(node)
self.assertEquals(3, result.test_fn(1))
if __name__ == '__main__':
test.main()
|
{
"content_hash": "a50ab36798947a53819d98ca76a7beca",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 80,
"avg_line_length": 23.35593220338983,
"alnum_prop": 0.5870827285921626,
"repo_name": "ravindrapanda/tensorflow",
"id": "1143131283cd92c42abfc73d5728fac96cc31c23",
"size": "2067",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/py2tf/pyct/templates_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7908"
},
{
"name": "C",
"bytes": "186817"
},
{
"name": "C++",
"bytes": "25164156"
},
{
"name": "CMake",
"bytes": "166422"
},
{
"name": "Go",
"bytes": "857510"
},
{
"name": "HTML",
"bytes": "568425"
},
{
"name": "Java",
"bytes": "317802"
},
{
"name": "JavaScript",
"bytes": "1399"
},
{
"name": "Jupyter Notebook",
"bytes": "1833659"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37393"
},
{
"name": "Objective-C",
"bytes": "7037"
},
{
"name": "Objective-C++",
"bytes": "64142"
},
{
"name": "Protocol Buffer",
"bytes": "227436"
},
{
"name": "Python",
"bytes": "22238905"
},
{
"name": "Shell",
"bytes": "338684"
},
{
"name": "TypeScript",
"bytes": "797972"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, include, url
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'tdd.apps.lists.views.home_page', name='home'),
# url(r'^blog/', include('blog.urls')),
# url(r'^admin/', include(admin.site.urls)),
)
|
{
"content_hash": "9dfd2c8d585f4278cc026f255d71daa8",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 62,
"avg_line_length": 26.166666666666668,
"alnum_prop": 0.6464968152866242,
"repo_name": "kevinmanncito/django-tdd",
"id": "06d1c8bc906f30f49c03e935824097c2d33704cf",
"size": "314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5134"
}
],
"symlink_target": ""
}
|
from msrest.serialization import Model
class DeploymentLb(Model):
"""
Deployment operation parameters.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar uri: URI referencing the template. Default value:
"https://azuresdkci.blob.core.windows.net/templatehost/CreateLb_2016-07-19/azuredeploy.json"
.
:vartype uri: str
:param content_version: If included it must match the ContentVersion in
the template.
:type content_version: str
    :ivar _artifacts_location: Container URI of the template. Default
value:
"https://azuresdkci.blob.core.windows.net/templatehost/CreateLb_2016-07-19"
.
:vartype _artifacts_location: str
:param backend_pool_name: Name of load balancer backend pool.
:type backend_pool_name: str
:param dns_name_type: Associate VMs with a public IP address to a DNS
name. Possible values include: 'none', 'new'. Default value: "none" .
:type dns_name_type: str or :class:`dnsNameType
<lbcreationclient.models.dnsNameType>`
:param frontend_ip_name: Name of the frontend IP configuration. Default
value: "LoadBalancerFrontEnd" .
:type frontend_ip_name: str
:param load_balancer_name: Name for load balancer.
:type load_balancer_name: str
:param location: Location for load balancer resource.
:type location: str
:param private_ip_address: Static private IP address to use.
:type private_ip_address: str
:param private_ip_address_allocation: Private IP address allocation
method. Possible values include: 'dynamic', 'static'. Default value:
"dynamic" .
:type private_ip_address_allocation: str or
:class:`privateIpAddressAllocation
<lbcreationclient.models.privateIpAddressAllocation>`
:param public_ip_address: Name or ID of the public IP address to use.
:type public_ip_address: str
:param public_ip_address_allocation: Public IP address allocation method.
Possible values include: 'dynamic', 'static'. Default value: "dynamic" .
:type public_ip_address_allocation: str or
:class:`publicIpAddressAllocation
<lbcreationclient.models.publicIpAddressAllocation>`
:param public_ip_address_type: Type of Public IP Address to associate
with the load balancer. Possible values include: 'none', 'new',
'existingName', 'existingId'. Default value: "new" .
:type public_ip_address_type: str or :class:`publicIpAddressType
<lbcreationclient.models.publicIpAddressType>`
:param public_ip_dns_name: Globally unique DNS Name for the Public IP
used to access the Virtual Machine (new public IP only).
:type public_ip_dns_name: str
:param subnet: The subnet name or ID to associate with the load balancer.
Cannot be used in conjunction with a Public IP.
:type subnet: str
:param subnet_address_prefix: The subnet address prefix in CIDR format
(new subnet only). Default value: "10.0.0.0/24" .
:type subnet_address_prefix: str
:param subnet_type: Use new, existing or no subnet. Possible values
include: 'none', 'new', 'existingName', 'existingId'. Default value:
"none" .
:type subnet_type: str or :class:`subnetType
<lbcreationclient.models.subnetType>`
:param tags: Tags object.
:type tags: object
:param virtual_network_name: The VNet name containing the subnet. Cannot
be used in conjunction with a Public IP.
:type virtual_network_name: str
:param vnet_address_prefix: The virtual network IP address prefix in CIDR
format (new subnet only). Default value: "10.0.0.0/16" .
:type vnet_address_prefix: str
:ivar mode: Gets or sets the deployment mode. Default value:
"Incremental" .
:vartype mode: str
"""
_validation = {
'uri': {'required': True, 'constant': True},
'_artifacts_location': {'required': True, 'constant': True},
'load_balancer_name': {'required': True},
'mode': {'required': True, 'constant': True},
}
_attribute_map = {
'uri': {'key': 'properties.templateLink.uri', 'type': 'str'},
'content_version': {'key': 'properties.templateLink.contentVersion', 'type': 'str'},
'_artifacts_location': {'key': 'properties.parameters._artifactsLocation.value', 'type': 'str'},
'backend_pool_name': {'key': 'properties.parameters.backendPoolName.value', 'type': 'str'},
'dns_name_type': {'key': 'properties.parameters.dnsNameType.value', 'type': 'dnsNameType'},
'frontend_ip_name': {'key': 'properties.parameters.frontendIpName.value', 'type': 'str'},
'load_balancer_name': {'key': 'properties.parameters.loadBalancerName.value', 'type': 'str'},
'location': {'key': 'properties.parameters.location.value', 'type': 'str'},
'private_ip_address': {'key': 'properties.parameters.privateIpAddress.value', 'type': 'str'},
'private_ip_address_allocation': {'key': 'properties.parameters.privateIpAddressAllocation.value', 'type': 'privateIpAddressAllocation'},
'public_ip_address': {'key': 'properties.parameters.publicIpAddress.value', 'type': 'str'},
'public_ip_address_allocation': {'key': 'properties.parameters.publicIpAddressAllocation.value', 'type': 'publicIpAddressAllocation'},
'public_ip_address_type': {'key': 'properties.parameters.publicIpAddressType.value', 'type': 'publicIpAddressType'},
'public_ip_dns_name': {'key': 'properties.parameters.publicIpDnsName.value', 'type': 'str'},
'subnet': {'key': 'properties.parameters.subnet.value', 'type': 'str'},
'subnet_address_prefix': {'key': 'properties.parameters.subnetAddressPrefix.value', 'type': 'str'},
'subnet_type': {'key': 'properties.parameters.subnetType.value', 'type': 'subnetType'},
'tags': {'key': 'properties.parameters.tags.value', 'type': 'object'},
'virtual_network_name': {'key': 'properties.parameters.virtualNetworkName.value', 'type': 'str'},
'vnet_address_prefix': {'key': 'properties.parameters.vnetAddressPrefix.value', 'type': 'str'},
'mode': {'key': 'properties.mode', 'type': 'str'},
}
uri = "https://azuresdkci.blob.core.windows.net/templatehost/CreateLb_2016-07-19/azuredeploy.json"
_artifacts_location = "https://azuresdkci.blob.core.windows.net/templatehost/CreateLb_2016-07-19"
mode = "Incremental"
def __init__(self, load_balancer_name, content_version=None, backend_pool_name=None, dns_name_type="none", frontend_ip_name="LoadBalancerFrontEnd", location=None, private_ip_address=None, private_ip_address_allocation="dynamic", public_ip_address=None, public_ip_address_allocation="dynamic", public_ip_address_type="new", public_ip_dns_name=None, subnet=None, subnet_address_prefix="10.0.0.0/24", subnet_type="none", tags=None, virtual_network_name=None, vnet_address_prefix="10.0.0.0/16"):
self.content_version = content_version
self.backend_pool_name = backend_pool_name
self.dns_name_type = dns_name_type
self.frontend_ip_name = frontend_ip_name
self.load_balancer_name = load_balancer_name
self.location = location
self.private_ip_address = private_ip_address
self.private_ip_address_allocation = private_ip_address_allocation
self.public_ip_address = public_ip_address
self.public_ip_address_allocation = public_ip_address_allocation
self.public_ip_address_type = public_ip_address_type
self.public_ip_dns_name = public_ip_dns_name
self.subnet = subnet
self.subnet_address_prefix = subnet_address_prefix
self.subnet_type = subnet_type
self.tags = tags
self.virtual_network_name = virtual_network_name
self.vnet_address_prefix = vnet_address_prefix
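# A minimal construction sketch, assuming placeholder resource names; only
# load_balancer_name is a required constructor argument, everything else falls
# back to the documented defaults:
#
#     params = DeploymentLb(
#         load_balancer_name="my-lb",
#         location="westus",
#         public_ip_dns_name="my-lb-dns",
#     )
#     assert params.frontend_ip_name == "LoadBalancerFrontEnd"
#     assert params.mode == "Incremental"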
|
{
"content_hash": "3aa582c834fb3457dcfd85380a032c3b",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 495,
"avg_line_length": 56.52173913043478,
"alnum_prop": 0.688076923076923,
"repo_name": "BurtBiel/azure-cli",
"id": "cd5e5b158c5949dfe17e0fe3c72ab4d35c1c2d14",
"size": "8487",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/command_modules/azure-cli-network/azure/cli/command_modules/network/mgmt_lb/lib/models/deployment_lb.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "429"
},
{
"name": "Python",
"bytes": "2108820"
},
{
"name": "Shell",
"bytes": "3300"
}
],
"symlink_target": ""
}
|
"""CronInfo tools.
A library for working with CronInfo records, describing cron entries for an
application. Supports loading the records from yaml.
"""
import logging
import sys
import traceback
try:
import pytz
except ImportError:
pytz = None
from google.appengine.cron import groc
from google.appengine.cron import groctimespecification
from google.appengine.api import appinfo
from google.appengine.api import validation
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_object
_URL_REGEX = r'^/.*$'
_TIMEZONE_REGEX = r'^.{0,100}$'
_DESCRIPTION_REGEX = r'^.{0,499}$'
_VERSION_REGEX = appinfo.VERSION_RE_STRING
class GrocValidator(validation.Validator):
"""Checks that a schedule is in valid groc format."""
def Validate(self, value, key=None):
"""Validates a schedule."""
if value is None:
raise validation.MissingAttribute('schedule must be specified')
if not isinstance(value, basestring):
raise TypeError('schedule must be a string, not \'%r\''%type(value))
try:
groctimespecification.GrocTimeSpecification(value)
except groc.GrocException, e:
raise validation.ValidationError('schedule \'%s\' failed to parse: %s'%(
value, e.args[0]))
return value
class TimezoneValidator(validation.Validator):
"""Checks that a timezone can be correctly parsed and is known."""
def Validate(self, value, key=None):
"""Validates a timezone."""
if value is None:
return
if not isinstance(value, basestring):
raise TypeError('timezone must be a string, not \'%r\'' % type(value))
if pytz is None:
return value
try:
pytz.timezone(value)
except pytz.UnknownTimeZoneError:
raise validation.ValidationError('timezone \'%s\' is unknown' % value)
except IOError:
return value
except:
unused_e, v, t = sys.exc_info()
logging.warning('pytz raised an unexpected error: %s.\n' % (v) +
'Traceback:\n' + '\n'.join(traceback.format_tb(t)))
raise
return value
CRON = 'cron'
URL = 'url'
SCHEDULE = 'schedule'
TIMEZONE = 'timezone'
DESCRIPTION = 'description'
TARGET = 'target'
class MalformedCronfigurationFile(Exception):
"""Configuration file for Cron is malformed."""
pass
class CronEntry(validation.Validated):
"""A cron entry describes a single cron job."""
ATTRIBUTES = {
URL: _URL_REGEX,
SCHEDULE: GrocValidator(),
TIMEZONE: TimezoneValidator(),
DESCRIPTION: validation.Optional(_DESCRIPTION_REGEX),
TARGET: validation.Optional(_VERSION_REGEX),
}
class CronInfoExternal(validation.Validated):
"""CronInfoExternal describes all cron entries for an application."""
ATTRIBUTES = {
CRON: validation.Optional(validation.Repeated(CronEntry))
}
def LoadSingleCron(cron_info):
"""Load a cron.yaml file or string and return a CronInfoExternal object."""
builder = yaml_object.ObjectBuilder(CronInfoExternal)
handler = yaml_builder.BuilderHandler(builder)
listener = yaml_listener.EventListener(handler)
listener.Parse(cron_info)
cron_info = handler.GetResults()
if len(cron_info) < 1:
raise MalformedCronfigurationFile('Empty cron configuration.')
if len(cron_info) > 1:
raise MalformedCronfigurationFile('Multiple cron sections '
'in configuration.')
return cron_info[0]
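# A minimal usage sketch, assuming a placeholder cron.yaml document (URL,
# schedule and description are illustrative only):
#
#     sample = """
#     cron:
#     - url: /tasks/summary
#       schedule: every 24 hours
#       description: nightly summary job
#     """
#     cron_info = LoadSingleCron(sample)
#     for entry in cron_info.cron:
#         print(entry.url, entry.schedule)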
|
{
"content_hash": "7926d4aa057cd34477a7cd1103bad3ab",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 78,
"avg_line_length": 26.404580152671755,
"alnum_prop": 0.6964440589765828,
"repo_name": "tungvx/deploy",
"id": "99a3efb91d95f809277a4e8902c5349548375eaa",
"size": "4064",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": ".google_appengine/google/appengine/api/croninfo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "400492"
},
{
"name": "JavaScript",
"bytes": "477245"
},
{
"name": "Python",
"bytes": "16861113"
},
{
"name": "Shell",
"bytes": "8221"
}
],
"symlink_target": ""
}
|
import multiprocessing
import os, re, errno
import subprocess
def purge(dir, pattern):
for f in os.listdir(dir):
if re.search(pattern, f):
os.remove(os.path.join(dir, f))
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
# mkdir_p("work/aal2")
mkdir_p("work/summary2")
mkdir_p("results/aal")
# purge("work/aal2","")
purge("results/aal","fm_aal*")
total_processes = multiprocessing.cpu_count()
#total_processes = 1
counter=1
samplesize=100
randomnumbers=1000000
procs = []
# Loops through the processes
# The wait command informs the script to wait for all child processes to finish.
# The eve process partitions the events to a numbered process (counter).
# The output stream of events invokes getmodel which calculates the CDFs for that subset of events.
# The CDF stream invokes gulcalc which performs the ground up loss sampling. The required parameters are the number of Samples -S and number of random numbers -R, and output stream type -i.
# The ground up losses are streamed through to fmcalc to apply policy terms and conditions and output insured losses
# The insured losses are streamed through summarycalc to summarize the samples to policy level (summary set 1)
# The policy losses are streamed through aalcalc is output to binary files in a work folder.
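# A worked expansion of the pipeline below, assuming total_processes == 4 and
# samplesize == 100; the first process (counter == 1) runs:
#   ../src/eve/eve 1 4 | ../src/getmodel/getmodel | ../src/gulcalc/gulcalc -r -S100 -i - |
#   ../src/fmcalc/fmcalc | ../src/summarycalc/summarycalc -f -2 - > work/summary2/p1.bin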
while counter <= total_processes:
cmd="../src/eve/eve %d %d | ../src/getmodel/getmodel | ../src/gulcalc/gulcalc -r -S%d -i - | ../src/fmcalc/fmcalc | ../src/summarycalc/summarycalc -f -2 - > work/summary2/p%d.bin " % (counter,total_processes,samplesize,counter)
if os.name == "nt":
cmd = cmd.replace("/","\\")
print(cmd)
p1 = subprocess.Popen(cmd,shell=True)
procs.append(p1)
counter = counter + 1
for p in procs:
p.wait()
counter=1
# aalcalc runs on the output of summarycalc
cmd="../src/aalcalc/aalcalc -Ksummary2 > results/aal/fm_aal.csv"
if os.name == "nt":
cmd = cmd.replace("/","\\")
print(cmd)
p1 = subprocess.Popen(cmd,shell=True)
print("Finished. View outputs in results/aal")
|
{
"content_hash": "f6841dc25db5c88b20633740a9477423",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 231,
"avg_line_length": 35.693548387096776,
"alnum_prop": 0.6823316764572978,
"repo_name": "OasisLMF/ktools",
"id": "fd0dc57aeb67c7895abf0557835516672a37917c",
"size": "2236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/aalcalc_example.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "23335"
},
{
"name": "C++",
"bytes": "755341"
},
{
"name": "CMake",
"bytes": "22491"
},
{
"name": "M4",
"bytes": "26990"
},
{
"name": "Makefile",
"bytes": "9041"
},
{
"name": "Python",
"bytes": "64494"
},
{
"name": "Shell",
"bytes": "92573"
}
],
"symlink_target": ""
}
|
"""Support for stiebel_eltron climate platform."""
import logging
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_ECO,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from . import DOMAIN as STE_DOMAIN
DEPENDENCIES = ["stiebel_eltron"]
_LOGGER = logging.getLogger(__name__)
PRESET_DAY = "day"
PRESET_SETBACK = "setback"
PRESET_EMERGENCY = "emergency"
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
SUPPORT_HVAC = [HVAC_MODE_AUTO, HVAC_MODE_HEAT, HVAC_MODE_OFF]
SUPPORT_PRESET = [PRESET_ECO, PRESET_DAY, PRESET_EMERGENCY, PRESET_SETBACK]
# Mapping STIEBEL ELTRON states to homeassistant states/preset.
STE_TO_HA_HVAC = {
"AUTOMATIC": HVAC_MODE_AUTO,
"MANUAL MODE": HVAC_MODE_HEAT,
"STANDBY": HVAC_MODE_AUTO,
"DAY MODE": HVAC_MODE_AUTO,
"SETBACK MODE": HVAC_MODE_AUTO,
"DHW": HVAC_MODE_OFF,
"EMERGENCY OPERATION": HVAC_MODE_AUTO,
}
STE_TO_HA_PRESET = {
"STANDBY": PRESET_ECO,
"DAY MODE": PRESET_DAY,
"SETBACK MODE": PRESET_SETBACK,
"EMERGENCY OPERATION": PRESET_EMERGENCY,
}
HA_TO_STE_HVAC = {
HVAC_MODE_AUTO: "AUTOMATIC",
HVAC_MODE_HEAT: "MANUAL MODE",
HVAC_MODE_OFF: "DHW",
}
HA_TO_STE_PRESET = {k: i for i, k in STE_TO_HA_PRESET.items()}
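# HA_TO_STE_PRESET simply inverts STE_TO_HA_PRESET, e.g.
# HA_TO_STE_PRESET[PRESET_DAY] == "DAY MODE" and
# HA_TO_STE_PRESET[PRESET_ECO] == "STANDBY".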
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the StiebelEltron platform."""
name = hass.data[STE_DOMAIN]["name"]
ste_data = hass.data[STE_DOMAIN]["ste_data"]
add_entities([StiebelEltron(name, ste_data)], True)
class StiebelEltron(ClimateEntity):
"""Representation of a STIEBEL ELTRON heat pump."""
def __init__(self, name, ste_data):
"""Initialize the unit."""
self._name = name
self._target_temperature = None
self._current_temperature = None
self._current_humidity = None
self._operation = None
self._filter_alarm = None
self._force_update = False
self._ste_data = ste_data
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
def update(self):
"""Update unit attributes."""
self._ste_data.update(no_throttle=self._force_update)
self._force_update = False
self._target_temperature = self._ste_data.api.get_target_temp()
self._current_temperature = self._ste_data.api.get_current_temp()
self._current_humidity = self._ste_data.api.get_current_humidity()
self._filter_alarm = self._ste_data.api.get_filter_alarm_status()
self._operation = self._ste_data.api.get_operation()
_LOGGER.debug(
"Update %s, current temp: %s", self._name, self._current_temperature
)
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
return {"filter_alarm": self._filter_alarm}
@property
def name(self):
"""Return the name of the climate device."""
return self._name
# Handle SUPPORT_TARGET_TEMPERATURE
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return 0.1
@property
def min_temp(self):
"""Return the minimum temperature."""
return 10.0
@property
def max_temp(self):
"""Return the maximum temperature."""
return 30.0
@property
def current_humidity(self):
"""Return the current humidity."""
return float(f"{self._current_humidity:.1f}")
@property
def hvac_modes(self):
"""List of the operation modes."""
return SUPPORT_HVAC
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
return STE_TO_HA_HVAC.get(self._operation)
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
return STE_TO_HA_PRESET.get(self._operation)
@property
def preset_modes(self):
"""Return a list of available preset modes."""
return SUPPORT_PRESET
def set_hvac_mode(self, hvac_mode):
"""Set new operation mode."""
if self.preset_mode:
return
new_mode = HA_TO_STE_HVAC.get(hvac_mode)
_LOGGER.debug("set_hvac_mode: %s -> %s", self._operation, new_mode)
self._ste_data.api.set_operation(new_mode)
self._force_update = True
def set_temperature(self, **kwargs):
"""Set new target temperature."""
target_temperature = kwargs.get(ATTR_TEMPERATURE)
if target_temperature is not None:
_LOGGER.debug("set_temperature: %s", target_temperature)
self._ste_data.api.set_target_temp(target_temperature)
self._force_update = True
def set_preset_mode(self, preset_mode: str):
"""Set new preset mode."""
new_mode = HA_TO_STE_PRESET.get(preset_mode)
_LOGGER.debug("set_hvac_mode: %s -> %s", self._operation, new_mode)
self._ste_data.api.set_operation(new_mode)
self._force_update = True
|
{
"content_hash": "6882d554817e3f924a2299e473b1b3bb",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 80,
"avg_line_length": 30.440860215053764,
"alnum_prop": 0.6336983398092547,
"repo_name": "partofthething/home-assistant",
"id": "d8c32575b1703a7dd0d8c6288133069b552ae9ed",
"size": "5662",
"binary": false,
"copies": "14",
"ref": "refs/heads/dev",
"path": "homeassistant/components/stiebel_eltron/climate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
}
|
class OrderError(Exception):
pass
class OrderIdentifierError(OrderError):
"""
Order exception that is raised if order identifier was not found.
"""
pass
|
{
"content_hash": "a948a2001f050ea620457d6c774d9444",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 69,
"avg_line_length": 19.444444444444443,
"alnum_prop": 0.6914285714285714,
"repo_name": "druids/django-pyston",
"id": "9e60452ef89009d7252199f64842fc7352099c9b",
"size": "175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyston/order/exceptions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9676"
},
{
"name": "Makefile",
"bytes": "3369"
},
{
"name": "Python",
"bytes": "405274"
}
],
"symlink_target": ""
}
|
from .iob_to_docs import iob_to_docs # noqa: F401
from .conll_ner_to_docs import conll_ner_to_docs # noqa: F401
from .json_to_docs import json_to_docs # noqa: F401
from .conllu_to_docs import conllu_to_docs # noqa: F401
|
{
"content_hash": "57c0732951af6e5aedb7b568d5c4de99",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 62,
"avg_line_length": 56,
"alnum_prop": 0.7232142857142857,
"repo_name": "explosion/spaCy",
"id": "e91b6aaa6e43fcb689c98e9471aecea41b3c8d7b",
"size": "224",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "spacy/training/converters/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9571"
},
{
"name": "C++",
"bytes": "187"
},
{
"name": "Cython",
"bytes": "784034"
},
{
"name": "Dockerfile",
"bytes": "432"
},
{
"name": "HTML",
"bytes": "29880"
},
{
"name": "JavaScript",
"bytes": "240056"
},
{
"name": "Jinja",
"bytes": "12977"
},
{
"name": "Makefile",
"bytes": "1576"
},
{
"name": "Python",
"bytes": "3783857"
},
{
"name": "Sass",
"bytes": "56930"
},
{
"name": "Shell",
"bytes": "984"
}
],
"symlink_target": ""
}
|
import cStringIO
import yaml
from nailgun.db.sqlalchemy.fixman import upload_fixture
from nailgun.db.sqlalchemy.models import Node
from nailgun.db.sqlalchemy.models import Release
from nailgun.openstack.common import jsonutils
from nailgun.test.base import BaseIntegrationTest
class TestFixture(BaseIntegrationTest):
fixtures = ['admin_network', 'sample_environment']
def test_upload_working(self):
check = self.db.query(Node).all()
self.assertEqual(len(list(check)), 8)
def test_json_fixture(self):
data = '''[{
"pk": 2,
"model": "nailgun.release",
"fields": {
"name": "JSONFixtureRelease",
"version": "0.0.1",
"description": "Sample release for testing",
"operating_system": "CentOS"
}
}]'''
upload_fixture(cStringIO.StringIO(data), loader=jsonutils)
check = self.db.query(Release).filter(
Release.name == u"JSONFixtureRelease"
)
self.assertEqual(len(list(check)), 1)
def test_yaml_fixture(self):
data = '''---
- &base_release
model: nailgun.release
fields:
name: BaseRelease
version: 0.0.1
operating_system: AbstractOS
- pk: 2
extend: *base_release
fields:
name: YAMLFixtureRelease
version: 1.0.0
operating_system: CentOS
'''
upload_fixture(cStringIO.StringIO(data), loader=yaml)
check = self.db.query(Release).filter(
Release.name == u"YAMLFixtureRelease"
)
self.assertEqual(len(list(check)), 1)
check = self.db.query(Release).filter(
Release.name == u"BaseRelease"
)
self.assertEqual(len(list(check)), 0)
def test_fixture_roles_order(self):
data = '''[{
"pk": 1,
"model": "nailgun.release",
"fields": {
"name": "CustomFixtureRelease1",
"version": "0.0.1",
"description": "Sample release for testing",
"operating_system": "CentOS",
"roles": ["controller", "compute", "cinder", "ceph-osd"]
}
}]'''
upload_fixture(cStringIO.StringIO(data), loader=jsonutils)
rel = self.db.query(Release).filter(
Release.name == u"CustomFixtureRelease1"
).all()
self.assertEqual(len(rel), 1)
self.assertEqual(list(rel[0].roles),
["controller", "compute", "cinder", "ceph-osd"])
data = '''[{
"pk": 2,
"model": "nailgun.release",
"fields": {
"name": "CustomFixtureRelease2",
"version": "0.0.1",
"description": "Sample release for testing",
"operating_system": "CentOS",
"roles": ["compute", "ceph-osd", "controller", "cinder"]
}
}]'''
upload_fixture(cStringIO.StringIO(data), loader=jsonutils)
rel = self.db.query(Release).filter(
Release.name == u"CustomFixtureRelease2"
).all()
self.assertEqual(len(rel), 1)
self.assertEqual(list(rel[0].roles),
["compute", "ceph-osd", "controller", "cinder"])
data = '''[{
"pk": 3,
"model": "nailgun.release",
"fields": {
"name": "CustomFixtureRelease3",
"version": "0.0.1",
"description": "Sample release for testing",
"operating_system": "CentOS",
"roles": ["compute", "cinder", "controller", "cinder"]
}
}]'''
upload_fixture(cStringIO.StringIO(data), loader=jsonutils)
rel = self.db.query(Release).filter(
Release.name == u"CustomFixtureRelease3"
).all()
self.assertEqual(len(rel), 1)
self.assertEqual(list(rel[0].roles),
["compute", "cinder", "controller"])
# check previously added release roles
prev_rel = self.db.query(Release).filter(
Release.name == u"CustomFixtureRelease2"
).all()
self.assertEqual(len(prev_rel), 1)
self.assertEqual(list(prev_rel[0].roles),
["compute", "ceph-osd", "controller", "cinder"])
|
{
"content_hash": "25f2ea73f9b1df632181162788c16702",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 73,
"avg_line_length": 34.25396825396825,
"alnum_prop": 0.5373030583873958,
"repo_name": "andrei4ka/fuel-web-redhat",
"id": "e566a651b2a697a9413dc13fc824c3a3009ba94d",
"size": "4951",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nailgun/nailgun/test/integration/test_fixture_uploading.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "100524"
},
{
"name": "JavaScript",
"bytes": "639783"
},
{
"name": "Makefile",
"bytes": "5891"
},
{
"name": "Puppet",
"bytes": "282"
},
{
"name": "Python",
"bytes": "3206343"
},
{
"name": "Ruby",
"bytes": "33423"
},
{
"name": "Shell",
"bytes": "31460"
}
],
"symlink_target": ""
}
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import random
import os
import sys
from resource_management import *
class Spark_Component(Script):
def install(self, env):
self.install_packages(env)
def configure(self, env):
import params
env.set_params(params)
def start(self, env):
import params
env.set_params(params)
self.configure(env)
worker_id = random.randint(1, 10000)
pid_file = format("{app_pid_dir}/spark-yarn-org.apache.spark.deploy.worker.Worker-{worker_id}.pid")
start_spark_cmd = """env SPARK_PID_DIR={app_pid_dir} SPARK_LOG_DIR={app_log_dir} {app_root}/sbin/start-slave.sh {worker_id} spark://{master_host}:{master_port}
"""
process_cmd = format(start_spark_cmd.replace("\n", " "))
print("Starting Spark slave using command: "+process_cmd)
Execute(process_cmd,
logoutput=True,
wait_for_finish=False,
pid_file=pid_file,
poll_after = 10,
cwd=format("{app_root}")
)
def stop(self, env):
import params
env.set_params(params)
stop_cmd = format("{app_root}/sbin/stop-slave.sh")
Execute(stop_cmd,
logoutput=True,
wait_for_finish=True,
cwd=format("{app_root}")
)
def status(self, env):
import params
env.set_params(params)
# check the first pid file in this directory
for item in os.listdir(params.app_pid_dir):
pid_file = os.path.join(params.app_pid_dir, item)
check_process_status(pid_file)
break
if __name__ == "__main__":
Spark_Component().execute()
|
{
"content_hash": "2367b8398fe3ba46260c142a3a477ff6",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 163,
"avg_line_length": 30.945945945945947,
"alnum_prop": 0.6864628820960699,
"repo_name": "RamVenkatesh/spark-slider",
"id": "437b67db1b575f40046c4c1a237fbbdd8b74681a",
"size": "2312",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "package/scripts/spark_slave.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5887"
}
],
"symlink_target": ""
}
|
from chips.api.api import *
import os  # needed for the os.remove() cleanup calls below
import sys
my_chip = Chip("interconnect")
wire = Wire(my_chip)
Component("test_suite/producer.c")(my_chip, inputs={}, outputs={"z":wire})
Component("test_suite/consumer.c")(my_chip, inputs={"a":wire}, outputs={})
my_chip.generate_verilog()
my_chip.generate_testbench(100000)
my_chip.compile_iverilog(True)
my_chip = Chip("interconnect")
wire = Wire(my_chip)
Component("test_suite/slow_producer.c")(my_chip, inputs={}, outputs={"z":wire})
Component("test_suite/consumer.c")(my_chip, inputs={"a":wire}, outputs={})
my_chip.generate_verilog()
my_chip.generate_testbench(100000)
my_chip.compile_iverilog(True)
my_chip = Chip("interconnect")
wire = Wire(my_chip)
Component("test_suite/producer.c")(my_chip, inputs={}, outputs={"z":wire})
Component("test_suite/slow_consumer.c")(my_chip, inputs={"a":wire}, outputs={})
my_chip.generate_verilog()
my_chip.generate_testbench(100000)
my_chip.compile_iverilog(True)
my_chip = Chip("interconnect")
wire = Wire(my_chip)
Component("test_suite/slow_producer.c")(my_chip, inputs={}, outputs={"z":wire})
Component("test_suite/slow_consumer.c")(my_chip, inputs={"a":wire}, outputs={})
my_chip.generate_verilog()
my_chip.generate_testbench(100000)
my_chip.compile_iverilog(True)
os.remove("producer.v")
os.remove("consumer.v")
os.remove("interconnect_tb")
os.remove("interconnect.v")
os.remove("interconnect_tb.v")
|
{
"content_hash": "db43eff718715bf7475f8e004ed85b36",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 79,
"avg_line_length": 33.68292682926829,
"alnum_prop": 0.722664735698769,
"repo_name": "freecores/tcp_socket",
"id": "735ef1dd48025dd81c82e587e1df44a85a26d7e2",
"size": "1404",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "chips2/interconnect.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "19773"
},
{
"name": "JavaScript",
"bytes": "234"
},
{
"name": "Objective-C",
"bytes": "21197"
},
{
"name": "Python",
"bytes": "323710"
},
{
"name": "Shell",
"bytes": "3212"
},
{
"name": "VHDL",
"bytes": "52596"
},
{
"name": "Verilog",
"bytes": "345582"
}
],
"symlink_target": ""
}
|
"""Prepares the rendering of Smart Grid Game widget."""
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.views.decorators.cache import never_cache
from apps.widgets.smartgrid_library.models import LibraryAction
def supply(request, page_name):
"""Supplies view_objects for smartgrid library widgets."""
_ = page_name
_ = request
return {
"levels": None,
}
@never_cache
@login_required
def library_action_admin(request, pk):
"""handle the library action admin."""
_ = request
action = LibraryAction.objects.get(pk=pk)
action_type = action.type
return HttpResponseRedirect("/admin/smartgrid_library/library%s/%s/" % (action_type, pk))
@never_cache
@login_required
def library_action_admin_list(request):
"""handle the library action admin."""
_ = request
return HttpResponseRedirect("/admin/smartgrid_library/libraryaction/")
|
{
"content_hash": "ede988f14df5e27a6b1c7e241ef96f6e",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 93,
"avg_line_length": 26.054054054054053,
"alnum_prop": 0.7157676348547718,
"repo_name": "MakahikiKTUH/makahiki-ktuh",
"id": "ce2d5c8cb60876c4966aac4e1509e5a08a849a6f",
"size": "964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "makahiki/apps/widgets/smartgrid_library/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "293007"
},
{
"name": "Python",
"bytes": "1965590"
},
{
"name": "Shell",
"bytes": "6556"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('nomi', '0027_auto_20170530_1434'),
]
operations = [
migrations.AddField(
model_name='nomination',
name='club_nomi',
field=models.ForeignKey(max_length=100, null=True, on_delete=django.db.models.deletion.CASCADE, to='nomi.Club'),
),
]
|
{
"content_hash": "76f34edefe314831cd6eda7d20e3134c",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 124,
"avg_line_length": 25.63157894736842,
"alnum_prop": 0.6344969199178645,
"repo_name": "aniketp41/Gymkhana-Nominations",
"id": "a2748a7c23834d43fddffbfd2141f9486a937773",
"size": "559",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nomi/migrations/0028_nomination_club_nomi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13871"
},
{
"name": "HTML",
"bytes": "187973"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "249674"
}
],
"symlink_target": ""
}
|
"""
pyexcel_io.service
~~~~~~~~~~~~~~~~~~~
provide service code to downstream projects
:copyright: (c) 2014-2022 by Onni Software Ltd.
:license: New BSD License, see LICENSE for more details
"""
import re
import math
import datetime
from pyexcel_io import constants, exceptions
def has_no_digits_in_float(value):
"""check if a float value had zero value in digits"""
return value == math.floor(value)
def detect_date_value(cell_text):
"""
Read the date formats that were written by csv.writer
"""
ret = None
try:
if len(cell_text) == 10:
ret = datetime.datetime.strptime(cell_text, "%Y-%m-%d")
ret = ret.date()
elif len(cell_text) == 19:
ret = datetime.datetime.strptime(cell_text, "%Y-%m-%d %H:%M:%S")
elif len(cell_text) > 19:
ret = datetime.datetime.strptime(
cell_text[0:26], "%Y-%m-%d %H:%M:%S.%f"
)
except ValueError:
pass
return ret
def detect_float_value(
cell_text, pep_0515_off=True, ignore_nan_text=False, default_float_nan=None
):
    """Convert cell text to a float, returning None if it should be kept as text."""
should_we_skip_it = (
cell_text.startswith("0") and cell_text.startswith("0.") is False
)
if should_we_skip_it:
# do not convert if a number starts with 0
# e.g. 014325
return None
if pep_0515_off:
pattern = "([0-9]+_)+[0-9]+.[0-9]*$"
if re.match(pattern, cell_text):
return None
try:
if ignore_nan_text:
if cell_text.lower() == "nan":
return None
else:
return float(cell_text)
else:
if cell_text.lower() == "nan":
if cell_text == default_float_nan:
return float("NaN")
else:
return None
else:
return float(cell_text)
except ValueError:
return None
def detect_int_value(cell_text, pep_0515_off=True):
    """Convert cell text to an int, returning None if it should be kept as text."""
if cell_text.startswith("0") and len(cell_text) > 1:
return None
if pep_0515_off:
pattern = "([0-9]+_)+[0-9]+$"
if re.match(pattern, cell_text):
return None
try:
return int(cell_text)
except ValueError:
pattern = "([0-9]+,)*[0-9]+$"
if re.match(pattern, cell_text):
integer_string = cell_text.replace(",", "")
return int(integer_string)
else:
return None
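# Editor's note: an illustrative sketch of how the detect_* helpers above behave
# (added for clarity; not part of the original module):
#
#   detect_date_value("2014-01-01")   # -> datetime.date(2014, 1, 1)
#   detect_date_value("not a date")   # -> None
#   detect_float_value("1.25")        # -> 1.25
#   detect_float_value("014325")      # -> None (leading zero, kept as text)
#   detect_int_value("1,000")         # -> 1000 (thousands separators stripped)
#   detect_int_value("014325")        # -> None (leading zero, kept as text)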
def float_value(value):
"""convert a value to float"""
ret = float(value)
return ret
def date_value(value):
"""convert to data value accroding ods specification"""
ret = "invalid"
try:
# catch strptime exceptions only
if len(value) == 10:
ret = datetime.datetime.strptime(value, "%Y-%m-%d")
ret = ret.date()
elif len(value) == 19:
ret = datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S")
elif len(value) > 19:
ret = datetime.datetime.strptime(
value[0:26], "%Y-%m-%dT%H:%M:%S.%f"
)
except ValueError:
pass
if ret == "invalid":
raise Exception("Bad date value %s" % value)
return ret
def time_value(value):
"""convert to time value accroding the specification"""
import re
results = re.match(r"PT(\d+)H(\d+)M(\d+)S", value)
if results and len(results.groups()) == 3:
hour = int(results.group(1))
minute = int(results.group(2))
second = int(results.group(3))
if hour < 24:
ret = datetime.time(hour, minute, second)
else:
ret = datetime.timedelta(
hours=hour, minutes=minute, seconds=second
)
else:
ret = None
return ret
def boolean_value(value):
"""get bolean value"""
if value == "true":
ret = True
elif value == "false":
ret = False
else:
# needed for pyexcel-ods3
ret = value
return ret
ODS_FORMAT_CONVERSION = {
"float": float,
"date": datetime.date,
"time": datetime.time,
"timedelta": datetime.timedelta,
"boolean": bool,
"percentage": float,
"currency": float,
}
ODS_WRITE_FORMAT_COVERSION = {
float: "float",
int: "float",
str: "string",
datetime.date: "date",
datetime.time: "time",
datetime.timedelta: "timedelta",
datetime.datetime: "datetime",
bool: "boolean",
}
VALUE_CONVERTERS = {
"float": float_value,
"date": date_value,
"time": time_value,
"timedelta": time_value,
"boolean": boolean_value,
"percentage": float_value,
}
def throw_exception(value):
raise exceptions.IntegerAccuracyLossError("%s is too big" % value)
def ods_float_value(value):
if value > constants.MAX_INTEGER:
raise exceptions.IntegerAccuracyLossError("%s is too big" % value)
return value
def ods_date_value(value):
return value.strftime("%Y-%m-%d")
def ods_time_value(value):
return value.strftime("PT%HH%MM%SS")
def ods_bool_value(value):
"""convert a boolean value to text"""
if value is True:
return "true"
else:
return "false"
def ods_timedelta_value(cell):
"""convert a cell value to time delta"""
hours = cell.days * 24 + cell.seconds // 3600
minutes = (cell.seconds // 60) % 60
seconds = cell.seconds % 60
return "PT%02dH%02dM%02dS" % (hours, minutes, seconds)
ODS_VALUE_CONVERTERS = {
"date": ods_date_value,
"time": ods_time_value,
"boolean": ods_bool_value,
"timedelta": ods_timedelta_value,
"float": ods_float_value,
"long": ods_float_value,
}
VALUE_TOKEN = {
"float": "value",
"date": "date-value",
"time": "time-value",
"boolean": "boolean-value",
"percentage": "value",
"currency": "value",
"timedelta": "time-value",
}
|
{
"content_hash": "607a666cf9e4c6fb449d1b999c30b22e",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 79,
"avg_line_length": 24.114754098360656,
"alnum_prop": 0.5584636301835486,
"repo_name": "chfw/pyexcel-io",
"id": "958ee1ed1603e8f6fcc5d426f196976a97ab1d3b",
"size": "5884",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pyexcel_io/service.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "129"
},
{
"name": "Makefile",
"bytes": "145"
},
{
"name": "Python",
"bytes": "109782"
},
{
"name": "Shell",
"bytes": "152"
}
],
"symlink_target": ""
}
|
"""Utilities for releasing values from a federated program to logs."""
from typing import Any
from absl import logging
from tensorflow_federated.python.common_libs import py_typecheck
from tensorflow_federated.python.core.impl.types import computation_types
from tensorflow_federated.python.program import release_manager
from tensorflow_federated.python.program import value_reference
class LoggingReleaseManager(release_manager.ReleaseManager):
"""A `tff.program.ReleaseManager` that releases values to logs.
A `tff.program.LoggingReleaseManager` is a utility for releasing values from a
federated program to logs and is used to release values from platform storage
to customer storage in a federated program.
Values are released to logs as string representations of Python objects. When
the value is released, if the value is a value reference or a structure
containing value references, each value reference is materialized.
"""
async def release(self,
value: Any,
type_signature: computation_types.Type,
key: Any = None) -> None:
"""Releases `value` from a federated program.
Args:
value: A materialized value, a value reference, or a structure of
materialized values and value references representing the value to
release.
type_signature: The `tff.Type` of `value`.
key: An optional value used to reference the released `value`.
"""
py_typecheck.check_type(type_signature, computation_types.Type)
materialized_value = await value_reference.materialize_value(value)
logging.info('Releasing')
logging.info(' value: %s', materialized_value)
logging.info(' type: %s', type_signature)
if key is not None:
logging.info(' key: %s', key)
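# Editor's note: a minimal usage sketch (hypothetical values, not part of the
# original module). It assumes an async context inside a federated program and
# that tensorflow is imported as tf:
#
#   manager = LoggingReleaseManager()
#   await manager.release(0.5, computation_types.TensorType(tf.float32), key=1)
#
# This logs the materialized value, its type signature, and the key via absl logging.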
|
{
"content_hash": "f5125994f7928f8978b70d7759f07b57",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 80,
"avg_line_length": 40.06666666666667,
"alnum_prop": 0.7249029395452025,
"repo_name": "tensorflow/federated",
"id": "ebe418ae11db0f99cb9d207ee3e284d0f6c04d03",
"size": "2402",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tensorflow_federated/python/program/logging_release_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "729470"
},
{
"name": "Dockerfile",
"bytes": "1983"
},
{
"name": "Python",
"bytes": "6700736"
},
{
"name": "Shell",
"bytes": "7123"
},
{
"name": "Starlark",
"bytes": "387382"
}
],
"symlink_target": ""
}
|
from typing import List, Optional, Tuple
import gdstk
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.component_layout import _parse_layer
@gf.cell
def trim(
component: Component,
domain: List[Tuple[float, float]],
precision: float = 1e-4,
return_ports: Optional[bool] = False,
) -> Component:
"""Trim a component by another geometry, preserving the component's layers and ports.
Useful to get a smaller component from a larger one for simulation.
Args:
component: Component(/Reference)
domain: list of array-like[N][2] representing the boundary of the component to keep.
precision: float Desired precision for rounding vertex coordinates.
return_ports: whether to return the included ports or not. Ports are always renamed to avoid inheritance conflicts.
Returns: New component with layers (and possibly ports) of the component restricted to the domain.
"""
domain_shape = gdstk.Polygon(domain)
c = Component()
for layer, layer_polygons in component.get_polygons(by_spec=True).items():
gds_layer, gds_datatype = _parse_layer(layer)
for layer_polygon in layer_polygons:
p = gdstk.boolean(
operand1=gdstk.Polygon(layer_polygon),
operand2=domain_shape,
operation="and",
precision=precision,
layer=gds_layer,
datatype=gds_datatype,
)
if p:
c.add_polygon(p, layer=layer)
if return_ports:
ports = []
i = 0
for port in component.get_ports():
if gdstk.inside([port.center], domain_shape):
new_name = f"{port.name[:1]}{i}"
ports.append(port.copy(new_name))
i += 1
c.add_ports(ports)
c.auto_rename_ports_layer_orientation()
return c
if __name__ == "__main__":
c = gf.components.straight_pin(length=10, taper=None)
trimmed_c = trim(component=c, domain=[[0, -5], [0, 5], [5, 5], [5, -5]])
trimmed_c.show(show_ports=True)
|
{
"content_hash": "e0701df49f2cc8f107d56f746730ea9d",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 123,
"avg_line_length": 33.15625,
"alnum_prop": 0.61875589066918,
"repo_name": "gdsfactory/gdsfactory",
"id": "961aa44dbe9512cc33e3358ed864a9a1b30016d9",
"size": "2122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gdsfactory/geometry/trim.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "605"
},
{
"name": "Dockerfile",
"bytes": "31"
},
{
"name": "Makefile",
"bytes": "4572"
},
{
"name": "Python",
"bytes": "2471982"
},
{
"name": "Shell",
"bytes": "671"
},
{
"name": "XS",
"bytes": "10045"
}
],
"symlink_target": ""
}
|
"""Functions for working with state-space models."""
import copy
import numpy as np
# pylint doesn't like capital letters in variable names, in contrast
# to control systems conventions.
# pylint: disable=invalid-name
class SignalListInvalidArgumentException(Exception):
"""Raised if an invalid argument is passed to a method of SignaList."""
pass
class SignalList(object):
"""Representation of a named list of signals.
A signal list consists of a non-repeating ordered set of signal
names (each representing a scalar valued signal).
"""
def __init__(self, names):
"""Constructor.
Args:
names: List of signal names.
Raises:
SignalListInvalidArgumentException: If there are repeated names.
"""
self.names = copy.copy(names)
if not all([isinstance(name, str) for name in self.names]):
raise SignalListInvalidArgumentException('Names must be strings.', names)
# Test that there are not repeated names.
if len(self.names) != len(set(self.names)):
raise SignalListInvalidArgumentException('Repeated signal name.', names)
self._names_to_indices = {
name: i for i, name in enumerate(self.names)
}
def __repr__(self):
return ', '.join(self.names)
def __getitem__(self, indices):
"""Select a subset of an input list.
Args:
indices: Can be a slice or a list. If a list, may contain
a mixture of integer entries and string entries.
Raises:
SignalListInvalidArgumentException: If indices is not a list.
Returns:
A new signal list consisting of a subset of the existing list
in the given order (units are tracked appropriately).
"""
if isinstance(indices, slice):
selection = range(len(self))[indices]
elif not isinstance(indices, list):
raise SignalListInvalidArgumentException(indices)
else:
selection = copy.copy(indices)
for i in range(len(selection)):
if isinstance(selection[i], int):
selection[i] = self.names[selection[i]]
return SignalList(selection)
def __len__(self):
return len(self.names)
def __add__(self, other):
return SignalList(self.names + other.names)
def GetIndices(self, names):
"""Get the integer indices corresponding to a list of signal names."""
return [self._names_to_indices[name] for name in names]
def AddSuffix(self, suffix):
return SignalList([name + suffix for name in self.names])
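# Editor's illustration (not part of the original module): a SignalList can be
# indexed by name and always yields a new SignalList in the requested order, e.g.
#
#   wind = SignalList(['wind_x', 'wind_y', 'wind_z'])
#   wind[['wind_z', 'wind_x']]     # SignalList: wind_z, wind_x
#   wind.GetIndices(['wind_y'])    # [1]
#   wind.AddSuffix('_cmd')         # SignalList: wind_x_cmd, wind_y_cmd, wind_z_cmd
#   wind + SignalList(['alpha'])   # SignalList: wind_x, wind_y, wind_z, alpha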
class SystemInvalidArgumentException(Exception):
"""Raised if an invalid argument is passed to a method of System."""
pass
class SystemBadDimensionException(Exception):
"""Raised if there is a mismatch in dimensions of arguments."""
pass
class SystemSamplePeriodMismatchException(Exception):
"""Raised if there is a mismatch in sample periods."""
pass
class SystemBadSamplePeriodException(Exception):
"""Raised if an illegal sample period is given."""
pass
class SystemIllPosedException(Exception):
"""Raised if an ill-posed feedback loop is closed."""
pass
class System(object):
"""Class for representing as state-space model with named inputs."""
def __init__(self, A, B, C, D, Ts, state_list, input_list, output_list):
"""Constructor for a new system.
If A, B, or C are empty, then all three must be empty and
the system represents a constant gain matrix determined
by D.
If A is non-empty and D is empty, then D is taken to be all zeros.
Args:
A: nx-by-nx matrix.
B: nx-by-nu matrix.
C: ny-by-nx matrix.
D: ny-by-nu matrix.
Ts: Sample period (zero indicates continuous time models, -1.0
indicates a DT model with unspecified sample period).
state_list: SignalList of length nx.
input_list: SignalList of length nu.
output_list: SignalList of length ny.
Raises:
SystemBadSamplePeriodException: If Ts has an invalid value.
SystemBadDimensionException: If the arguments have inconsistent
dimensions.
"""
if not state_list:
state_list = SignalList([])
if isinstance(state_list, list):
state_list = SignalList(state_list)
if isinstance(input_list, list):
input_list = SignalList(input_list)
if isinstance(output_list, list):
output_list = SignalList(output_list)
if Ts < 0.0 and Ts != -1.0:
raise SystemBadSamplePeriodException()
self.Ts = Ts
self._A = np.matrix(A)
self._B = np.matrix(B)
self._C = np.matrix(C)
self._D = np.matrix(D)
if self._A.size == 0:
if self._B.size > 0 or self._C.size > 0:
raise SystemBadDimensionException(A, B, C, D)
self.nx = 0
self.ny = self._D.shape[0]
self.nu = self._D.shape[1]
self._A = np.matrix(np.zeros((self.nx, self.nx)))
self._B = np.matrix(np.zeros((self.nx, self.nu)))
self._C = np.matrix(np.zeros((self.ny, self.nx)))
else:
self.nx = self._A.shape[0]
self.nu = self._B.shape[1]
self.ny = self._C.shape[0]
if self._D.size == 0:
self._D = np.matrix(np.zeros((self.ny, self.nu)))
if (not np.array_equal(self._A.shape, [self.nx, self.nx])
or not np.array_equal(self._B.shape, [self.nx, self.nu])
or not np.array_equal(self._C.shape, [self.ny, self.nx])
or not np.array_equal(self._D.shape, [self.ny, self.nu])):
raise SystemBadDimensionException(self._A.shape, self._B.shape,
self._C.shape, self._D.shape)
if not isinstance(state_list, SignalList) or len(state_list) != self.nx:
raise SystemBadDimensionException('Bad state list.', state_list)
if not isinstance(input_list, SignalList) or len(input_list) != self.nu:
raise SystemBadDimensionException('Bad input list.', input_list)
if not isinstance(output_list, SignalList) or len(output_list) != self.ny:
raise SystemBadDimensionException('Bad output list.', output_list)
self.states = state_list
self.inputs = input_list
self.outputs = output_list
def __repr__(self):
return ('%d-by-%d state-space model (Ts = %g) with %d states.' %
(self.ny, self.nu, self.Ts, self.nx)
+ '\nInputs: ' + self.inputs.__repr__()
+ '\nOutputs: ' + self.outputs.__repr__()
+ '\nA:\n' + str(self._A) + '\nB:\n' + str(self._B)
+ '\nC:\n' + str(self._C) + '\nD:\n' + str(self._D))
def __getitem__(self, io):
"""Construct a new system with a subset of the inputs and outputs.
Args:
io: A tuple of two lists of strings. The first list selects a subset
of the outputs of the system, the second selects a subset of the inputs.
Either can be the trivial slice ':'.
Raises:
SystemInvalidArgumentException: If an invalid argument is passed.
Returns:
A new System object with the reduced input and output dimensions.
"""
if not isinstance(io, tuple) or len(io) != 2:
raise SystemInvalidArgumentException('Index must be a tuple.')
if isinstance(io[1], slice):
if io[1] != slice(None, None, None):
raise SystemInvalidArgumentException('Only ":" slices are allowed.')
input_names = [self.inputs.names[i] for i in range(self.nu)]
else:
input_names = io[1]
if isinstance(io[0], slice):
if io[0] != slice(None, None, None):
raise SystemInvalidArgumentException('Only ":" slices are allowed.')
output_names = [self.outputs.names[i] for i in range(self.ny)]
else:
output_names = io[0]
(inputs, _, B, _) = self._PartitionInputs(self._B, input_names)
(outputs, _, C, _) = self._PartitionOutputs(self._C, output_names)
(_, _, D, _) = self._PartitionInputs(self._D, input_names)
(_, _, D, _) = self._PartitionOutputs(D, output_names)
return System(self._A, B, C, D, self.Ts, self.states, inputs, outputs)
def _PartitionOutputs(self, matrix, output_names):
"""Partition the rows of an ny-by-m matrix.
Args:
matrix: An ny-by-m matrix.
output_names: Names of the outputs to be included in the first
part of the partition.
Raises:
SystemBadDimensionException: If matrix has the wrong number of rows.
Returns:
A tuple (outputs, other_outputs, output_matrix,
other_output_matrix). The first two entries are SignalLists
containing the outputs in output_names and the other outputs.
The first matrix is the sub-matrix corresponding to the
output_names and the remaining rows are in other_output_matrix.
"""
if matrix.shape[0] != self.ny:
raise SystemBadDimensionException(matrix)
outputs = self.outputs[output_names]
output_indices = self.outputs.GetIndices(output_names)
other_output_indices = [
i for i in range(self.ny) if i not in output_indices
]
other_outputs = self.outputs[other_output_indices]
output_matrix = matrix[output_indices, :]
other_output_matrix = matrix[other_output_indices, :]
return (outputs, other_outputs, output_matrix, other_output_matrix)
def _PartitionInputs(self, matrix, input_names):
"""Partition the columns of an m-by-nu matrix.
Args:
matrix: An m-by-nu matrix.
input_names: Names of the inputs to be included in the first
part of the partition.
Raises:
SystemBadDimensionException: If matrix has the wrong number of columns.
Returns:
A tuple (inputs, other_inputs, input_matrix,
other_input_matrix). The first two entries are SignalLists
containing the inputs in input_names and the other inputs.
The first matrix is the sub-matrix corresponding to the
input_names and the remaining rows are in other_input_matrix.
"""
if matrix.shape[1] != self.nu:
raise SystemBadDimensionException(matrix)
inputs = self.inputs[input_names]
input_indices = self.inputs.GetIndices(input_names)
other_input_indices = [
i for i in range(self.nu) if i not in input_indices
]
other_inputs = self.inputs[other_input_indices]
input_matrix = matrix[:, input_indices]
other_input_matrix = matrix[:, other_input_indices]
return (inputs, other_inputs, input_matrix, other_input_matrix)
def GetStateSpaceModel(self):
"""Return the state-space description of the system."""
return self._A, self._B, self._C, self._D, self.Ts
def ReduceStates(self, state_names):
"""Returns a new system model that keeps only a subset of the states.
Args:
state_names: List of state names to retain.
Returns:
A new state-space model with only the desired states retained.
"""
state_indices = self.states.GetIndices(state_names)
states = self.states[state_names]
A = self._A[[[s] for s in state_indices], state_indices]
B = self._B[[s for s in state_indices], :]
C = self._C[:, state_indices]
return System(A, B, C, self._D, self.Ts, states, self.inputs, self.outputs)
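# Editor's illustration (hypothetical single-integrator model, not part of the
# original module): constructing and slicing a System.
#
#   sys_model = System([[0.0]], [[1.0]], [[1.0]], [], 0.0,
#                      ['position'], ['velocity_cmd'], ['position_meas'])
#   A, B, C, D, Ts = sys_model.GetStateSpaceModel()
#   sub = sys_model[['position_meas'], ['velocity_cmd']]  # outputs first, then inputs
#   reduced = sys_model.ReduceStates(['position'])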
|
{
"content_hash": "5a4d20a61118789a1604b80c9d47a54a",
"timestamp": "",
"source": "github",
"line_count": 326,
"max_line_length": 80,
"avg_line_length": 33.61349693251534,
"alnum_prop": 0.6555028289833912,
"repo_name": "google/makani",
"id": "edc64c56b45ba5f3fd26d1d46275cf688dbce62a",
"size": "11547",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analysis/control/systems.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "119408"
},
{
"name": "C",
"bytes": "20174258"
},
{
"name": "C++",
"bytes": "30512322"
},
{
"name": "CSS",
"bytes": "8921"
},
{
"name": "Dockerfile",
"bytes": "1381"
},
{
"name": "Emacs Lisp",
"bytes": "1134"
},
{
"name": "HTML",
"bytes": "65745"
},
{
"name": "Java",
"bytes": "1558475"
},
{
"name": "JavaScript",
"bytes": "130727"
},
{
"name": "Jupyter Notebook",
"bytes": "1154728"
},
{
"name": "MATLAB",
"bytes": "1026162"
},
{
"name": "Makefile",
"bytes": "2798"
},
{
"name": "Objective-C",
"bytes": "62972"
},
{
"name": "Perl",
"bytes": "870724"
},
{
"name": "Python",
"bytes": "5552781"
},
{
"name": "RPC",
"bytes": "195736"
},
{
"name": "Roff",
"bytes": "2567875"
},
{
"name": "SWIG",
"bytes": "8663"
},
{
"name": "Shell",
"bytes": "297941"
},
{
"name": "Starlark",
"bytes": "462998"
},
{
"name": "Vim Script",
"bytes": "2281"
},
{
"name": "XC",
"bytes": "50398"
},
{
"name": "XS",
"bytes": "49289"
}
],
"symlink_target": ""
}
|
import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_mediastore
region = "eu-west-1"
@mock_mediastore
def test_create_container_succeeds():
client = boto3.client("mediastore", region_name=region)
response = client.create_container(
ContainerName="Awesome container!", Tags=[{"Key": "customer"}]
)
container = response["Container"]
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
container["ARN"].should.equal(f"arn:aws:mediastore:container:{container['Name']}")
container["Name"].should.equal("Awesome container!")
container["Status"].should.equal("CREATING")
@mock_mediastore
def test_describe_container_succeeds():
client = boto3.client("mediastore", region_name=region)
create_response = client.create_container(
ContainerName="Awesome container!", Tags=[{"Key": "customer"}]
)
container_name = create_response["Container"]["Name"]
response = client.describe_container(ContainerName=container_name)
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
container = response["Container"]
container["ARN"].should.equal(f"arn:aws:mediastore:container:{container_name}")
container["Name"].should.equal("Awesome container!")
container["Status"].should.equal("ACTIVE")
@mock_mediastore
def test_list_containers_succeeds():
client = boto3.client("mediastore", region_name=region)
client.create_container(
ContainerName="Awesome container!", Tags=[{"Key": "customer"}]
)
list_response = client.list_containers(NextToken="next-token", MaxResults=123)
containers_list = list_response["Containers"]
len(containers_list).should.equal(1)
client.create_container(
ContainerName="Awesome container2!", Tags=[{"Key": "customer"}]
)
list_response = client.list_containers(NextToken="next-token", MaxResults=123)
containers_list = list_response["Containers"]
len(containers_list).should.equal(2)
@mock_mediastore
def test_describe_container_raises_error_if_container_does_not_exist():
client = boto3.client("mediastore", region_name=region)
with pytest.raises(ClientError) as ex:
client.describe_container(ContainerName="container-name")
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
@mock_mediastore
def test_put_lifecycle_policy_succeeds():
client = boto3.client("mediastore", region_name=region)
container_response = client.create_container(
ContainerName="container-name", Tags=[{"Key": "customer"}]
)
container = container_response["Container"]
client.put_lifecycle_policy(
ContainerName=container["Name"], LifecyclePolicy="lifecycle-policy"
)
response = client.get_lifecycle_policy(ContainerName=container["Name"])
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
response["LifecyclePolicy"].should.equal("lifecycle-policy")
@mock_mediastore
def test_put_lifecycle_policy_raises_error_if_container_does_not_exist():
client = boto3.client("mediastore", region_name=region)
with pytest.raises(ClientError) as ex:
client.put_lifecycle_policy(
ContainerName="container-name", LifecyclePolicy="lifecycle-policy"
)
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
@mock_mediastore
def test_get_lifecycle_policy_raises_error_if_container_does_not_exist():
client = boto3.client("mediastore", region_name=region)
with pytest.raises(ClientError) as ex:
client.get_lifecycle_policy(ContainerName="container-name")
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
@mock_mediastore
def test_get_lifecycle_policy_raises_error_if_container_does_not_have_lifecycle_policy():
client = boto3.client("mediastore", region_name=region)
client.create_container(ContainerName="container-name", Tags=[{"Key": "customer"}])
with pytest.raises(ClientError) as ex:
client.get_lifecycle_policy(ContainerName="container-name")
ex.value.response["Error"]["Code"].should.equal("PolicyNotFoundException")
@mock_mediastore
def test_put_container_policy_succeeds():
client = boto3.client("mediastore", region_name=region)
container_response = client.create_container(
ContainerName="container-name", Tags=[{"Key": "customer"}]
)
container = container_response["Container"]
response = client.put_container_policy(
ContainerName=container["Name"], Policy="container-policy"
)
response = client.get_container_policy(ContainerName=container["Name"])
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
response["Policy"].should.equal("container-policy")
@mock_mediastore
def test_put_container_policy_raises_error_if_container_does_not_exist():
client = boto3.client("mediastore", region_name=region)
with pytest.raises(ClientError) as ex:
client.put_container_policy(
ContainerName="container-name", Policy="container-policy"
)
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
@mock_mediastore
def test_get_container_policy_raises_error_if_container_does_not_exist():
client = boto3.client("mediastore", region_name=region)
with pytest.raises(ClientError) as ex:
client.get_container_policy(ContainerName="container-name")
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
@mock_mediastore
def test_get_container_policy_raises_error_if_container_does_not_have_container_policy():
client = boto3.client("mediastore", region_name=region)
client.create_container(ContainerName="container-name", Tags=[{"Key": "customer"}])
with pytest.raises(ClientError) as ex:
client.get_container_policy(ContainerName="container-name")
ex.value.response["Error"]["Code"].should.equal("PolicyNotFoundException")
@mock_mediastore
def test_put_metric_policy_succeeds():
client = boto3.client("mediastore", region_name=region)
container_response = client.create_container(
ContainerName="container-name", Tags=[{"Key": "customer"}]
)
container = container_response["Container"]
response = client.put_metric_policy(
ContainerName=container["Name"],
MetricPolicy={"ContainerLevelMetrics": "ENABLED"},
)
response = client.get_metric_policy(ContainerName=container["Name"])
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
response["MetricPolicy"].should.equal({"ContainerLevelMetrics": "ENABLED"})
@mock_mediastore
def test_put_metric_policy_raises_error_if_container_does_not_exist():
client = boto3.client("mediastore", region_name=region)
with pytest.raises(ClientError) as ex:
client.put_metric_policy(
ContainerName="container-name",
MetricPolicy={"ContainerLevelMetrics": "ENABLED"},
)
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
@mock_mediastore
def test_get_metric_policy_raises_error_if_container_does_not_exist():
client = boto3.client("mediastore", region_name=region)
with pytest.raises(ClientError) as ex:
client.get_metric_policy(ContainerName="container-name")
ex.value.response["Error"]["Code"].should.equal("ResourceNotFoundException")
@mock_mediastore
def test_get_metric_policy_raises_error_if_container_does_not_have_metric_policy():
client = boto3.client("mediastore", region_name=region)
client.create_container(ContainerName="container-name", Tags=[{"Key": "customer"}])
with pytest.raises(ClientError) as ex:
client.get_metric_policy(ContainerName="container-name")
ex.value.response["Error"]["Code"].should.equal("PolicyNotFoundException")
@mock_mediastore
def test_list_tags_for_resource():
client = boto3.client("mediastore", region_name=region)
tags = [{"Key": "customer"}]
create_response = client.create_container(
ContainerName="Awesome container!", Tags=tags
)
container = create_response["Container"]
response = client.list_tags_for_resource(Resource=container["Name"])
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
response["Tags"].should.equal(tags)
@mock_mediastore
def test_list_tags_for_resource_return_none_if_no_tags():
client = boto3.client("mediastore", region_name=region)
create_response = client.create_container(ContainerName="Awesome container!")
container = create_response["Container"]
response = client.list_tags_for_resource(Resource=container["Name"])
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
response.get("Tags").should.equal(None)
@mock_mediastore
def test_list_tags_for_resource_return_error_for_unknown_resource():
client = boto3.client("mediastore", region_name=region)
with pytest.raises(ClientError) as ex:
client.list_tags_for_resource(Resource="not_existing")
ex.value.response["Error"]["Code"].should.equal("ContainerNotFoundException")
@mock_mediastore
def test_delete_container():
client = boto3.client("mediastore", region_name=region)
container_name = "Awesome container!"
create_response = client.create_container(ContainerName=container_name)
container = create_response["Container"]
response = client.delete_container(ContainerName=container["Name"])
response["ResponseMetadata"]["HTTPStatusCode"].should.equal(200)
containers = client.list_containers(NextToken="next-token")["Containers"]
container_exists = any(d["Name"] == container_name for d in containers)
container_exists.should.equal(False)
@mock_mediastore
def test_delete_container_raise_error_if_container_not_found():
client = boto3.client("mediastore", region_name=region)
client.create_container(ContainerName="Awesome container!")
with pytest.raises(ClientError) as ex:
client.delete_container(ContainerName="notAvailable")
ex.value.response["Error"]["Code"].should.equal("ContainerNotFoundException")
|
{
"content_hash": "f72e07134cf3ddc9a3fe76c0a6ced69f",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 89,
"avg_line_length": 41.375510204081635,
"alnum_prop": 0.7226003748643582,
"repo_name": "spulec/moto",
"id": "8c5e0871f036028e6be59e7352fbd7ed0e296cbc",
"size": "10137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_mediastore/test_mediastore.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "255"
},
{
"name": "HTML",
"bytes": "5983"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "1424"
},
{
"name": "Jinja",
"bytes": "2502"
},
{
"name": "Makefile",
"bytes": "2284"
},
{
"name": "Python",
"bytes": "14737868"
},
{
"name": "Ruby",
"bytes": "188"
},
{
"name": "Scala",
"bytes": "782"
},
{
"name": "Shell",
"bytes": "5515"
}
],
"symlink_target": ""
}
|
"""
Route groups are classes that allow you to group a set of routes together.
.. currentmodule:: kyoukai.routegroup
"""
import collections
import inspect
import typing
def get_rg_bp(group: 'RouteGroup'):
"""
Gets the :class:`~.Blueprint` created from a :class:`~.RouteGroup`.
"""
return getattr(group, "_{0.__name__}__blueprint".format(type(group)))
class RouteGroupType(type):
"""
The metaclass for a route group.
    This is responsible for passing the class keyword arguments on to the generated Blueprint.
"""
def __new__(mcs, name, bases, class_body, **kwargs):
"""
Override of `__new__` to ensure the __init__ signature is compatible.
"""
return super().__new__(mcs, name, bases, class_body)
def __init__(self, name, bases, class_body, **kwargs):
"""
Override of `__init__` to store the blueprint params.
"""
super().__init__(name, bases, class_body)
self._bp_kwargs = kwargs
def _init_blueprint(self, obb):
"""
Initializes the Blueprint used by this route group.
        :param obb: The route group instance to initialize.
"""
# circular imports tm
from kyoukai.blueprint import Blueprint
bp = Blueprint(self.__name__, **self._bp_kwargs)
# get all the method types that have a `.route` attr on them
for name, value in inspect.getmembers(obb):
# unwrap methods
if not hasattr(value, "__func__"):
continue
func = value.__func__
if getattr(func, "in_group", False) is True:
# check the delegate type
if func.rg_delegate == "route":
# wrap value, but use func attrs
# this preserves the method and `self`
rtt = bp.wrap_route(value, **func.route_kwargs)
rtt.routes = func.routes
rtt.bp = bp
# copy hooks
for type_, hooks in func.route_hooks.items():
for hook in hooks:
rtt.add_hook(type_, hook)
bp.routes.append(rtt)
elif func.rg_delegate == "errorhandler":
# add the error handler using `errorhandler_code`
for code in func.errorhandler_codes:
bp.add_errorhandler(value, code)
elif func.rg_delegate == "hook":
# add the hook
bp.add_hook(func.hook_type, value)
setattr(obb, "_{.__name__}__blueprint".format(self), bp)
def __call__(self, *args, **kwargs):
obb = object.__new__(self)
obb.__init__(*args, **kwargs)
self._init_blueprint(obb)
return obb
def route(url: str, methods: typing.Iterable[str] = ("GET", "HEAD"), **kwargs):
"""
A companion function to the RouteGroup class. This follows :meth:`.Blueprint.route` in
terms of arguments, and marks a function as a route inside the class.
This will return the original function, with some attributes attached:
- ``in_group``: Marks the function as in the route group.
- ``rg_delegate``: Internal. The type of function inside the group this is.
- ``route_kwargs``: Keyword arguments to provide to ``wrap_route``.
- ``route_url``: The routing URL to provide to ``add_route``.
- ``route_methods``: The methods for the route.
- ``route_hooks``: A defaultdict of route-specific hooks.
Additionally, the following methods are added.
- ``hook``: A decorator that adds a hook of type ``type_``.
- ``before_request``: A decorator that adds a ``pre`` hook.
- ``after_request``: A decorator that adds a ``post`` hook.
.. versionadded:: 2.1.1
.. versionchanged:: 2.1.3
Added the ability to add route-specific hooks.
.. versionchanged:: 2.2.0
Now accepts an already edited function as the function to decorate - this will add a new \
routing url and method pair to the :attr:`.Route.routes`.
.. versionchanged:: 2.2.2
Default methods changed to GET and HEAD.
:param url: The routing URL of the route.
:param methods: An iterable of methods for the route.
"""
def inner(func):
# add the required attrs which are used on a scan later
func.in_group = True
func.rg_delegate = "route"
func.route_kwargs = kwargs
# try and append to the routes
# failing that, create a new list
try:
func.routes.append((url, methods))
except AttributeError:
func.routes = [(url, methods)]
if not hasattr(func, "route_hooks"):
func.route_hooks = collections.defaultdict(lambda: [])
# helper for route-specific hooks.
def hook(type_: str):
def _inner2(hookfunc):
func.route_hooks[type_].append(hookfunc)
return hookfunc
return _inner2
func.hook = hook
func.before_request = hook("pre")
func.after_request = hook("post")
return func
return inner
def errorhandler(startcode: int, endcode: int = None, step: int = None):
"""
A companion function to the RouteGroup class. This follows :meth:`.Blueprint.errorhandler` in
terms of arguments.
:param startcode: The error code to handle, for example 404.
This also represents the start of an error range, if endcode is not None.
:param endcode: The end of the error code range to handle. Error handlers will be added
for all requests between startcode and endcode.
:param step: The step for the error handler range.
"""
def inner(func):
func.in_group = True
func.rg_delegate = "errorhandler"
# less code here
if endcode is None:
codes = [startcode]
else:
codes = range(startcode, endcode, step or 1)
for code in codes:
try:
func.errorhandler_codes.append(code)
except AttributeError:
func.errorhandler_codes = [code]
return func
return inner
# hooks
def hook(type_: str):
"""
Marks a function as a hook.
:param type_: The type of hook to mark.
"""
def inner(func):
func.in_group = True
func.rg_delegate = "hook"
func.hook_type = type_
return func
return inner
def before_request(func):
"""
Helper decorator to mark a function as a pre-request hook.
"""
return hook("pre")(func)
def after_request(func):
"""
Helper decorator to mark a function as a post-request hook.
"""
return hook("post")(func)
class RouteGroup(object, metaclass=RouteGroupType):
"""
A route group is a class that contains multiple methods that are decorated with the route
decorator. They produce a blueprint that can be added to the tree that includes all methods
in the route group.
.. code-block:: python
class MyGroup(RouteGroup, prefix="/api/v1"):
def __init__(self, something: str):
self.something = something
@route("/ping")
async def ping(self, ctx: HTTPRequestContext):
return '{"response": self.something}'
Blueprint parameters can be passed in the class call.
To add the route group as a blueprint, use
:meth:`.Blueprint.add_route_group(MyGroup, *args, **kwargs)`.
"""
|
{
"content_hash": "79c63920338bb2f929aec69bca23537d",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 98,
"avg_line_length": 31.71311475409836,
"alnum_prop": 0.5701731713621091,
"repo_name": "SunDwarf/Kyoukai",
"id": "5b4837893c6941241fd59977abb65857dd763483",
"size": "7738",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kyoukai/routegroup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "122943"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="opacity", parent_name="scattermapbox.cluster", **kwargs
):
super(OpacityValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
max=kwargs.pop("max", 1),
min=kwargs.pop("min", 0),
**kwargs,
)
|
{
"content_hash": "96f2369d08297502785c872b8a0594a1",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 82,
"avg_line_length": 34.5,
"alnum_prop": 0.5778985507246377,
"repo_name": "plotly/plotly.py",
"id": "a6d4ee5b687885127984b8443cb268113bd6e419",
"size": "552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/scattermapbox/cluster/_opacity.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
"""Tests and helpers for PCI."""
import bits
import testsuite
def pci_read_helper(bus, dev, fn, reg, pci_read_func, bytes=None, mask=~0, shift=0, **extra_args):
size = bits.addr_alignment(reg)
if bytes is not None:
size = bytes
value = pci_read_func(bus, dev, fn, reg, bytes=size, **extra_args)
value = (value >> shift) & mask
desc = "PCI {:#04x}:{:#04x}.{:#03x} ".format(bus, dev, fn)
if mask == ~0:
if shift == 0:
desc += "reg {:#04x} = {:#x}".format(reg, value)
else:
desc += "(reg {:#04x}) >> {} = {:#x}".format(reg, shift, value)
else:
desc += "((reg {:#04x}) >> {}) & {:#x} = {:#x}".format(reg, shift, mask, value)
return value, desc
def test_pci(text, bus, dev, fn, reg, expected_value, bytes=None, mask=~0, shift=0):
value, desc = pci_read_helper(bus, dev, fn, reg, pci_read_func=bits.pci_read, bytes=bytes, mask=mask, shift=shift)
status = value == expected_value
desc += " (Expected {:#x})".format(expected_value)
if text:
testsuite.test(text, status)
testsuite.print_detail(desc)
else:
testsuite.test(desc, status)
return status
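# Editor's note: illustrative invocations (hypothetical device and values, not
# from the original suite). bytes controls the access width, while mask/shift
# extract a bit field from the register that was read:
#
#   # 16-bit vendor ID at config offset 0x00 of bus 0, device 0, function 0
#   test_pci("Vendor ID", 0, 0, 0, 0x00, expected_value=0x8086, bytes=2)
#   # the class code occupies bits 8-31 of the dword at offset 0x08
#   test_pci("Class code", 0, 0, 0, 0x08, expected_value=0x060000, bytes=4, shift=8)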
|
{
"content_hash": "44d48bc32c9b4b83430f9d69255061aa",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 118,
"avg_line_length": 34.76470588235294,
"alnum_prop": 0.5693739424703892,
"repo_name": "biosbits/bits",
"id": "d133ab3de9f1b59431de5f41e633713707101812",
"size": "2751",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/testpci.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "11674"
},
{
"name": "C",
"bytes": "291011"
},
{
"name": "C++",
"bytes": "11894"
},
{
"name": "Makefile",
"bytes": "10177"
},
{
"name": "Python",
"bytes": "738083"
},
{
"name": "Shell",
"bytes": "3247"
}
],
"symlink_target": ""
}
|
from setuptools import Extension, setup
setup(
name="sdist_native_ext",
version="1.0",
ext_modules=[Extension("ext_module", ["ext_module.c"])]
)
|
{
"content_hash": "67e8783fd402b08a73495fd246589d54",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 59,
"avg_line_length": 22.571428571428573,
"alnum_prop": 0.6582278481012658,
"repo_name": "chaquo/chaquopy",
"id": "86bab260354d3a9979d7151923e3ff87a20bc443",
"size": "158",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "product/gradle-plugin/src/test/integration/packages/src/sdist_native_ext/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "30"
},
{
"name": "C",
"bytes": "174108"
},
{
"name": "CMake",
"bytes": "1897"
},
{
"name": "CSS",
"bytes": "991"
},
{
"name": "Cython",
"bytes": "251545"
},
{
"name": "Dockerfile",
"bytes": "6938"
},
{
"name": "Groovy",
"bytes": "42472"
},
{
"name": "Java",
"bytes": "159387"
},
{
"name": "Kotlin",
"bytes": "697"
},
{
"name": "Python",
"bytes": "8043408"
},
{
"name": "Roff",
"bytes": "232"
},
{
"name": "Shell",
"bytes": "53150"
},
{
"name": "Starlark",
"bytes": "2018"
}
],
"symlink_target": ""
}
|
from core.clients import ProximityClient, ProximityClientConnectionError
from django.core.management.base import CommandError, NoArgsCommand
class Command(NoArgsCommand):
help = "Flushes the proximity server."
can_import_settings = True
def handle_noargs(self, **options):
client = ProximityClient()
try:
client.flush()
except ProximityClientConnectionError, e:
raise CommandError(e)
else:
self.stdout.write('Proximity server flushed.\n')
|
{
"content_hash": "1a00a38d198f46619b5daa79c8a4c4ec",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 72,
"avg_line_length": 33.3125,
"alnum_prop": 0.6716697936210131,
"repo_name": "socialdevices/manager",
"id": "5a92a2d61dda8febcb3d00b73857843e030ff257",
"size": "533",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "proximity/management/commands/flushproximityserver.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "58821"
},
{
"name": "Python",
"bytes": "759639"
},
{
"name": "Shell",
"bytes": "134"
},
{
"name": "Visual Basic",
"bytes": "982"
}
],
"symlink_target": ""
}
|
"""A collection of agents written in Python"""
from __future__ import unicode_literals, print_function
from cyclus.agents import Region, Institution, Facility
from cyclus import typesystem as ts
class NullRegion(Region):
"""A simple do nothing region."""
class NullInst(Institution):
"""An instition that owns facilities in the simulation but exhibits
null behavior. No parameters are given when using the null institution.
"""
class Sink(Facility):
"""This sink facility accepts specified amount of commodity."""
in_commods = ts.VectorString(
doc="commodities that the sink facility accepts.",
tooltip="input commodities for the sink",
uilabel="List of Input Commodities",
uitype=["oneormore", "incommodity"],
)
recipe = ts.String(
tooltip="input/request recipe name",
doc="Name of recipe to request. If empty, sink requests material no "
"particular composition.",
default="",
uilabel="Input Recipe",
uitype="recipe",
)
max_inv_size = ts.Double(
default=1e299,
doc="total maximum inventory size of sink facility",
uilabel= "Maximum Inventory",
tooltip="sink maximum inventory size",
)
capacity = ts.Double(
doc="capacity the sink facility can accept at each time step",
uilabel="Maximum Throughput",
tooltip="sink capacity",
default=100.0,
)
inventory = ts.ResourceBuffInv(capacity='max_inv_size')
def get_material_requests(self):
if len(self.recipe) == 0:
comp = {}
else:
comp = self.context.get_recipe(self.recipe)
mat = ts.Material.create_untracked(self.capacity, comp)
port = {"commodities": {c: mat for c in self.in_commods},
"constraints": self.capacity}
return port
def get_product_requests(self):
prod = ts.Product.create_untracked(self.capacity, "")
port = {"commodities": {c: prod for c in self.in_commods},
"constraints": self.capacity}
return port
def accept_material_trades(self, responses):
for mat in responses.values():
self.inventory.push(mat)
def accept_product_trades(self, responses):
for prod in responses.values():
self.inventory.push(prod)
class Source(Facility):
"""A minimum implementation source facility that provides a commodity with
a given capacity.
"""
commod = ts.String(
doc="commodity that the source facility supplies",
tooltip="source commodity",
schematype="token",
uilabel="Commodity",
uitype="outcommodity",
)
recipe_name = ts.String(
doc="Recipe name for source facility's commodity. "
"If empty, source supplies material with requested compositions.",
tooltip="commodity recipe name",
schematype="token",
default="",
uilabel="Recipe",
uitype="recipe",
)
capacity = ts.Double(
doc="amount of commodity that can be supplied at each time step",
uilabel="Maximum Throughput",
tooltip="source capacity",
)
def build(self, parent):
super(Source, self).build(parent)
if self.lifetime >= 0:
self.context.schedule_decom(self, self.exit_time)
def get_material_bids(self, requests):
reqs = requests.get(self.commod, None)
if not reqs:
return
if len(self.recipe_name) == 0:
bids = [req for req in reqs]
else:
recipe_comp = self.context.get_recipe(self.recipe_name)
bids = []
for req in reqs:
qty = min(req.target.quantity, self.capacity)
mat = ts.Material.create_untracked(qty, recipe_comp)
bids.append({'request': req, 'offer': mat})
return {'bids': bids, 'constraints': self.capacity}
def get_material_trades(self, trades):
responses = {}
if len(self.recipe_name) == 0:
for trade in trades:
mat = ts.Material.create(self, trade.amt, trade.request.target.comp())
responses[trade] = mat
else:
recipe_comp = self.context.get_recipe(self.recipe_name)
for trade in trades:
mat = ts.Material.create(self, trade.amt, recipe_comp)
responses[trade] = mat
return responses
|
{
"content_hash": "d3af5199a39f12e19618da74cf047222",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 86,
"avg_line_length": 34.87596899224806,
"alnum_prop": 0.601911535896866,
"repo_name": "Baaaaam/cyclus",
"id": "8f2ac2a7d957eda2d1d73c261f767998fc5975a2",
"size": "4499",
"binary": false,
"copies": "7",
"ref": "refs/heads/allow_0cap_constrain",
"path": "cyclus/pyagents.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "3991194"
},
{
"name": "CMake",
"bytes": "139563"
},
{
"name": "Python",
"bytes": "469355"
},
{
"name": "Shell",
"bytes": "4685"
}
],
"symlink_target": ""
}
|
from .filter_declarations import * # NOQA
|
{
"content_hash": "6b57868a2193dbd98b4acbdb31ec9847",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 42,
"avg_line_length": 43,
"alnum_prop": 0.7441860465116279,
"repo_name": "urandu/mfl_api",
"id": "751849753aeed99eb4d654a97adcbfce7de16778",
"size": "43",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "common/filters/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "34"
},
{
"name": "HTML",
"bytes": "17307"
},
{
"name": "JavaScript",
"bytes": "1285"
},
{
"name": "Python",
"bytes": "444717"
},
{
"name": "Ruby",
"bytes": "1251"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
__version__ = '1.0.10'
|
{
"content_hash": "b7effb7978351ee3ebaec0970d7b4de4",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 39,
"avg_line_length": 21.333333333333332,
"alnum_prop": 0.65625,
"repo_name": "RobRuana/sideboard",
"id": "885a7961debc721e0e37bf573a0fb2eeebb5117a",
"size": "64",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sideboard/_version.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2297"
},
{
"name": "JavaScript",
"bytes": "845593"
},
{
"name": "Python",
"bytes": "355561"
},
{
"name": "Shell",
"bytes": "4377"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import argparse
import json
import os
_FILE_URL = 'https://dl.google.com/dl/android/maven2/com/android/tools/desugar_jdk_libs/1.1.1/desugar_jdk_libs-1.1.1.jar'
_FILE_NAME = 'desugar_jdk_libs-1.1.1.jar'
_FILE_VERSION = '1.1.1'
def do_latest():
print(_FILE_VERSION)
def get_download_url(version):
if _FILE_URL.endswith('.jar'):
ext = '.jar'
elif _FILE_URL.endswith('.aar'):
ext = '.aar'
else:
raise Exception('Unsupported extension for %s' % _FILE_URL)
partial_manifest = {
'url': [_FILE_URL],
'name': [_FILE_NAME],
'ext': ext,
}
print(json.dumps(partial_manifest))
def main():
ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
latest = sub.add_parser("latest")
latest.set_defaults(func=lambda _opts: do_latest())
download = sub.add_parser("get_url")
download.set_defaults(
func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))
opts = ap.parse_args()
opts.func(opts)
if __name__ == '__main__':
main()
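# Editor's note: illustrative command lines for this 3pp fetch script (derived
# from the argument parser above, not from external documentation):
#
#   $ python fetch.py latest
#   1.1.1
#   $ _3PP_VERSION=1.1.1 python fetch.py get_url
#   {"url": ["https://dl.google.com/.../desugar_jdk_libs-1.1.1.jar"], "name": ["desugar_jdk_libs-1.1.1.jar"], "ext": ".jar"}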
|
{
"content_hash": "248e2f44dcb7a9a69aae4bf37647c695",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 121,
"avg_line_length": 22.6875,
"alnum_prop": 0.6097337006427915,
"repo_name": "ric2b/Vivaldi-browser",
"id": "20341e72417fdac483c27beb87c5b7b1a51ffeec",
"size": "1377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs/3pp/fetch.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import warnings
import math
from ...maths import *
from ..core import *
from .core import *
from .common import AcceleratableElement
class _BezierBasePathElement(AcceleratableElement):
"""
An internal base class for BezierPathElements and ReparametrizableBezierPathElements.
"""
def initialize(self, **config):
control_polygon = list(map(Vector2D, config["controlPolygon"]))
if len(control_polygon) < 2:
raise ConfigurationError(
"Must have at least 2 points in controlPolygon.")
# We need to store a separate time parameter because a bezier
# curve ranges from t = 0 to 1.
initial_time = config.get("initialTime", 0)
duration = config.get("duration")
# The number of times to repeat traversal of the path.
repeats = config.get("repeatCount", 1)
self.control_polygon = control_polygon
self.duration = duration
self._time = initial_time
self._transition_time = 0
self._transition_amount = 0
self.repeats = repeats
self._current_iteration = 0
class BezierPathElement(_BezierBasePathElement):
"""
A PathElement that represents motion in a bezier curve of variable degree.
"""
def initialize(self, **config):
super().initialize(**config)
duration = self.duration
if duration is not None:
# If a duration was set, define the speed.
speed = (1 - self._time)/duration * globalSystem._timestep
else:
# Else, find the speed in the config.
if "speed" in config:
speed = config["speed"]
else:
speed = config["initialSpeed"]
self.speed = speed
def updateDisplacement(self):
"""
Update this PathElement's displacement.
"""
self.displacement = bezier(self.control_polygon, self._time)
self._time += self.speed
# Transition the speed (if necessary)
self._transition()
if self.duration is not None:
# If we have completed a single iteration.
if self._time >= 1 or self._time <= 0 and self._current_iteration < self.repeats:
self._current_iteration += 1
# Reversing the control polygon reverses the direction of traversal of
# the bezier curve.
self.control_polygon = self.control_polygon[::-1]
self._time = 0
# If we have completed all iterations.
if self._current_iteration == self.repeats:
self.done = True
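# Editor's illustration (hypothetical configuration, not from the original
# project): a quadratic bezier path traversed twice (there and back) over two
# seconds could be configured with
#
#   {"controlPolygon": [(0, 0), (50, 100), (100, 0)],
#    "duration": 2,
#    "repeatCount": 2}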
# class ReparametrizableBezierPathElement(_BezierBasePathElement):
# def initialize(self, **config):
# super().initialize(**config)
# if self.duration is None:
# # This is really arbitrary. Figure out what you want to do here.
# self.duration = 1
# duration = self.duration
# arclength = bezierArclength(self.control_polygon)
# self.arclength = arclength
# reparametrization = config.get("reparametrization")
# if reparametrization == "fixed":
# # reparametrization = lambda t : 10*math.sin(t/10)
# reparametrization = lambda t : 1
# print("Here's some shit.")
# print()
# for i in range(100):
# print(bezierDerivative(self.control_polygon, i/100).magnitude())
# print()
# # Normalize the reparametrzation so that its integral is equal to the
# # arclength of the bezier curve.
# #
# # Here's a piece of confusing math. In ``normalizeParametrization`` the second
# # parameter is the arclength. So why wouldn't we want to send the arclength of
# # the bezier curve in as that parameter? Well, I'm not entirely sure.
# reparametrization = normalizeParametrization(reparametrization, 0, 1)
# step_size = arclength / duration * globalSystem._timestep
# self.reparametrization = BezierReparametrizer(
# self.control_polygon, reparametrization, step_size,
# initial=bezierArclength(self.control_polygon, self._time)
# )
# def stop(self):
# """Completely hault all motion."""
# self.reparametrization.step_size = 0
# self._transition_time = 0
# def setSpeed(self, speed):
# """Set a new speed."""
# self.reparametrization.step_size = speed * self.arclength / self.duration \
# * globalSystem._timestep
# self._transition_time = 0
# def transitionToSpeed(self, new_speed, time):
# """Smoothly transition to a new speed over a given period of time."""
# new_speed = new_speed * self.arclength / self.duration * globalSystem._timestep
# self._transition_amount = (
# new_speed - self.speed) / time * globalSystem._timestep
# self._transition_time = time
# def _transition(self):
# """
# Update the speed if we are in the middle of transitioning.
# """
# if self._transition_time > 1e-9: # Accounts for floating point errors
# self.reparametrization.step_size += self._transition_amount
# self._transition_time -= globalSystem._timestep
# def updateDisplacement(self):
# """
# Update this PathElement's displacement.
# """
# self.displacement = self.reparametrization.getNext()
# # self._time += self.reparametrization._yn
# # Transition the speed (if necessary)
# self._transition()
# # If we have completed a single iteration.
# if self._time >= 1 or self._time < 0:
# self._current_iteration += 1
# # Reversing the control polygon reverses the direction of traversal of
# # the bezier curve.
# # self.reparametrization.reverse()
# # self._time = 0
# # If we have completed all iterations.
# if self._current_iteration == self.repeats:
# self.done = True
class CompositeBezierPathElement(PathElement):
def initialize(self, **config):
self.control_polygon = list(map(Vector2D, config["controlPolygon"]))
if len(self.control_polygon) < 2:
raise ConfigurationError(
"Must have at least 2 points in controlPolygon.")
self.weight_polygon = list(map(Vector2D, config["weightPolygon"]))
if len(self.weight_polygon) != len(self.control_polygon):
raise ConfigurationError(
"weightPolygon and controlPolygon must have the same number of points.")
self.duration = config["duration"]
self._max_time = len(self.control_polygon) - 1
self._speed = self._max_time/self.duration * globalSystem._timestep
self._time = 0
self._origin = Vector2D.origin
self._current_bezier_num = 1
self._current_bezier = current_bezier = [
self.control_polygon[0],
self.control_polygon[0] + self.weight_polygon[0],
self.control_polygon[1] - self.weight_polygon[1],
self.control_polygon[1]
]
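        # Editor's note (added comment): each pair of consecutive control points is
        # joined by a cubic segment whose inner control points are offset by the
        # weight polygon. For example, controlPolygon [(0, 0), (2, 2)] with
        # weightPolygon [(1, 0), (1, 0)] yields the segment
        # [(0, 0), (1, 0), (1, 2), (2, 2)].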
fixed_speed = config.get("fixedSpeed", False)
if fixed_speed:
warnings.warn(
"Fixed speed traversal of bezier curves is " \
"an expensive operation. Do not use in excess.")
self._distance_increment = compositeBezierArclength(
self.control_polygon, self.weight_polygon) \
/ self.duration * globalSystem._timestep
# Runge-Kutta ODE solver.
self._rkode = inverseBezier(self._current_bezier, step_size=self._distance_increment)
self.fixed_speed = fixed_speed
self.repeats = config.get("repeatCount", 1)
self._current_iteration = 0
self._reverse = False
def setOrigin(self, origin):
self._origin = origin
def getDisplacement(self):
if self.done:
return self._displacement + self._origin
self._displacement = bezier(self._current_bezier, self._time)
if self.fixed_speed:
self._time = self._rkode.getNext()
else:
if self._reverse:
self._time -= self._speed
else:
self._time += self._speed
        # When the local parameter leaves [0, 1], step to the next cubic segment
        # (or the previous one when traversing in reverse), count completed
        # traversals against repeatCount, and rebuild the active segment.
if (self._time < 0 and self._reverse) or (self._time > 1 and not self._reverse):
if self._reverse:
self._current_bezier_num -= 1
else:
self._current_bezier_num += 1
if self._current_bezier_num == len(self.control_polygon) or self._current_bezier_num == 0:
self._current_iteration += 1
self._reverse = not self._reverse
if self._current_iteration == self.repeats:
self.done = True
if self._reverse:
self._current_bezier_num -= 1
else:
self._current_bezier_num += 1
if self.fixed_speed:
self._rkode.reverse()
else:
n = self._current_bezier_num - 1
                self._current_bezier = [
self.control_polygon[n],
self.control_polygon[n] + self.weight_polygon[n],
self.control_polygon[n + 1] - self.weight_polygon[n + 1],
self.control_polygon[n + 1]
]
if self.fixed_speed:
if self._reverse:
self._rkode = inverseBezier(
self._current_bezier, step_size=-self._distance_increment, initial=1)
else:
self._rkode = inverseBezier(
self._current_bezier, step_size=self._distance_increment)
if self._reverse:
self._time = 1
else:
self._time = 0
return self._displacement + self._origin
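# Example configuration for a CompositeBezierPathElement (illustrative sketch;
# it assumes Vector2D accepts coordinate pairs and that the element is driven
# by the surrounding path / globalSystem machinery):
#
#     element.initialize(
#         controlPolygon=[(0, 0), (4, 0), (4, 4)],  # segment endpoints
#         weightPolygon=[(1, 0), (1, 1), (0, 1)],   # tangent handles, one per endpoint
#         duration=2.0,                             # time for one end-to-end traversal
#         fixedSpeed=True,                          # constant-speed (arclength) traversal
#         repeatCount=3)                            # number of traversals; direction alternates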
|
{
"content_hash": "0e5007641b0ee3c200aa99a8701b693c",
"timestamp": "",
"source": "github",
"line_count": 276,
"max_line_length": 93,
"avg_line_length": 30.702898550724637,
"alnum_prop": 0.6857446306348832,
"repo_name": "FCDM/py-dml",
"id": "439f77feada15edfb624628e8396b7cef0fb9c38",
"size": "8474",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dml/components/paths/bezier.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "77357"
}
],
"symlink_target": ""
}
|
import unittest
import numpy
import pytest
import six
import chainer
from chainer import initializers
from chainer import testing
from chainer import utils
import chainerx
# Utilities for contiguousness tests.
#
# These tests check the contiguousness of incoming arrays.
# Since the contiguousness of incoming arrays cannot be assumed consistently
# (gradient_check passes contiguous arrays in numerical_grad), we instead
# simulate a test failure: the function implementation raises an error if an
# incoming array matches the expected contiguousness, and the test expects
# that failure.
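# Concretely, each contiguousness test class below is marked
# xfail(strict=True, raises=_ContiguousnessMatched), so it passes only when
# the function under test actually received arrays with the requested
# contiguousness.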
class _ContiguousnessMatched(Exception):
pass
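# An array is Fortran (column-major) contiguous when, skipping length-1 axes,
# each stride equals the running product of the preceding extents times the
# itemsize: e.g. shape (3, 2) with itemsize 8 requires strides (8, 24).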
def _is_f_contiguous(shape, strides, itemsize):
if numpy.prod(shape) <= 1:
return True
    for sh, st in zip(shape, strides):
if sh == 1:
continue
if st != itemsize:
return False
itemsize *= sh
return True
def _get_contiguousness(arr):
if isinstance(arr, chainerx.ndarray):
c_contig = arr.is_contiguous
f_contig = _is_f_contiguous(
arr.shape, arr.strides, arr.itemsize)
return (c_contig, f_contig)
return (arr.flags.c_contiguous, arr.flags.f_contiguous)
def _check_contiguousness(arr, expected_contiguous):
if isinstance(arr, chainer.Variable):
_check_contiguousness(arr.array, expected_contiguous)
return
c_contig, f_contig = _get_contiguousness(arr)
if numpy.prod(arr.shape) <= 1:
return # not applicable for this shape
if expected_contiguous is None:
# expected to be non-contiguous
if not c_contig and not f_contig:
raise _ContiguousnessMatched()
elif expected_contiguous == 'C':
# expected to be C-contiguous
if c_contig:
raise _ContiguousnessMatched()
else:
assert False
def _check_grad(grad, expect_grad_none, class_or_tuple):
if expect_grad_none:
assert grad is None
else:
        assert isinstance(grad, class_or_tuple)
def _check_grads(grads, expect_grads_none, class_or_tuple):
for grad, expect_grad_none in six.moves.zip(grads, expect_grads_none):
_check_grad(grad, expect_grad_none, class_or_tuple)
_inject_backend_tests = testing.inject_backend_tests(
None,
[
# CPU tests
{},
{'use_ideep': 'always'},
# GPU tests
{'use_cuda': True},
{'use_cuda': True, 'cuda_device': 1},
# ChainerX tests
{'use_chainerx': True, 'chainerx_device': 'native:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:1'},
])
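# The test function used throughout this file:
#   y1 = (x1 + x2)**2,   y2 = x1**2 * x2**2
# with analytic gradients
#   dy1/dx1 = dy1/dx2 = 2 * (x1 + x2)
#   dy2/dx1 = 2 * x1 * x2**2,   dy2/dx2 = 2 * x1**2 * x2
# _backward_correct and _double_backward_correct below are hand-derived from
# these expressions.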
def _forward_correct(x1, x2):
dt = x1.dtype.type
y1 = (x1 + x2) ** dt(2)
y2 = (x1 ** dt(2)) * (x2 ** dt(2))
return utils.force_array(y1), utils.force_array(y2)
def _backward_correct(x1, x2, gy1, gy2):
dt = x1.dtype.type
ggx1 = (
+ gy1 * dt(2) * (x1 + x2)
+ gy2 * dt(2) * x1 * x2 ** dt(2))
ggx2 = (
+ gy1 * dt(2) * (x1 + x2)
+ gy2 * dt(2) * x1 ** dt(2) * x2)
return ggx1, ggx2
def _double_backward_correct(x1, x2, gy1, gy2, ggx1, ggx2):
dt = x1.dtype.type
ggy1 = (ggx1 + ggx2) * dt(2) * (x1 + x2)
ggy2 = (ggx1 * x2 + ggx2 * x1) * dt(2) * x1 * x2
gx1 = (
+ ggx1 * (dt(2) * gy1 + dt(2) * x2 ** dt(2) * gy2)
+ ggx2 * (dt(2) * gy1 + dt(4) * x1 * x2 * gy2))
gx2 = (
+ ggx1 * (dt(2) * gy1 + dt(4) * x1 * x2 * gy2)
+ ggx2 * (dt(2) * gy1 + dt(2) * x1 ** dt(2) * gy2))
return gx1, gx2, ggy1, ggy2
# TestFunctionTestSuccessful
#
# This test checks the successful case.
# Incoming array types are also checked.
class FuncCorrectlyImplemented(chainer.FunctionNode):
def __init__(
self, device,
expect_grad_outputs_none=(False, False),
expect_grad_grad_inputs_none=(False, False)):
self.device = device
self.expect_grad_outputs_none = expect_grad_outputs_none
self.expect_grad_grad_inputs_none = expect_grad_grad_inputs_none
def forward(self, inputs):
device = self.device
x1, x2 = inputs
if device.xp is chainerx:
fallback_device = device.fallback_device
assert isinstance(x1, fallback_device.supported_array_types)
assert isinstance(x2, fallback_device.supported_array_types)
self.retain_inputs((0, 1))
y1, y2 = _forward_correct(x1, x2)
return utils.force_array(y1), utils.force_array(y2)
def backward(self, indexes, grad_outputs):
device = self.device
_check_grads(
grad_outputs, self.expect_grad_outputs_none,
device.supported_array_types)
x1, x2 = self.get_retained_inputs()
gy1, gy2 = grad_outputs
assert isinstance(x1.array, device.supported_array_types)
assert isinstance(x2.array, device.supported_array_types)
grad_func = FuncGradCorrectlyImplemented(
device,
self.expect_grad_outputs_none,
self.expect_grad_grad_inputs_none)
return grad_func.apply((x1, x2, gy1, gy2))
class FuncGradCorrectlyImplemented(chainer.FunctionNode):
def __init__(
self, device,
expect_grad_outputs_none,
expect_grad_grad_inputs_none):
self.device = device
self.expect_grad_outputs_none = expect_grad_outputs_none
self.expect_grad_grad_inputs_none = expect_grad_grad_inputs_none
def forward(self, inputs_and_grad_outputs):
device = self.device
x1, x2, gy1, gy2 = inputs_and_grad_outputs
if device.xp is chainerx:
fallback_device = device.fallback_device
_check_grads(
(gy1, gy2), self.expect_grad_outputs_none,
fallback_device.supported_array_types)
self.retain_inputs((0, 1, 2, 3))
ggx1, ggx2 = _backward_correct(
x1, x2,
0 if self.expect_grad_outputs_none[0] else gy1,
0 if self.expect_grad_outputs_none[1] else gy2)
return utils.force_array(ggx1), utils.force_array(ggx2)
def backward(self, indexes, grad_grad_inputs):
device = self.device
_check_grads(
grad_grad_inputs, self.expect_grad_grad_inputs_none,
chainer.Variable)
ggx1, ggx2 = grad_grad_inputs
x1, x2, gy1, gy2 = self.get_retained_inputs()
assert isinstance(x1, chainer.Variable)
assert isinstance(x2, chainer.Variable)
assert isinstance(x1.array, device.supported_array_types)
assert isinstance(x2.array, device.supported_array_types)
_check_grads(
(gy1, gy2), self.expect_grad_outputs_none, chainer.Variable)
if not self.expect_grad_outputs_none[0]:
            assert isinstance(gy1.array, device.supported_array_types)
        if not self.expect_grad_outputs_none[1]:
            assert isinstance(gy2.array, device.supported_array_types)
gx1, gx2, ggy1, ggy2 = _double_backward_correct(
x1, x2,
0 if self.expect_grad_outputs_none[0] else gy1,
0 if self.expect_grad_outputs_none[1] else gy2,
0 if self.expect_grad_grad_inputs_none[0] else ggx1,
0 if self.expect_grad_grad_inputs_none[1] else ggx2)
return gx1, gx2, ggy1, ggy2
@testing.parameterize(*testing.product({
'shape': [(3, 2), (2,), (1,), (), (2, 0, 3)],
}))
@_inject_backend_tests
class TestFunctionTestSuccessful(testing.FunctionTestCase):
def generate_inputs(self):
x1 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
x2 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
return x1, x2
def forward(self, inputs, device):
func = FuncCorrectlyImplemented(device)
return func.apply(inputs)
def forward_expected(self, inputs):
return _forward_correct(*inputs)
@_inject_backend_tests
class TestFunctionTestSuccessfulNoneGrads(testing.FunctionTestCase):
def generate_inputs(self):
x1 = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)
x2 = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)
return x1, x2
def generate_grad_outputs(self, output_templates):
grad_outputs = (
None,
(numpy.random.uniform(-1, 1, output_templates[1].shape)
.astype(output_templates[1].dtype)))
return grad_outputs
def generate_grad_grad_inputs(self, input_templates):
grad_inputs = (
(numpy.random.uniform(-1, 1, input_templates[0].shape)
.astype(input_templates[0].dtype)),
None)
return grad_inputs
def forward(self, inputs, device):
func = FuncCorrectlyImplemented(
device,
expect_grad_outputs_none=(True, False),
expect_grad_grad_inputs_none=(False, True))
return func.apply(inputs)
def forward_expected(self, inputs):
return _forward_correct(*inputs)
# TestFunctionTestIncorrectForward
#
# This test checks that an incorrect forward implementation is detected.
class FuncWithIncorrectForward(chainer.FunctionNode):
def forward(self, inputs):
x1, x2 = inputs
y1, y2 = _forward_correct(x1, x2)
y1, y2 = utils.force_array(y1), utils.force_array(y2)
y2[...] += 1 # ! make incorrect
return y1, y2
def backward(self, *args, **kwargs):
assert False # should never be called
@testing.parameterize(*testing.product({
'shape': [(3, 2), (2,), (1,), ()],
}))
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.FunctionTestError)
class TestFunctionTestIncorrectForward(testing.FunctionTestCase):
skip_backward_test = True
skip_double_backward_test = True
def generate_inputs(self):
x1 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
x2 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
return x1, x2
def forward(self, inputs, device):
func = FuncWithIncorrectForward()
return func.apply(inputs)
def forward_expected(self, inputs):
return _forward_correct(*inputs)
# TestFunctionTestIncorrectBackward
#
# This test checks that an incorrect backward implementation is detected.
class FuncWithIncorrectBackward(chainer.FunctionNode):
def __init__(self, expect_grad_outputs_none=(False, False)):
self.expect_grad_outputs_none = expect_grad_outputs_none
def forward(self, inputs):
x1, x2 = inputs
y1, y2 = _forward_correct(x1, x2)
self.retain_inputs((0, 1))
return utils.force_array(y1), utils.force_array(y2)
def backward(self, indexes, grad_outputs):
gy1, gy2 = grad_outputs
x1, x2 = self.get_retained_inputs()
ggx1, ggx2 = _backward_correct(
x1, x2,
0 if self.expect_grad_outputs_none[0] else gy1,
0 if self.expect_grad_outputs_none[1] else gy2)
ggx1 = ggx1 + 100000
ggx2 = ggx2 + 10000 # ! make incorrect
return utils.force_array(ggx1), utils.force_array(ggx2)
@testing.parameterize(*testing.product({
'shape': [(3, 2), (2,), (1,), ()],
}))
@testing.fix_random()
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.FunctionTestError)
class TestFunctionTestIncorrectBackward(testing.FunctionTestCase):
skip_forward_test = True
skip_double_backward_test = True
def generate_inputs(self):
x1 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
x2 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
return x1, x2
def forward(self, inputs, device):
func = FuncWithIncorrectBackward()
return func.apply(inputs)
def forward_expected(self, inputs):
return _forward_correct(*inputs)
@testing.fix_random()
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.FunctionTestError)
class TestFunctionTestIncorrectBackwardNoneGrads(testing.FunctionTestCase):
skip_forward_test = True
skip_double_backward_test = True
def generate_inputs(self):
x1 = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)
x2 = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)
return x1, x2
def generate_grad_outputs(self, output_templates):
grad_outputs = (
None,
(numpy.random.uniform(-1, 1, output_templates[1].shape)
.astype(output_templates[1].dtype)))
return grad_outputs
def forward(self, inputs, device):
func = FuncWithIncorrectBackward(
expect_grad_outputs_none=(True, False))
return func.apply(inputs)
def forward_expected(self, inputs):
return _forward_correct(*inputs)
# TestFunctionTestIncorrectDoubleBackward
#
# This test checks that an incorrect double backward implementation is detected.
class FuncWithIncorrectDoubleBackward(chainer.FunctionNode):
def __init__(
self,
expect_grad_outputs_none=(False, False),
expect_grad_grad_inputs_none=(False, False)):
self.expect_grad_outputs_none = expect_grad_outputs_none
self.expect_grad_grad_inputs_none = expect_grad_grad_inputs_none
def forward(self, inputs):
x1, x2 = inputs
y1, y2 = _forward_correct(x1, x2)
self.retain_inputs((0, 1))
return utils.force_array(y1), utils.force_array(y2)
def backward(self, indexes, grad_outputs):
x1, x2 = self.get_retained_inputs()
gy1, gy2 = grad_outputs
grad_func = FuncGradWithIncorrectDoubleBackward(
expect_grad_outputs_none=self.expect_grad_outputs_none,
expect_grad_grad_inputs_none=self.expect_grad_grad_inputs_none)
return grad_func.apply((x1, x2, gy1, gy2))
class FuncGradWithIncorrectDoubleBackward(chainer.FunctionNode):
def __init__(
self,
expect_grad_outputs_none=(False, False),
expect_grad_grad_inputs_none=(False, False)):
self.expect_grad_outputs_none = expect_grad_outputs_none
self.expect_grad_grad_inputs_none = expect_grad_grad_inputs_none
def forward(self, inputs_and_grad_outputs):
x1, x2, gy1, gy2 = inputs_and_grad_outputs
self.retain_inputs((0, 1, 2, 3))
ggx1, ggx2 = _backward_correct(
x1, x2,
0 if self.expect_grad_outputs_none[0] else gy1,
0 if self.expect_grad_outputs_none[1] else gy2)
return utils.force_array(ggx1), utils.force_array(ggx2)
def backward(self, indexes, grad_grad_inputs):
ggx1, ggx2 = grad_grad_inputs
x1, x2, gy1, gy2 = self.get_retained_inputs()
gx1, gx2, ggy1, ggy2 = _double_backward_correct(
x1, x2,
0 if self.expect_grad_outputs_none[0] else gy1,
0 if self.expect_grad_outputs_none[1] else gy2,
0 if self.expect_grad_grad_inputs_none[0] else ggx1,
0 if self.expect_grad_grad_inputs_none[1] else ggx2)
ggy2 = ggy2 + 10000 # ! make incorrect
return gx1, gx2, ggy1, ggy2
@testing.parameterize(*testing.product({
'shape': [(3, 2), (2,), (1,), ()],
}))
@testing.fix_random()
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.FunctionTestError)
class TestFunctionTestIncorrectDoubleBackward(testing.FunctionTestCase):
skip_forward_test = True
skip_backward_test = True
def generate_inputs(self):
x1 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
x2 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
return x1, x2
def forward(self, inputs, device):
func = FuncWithIncorrectDoubleBackward()
return func.apply(inputs)
def forward_expected(self, inputs):
return _forward_correct(*inputs)
@testing.fix_random()
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.FunctionTestError)
class TestFunctionTestIncorrectDoubleBackwardNoneGrads(
testing.FunctionTestCase):
skip_forward_test = True
skip_backward_test = True
def generate_inputs(self):
x1 = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)
x2 = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)
return x1, x2
def generate_grad_outputs(self, output_templates):
grad_outputs = (
None,
(numpy.random.uniform(-1, 1, output_templates[1].shape)
.astype(output_templates[1].dtype)))
return grad_outputs
def generate_grad_grad_inputs(self, input_templates):
grad_inputs = (
(numpy.random.uniform(-1, 1, input_templates[0].shape)
.astype(input_templates[0].dtype)),
None)
return grad_inputs
def forward(self, inputs, device):
func = FuncWithIncorrectDoubleBackward(
expect_grad_outputs_none=(True, False),
expect_grad_grad_inputs_none=(False, True))
return func.apply(inputs)
def forward_expected(self, inputs):
return _forward_correct(*inputs)
# FunctionTestCaseArrayContiguousnessTest
class FuncWithContiguousnessCheck(chainer.FunctionNode):
def __init__(self, contiguous, check_on):
self.contiguous = contiguous
self.check_on = check_on
def _check_contiguousness(self, arr):
assert isinstance(arr, chainer.get_array_types())
_check_contiguousness(arr, self.contiguous)
def forward(self, inputs):
x1, x2 = inputs
if self.check_on == 'forward_input':
self._check_contiguousness(x1)
self._check_contiguousness(x2)
self.retain_inputs((0, 1))
y1, y2 = _forward_correct(x1, x2)
return utils.force_array(y1), utils.force_array(y2)
def backward(self, indexes, grad_outputs):
x1, x2 = self.get_retained_inputs()
gy1, gy2 = grad_outputs
if self.check_on == 'backward_retained_input':
self._check_contiguousness(x1.array)
self._check_contiguousness(x2.array)
elif self.check_on == 'backward_grad_output':
self._check_contiguousness(gy1.array)
self._check_contiguousness(gy2.array)
grad_func = FuncGradWithContiguousnessCheck(
self.contiguous, self.check_on)
return grad_func.apply((x1, x2, gy1, gy2))
class FuncGradWithContiguousnessCheck(chainer.FunctionNode):
def __init__(self, contiguous, check_on):
self.contiguous = contiguous
self.check_on = check_on
def _check_contiguousness(self, arr):
testing.function_link._check_contiguousness(arr, self.contiguous)
def forward(self, inputs_and_grad_outputs):
x1, x2, gy1, gy2 = inputs_and_grad_outputs
self.retain_inputs((0, 1, 2, 3))
ggx1, ggx2 = _backward_correct(x1, x2, gy1, gy2)
return utils.force_array(ggx1), utils.force_array(ggx2)
def backward(self, indexes, grad_grad_inputs):
ggx1, ggx2 = grad_grad_inputs
if self.check_on == 'double_backward_grad_grad_input':
self._check_contiguousness(ggx1)
self._check_contiguousness(ggx2)
x1, x2, gy1, gy2 = self.get_retained_inputs()
gx1, gx2, ggy1, ggy2 = _double_backward_correct(
x1, x2, gy1, gy2, ggx1, ggx2)
return gx1, gx2, ggy1, ggy2
@testing.parameterize(*testing.product({
'shape': [(3, 2), (2,), (1, 2)],
'contiguous': [None, 'C'],
'check_on': [ # Check points in which contiguousness is probed.
'forward_input',
# TODO(niboshi): As gradient_check.check_backward currently copies the
# grads without preserving strides, they cannot be non-contiguous.
        # Enable this check once check_backward is fixed.
        # 'backward_grad_output',
        'backward_retained_input',
        # TODO(niboshi): Enable this check once check_backward is fixed.
# 'double_backward_grad_grad_input',
]}))
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=_ContiguousnessMatched)
class FunctionTestCaseArrayContiguousnessTest(testing.FunctionTestCase):
def generate_inputs(self):
x1 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
x2 = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)
return x1, x2
def forward(self, inputs, device):
func = FuncWithContiguousnessCheck(self.contiguous, self.check_on)
return func.apply(inputs)
def forward_expected(self, inputs):
return _forward_correct(*inputs)
def before_test(self, test_name):
# Some combinations of test methods and check points are irrelevant.
# Skip such combinations.
# For example, `test_forward` method does not generate grad_outputs.
if test_name == 'test_forward':
if self.check_on != 'forward_input':
raise unittest.SkipTest()
if test_name == 'test_backward':
if self.check_on == 'double_backward_grad_grad_input':
raise unittest.SkipTest()
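# LinkTestCase / LinkInitializersTestCase tests
#
# The remaining tests exercise the link test harness with a minimal dot-product
# link: Dot is the FunctionNode, DotLink wraps it with a parameter `p`, and the
# DotLinkIncorrect* variants deliberately break forward, backward or parameter
# initialization so that the harness is expected to flag them.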
class Dot(chainer.FunctionNode):
def __init__(
self, incorrect_forward=False, incorrect_backward_gx=False,
incorrect_backward_gp=False, contiguous=None,
check_on=None):
self.incorrect_forward = incorrect_forward
self.incorrect_backward_gx = incorrect_backward_gx
self.incorrect_backward_gp = incorrect_backward_gp
self.contiguous = contiguous
self.check_on = check_on
def forward(self, inputs):
self.retain_inputs((0, 1))
xp = chainer.backend.get_array_module(*inputs)
x, p = inputs
if self.check_on == 'forward_input':
self._check_contiguousness(x)
self._check_contiguousness(p)
y = xp.dot(x, p)
if self.incorrect_forward:
y *= 9999
return y,
def backward(self, indexes, grad_outputs):
gy, = grad_outputs
x, p = self.get_retained_inputs()
if self.check_on == 'backward_retained_input':
self._check_contiguousness(x.array)
self._check_contiguousness(p.array)
elif self.check_on == 'backward_grad_output':
self._check_contiguousness(gy.array)
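        # For y = x @ p the gradients are gx = gy @ p^T and gp = x^T @ gy.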
gx = chainer.functions.matmul(gy, p.T)
gp = chainer.functions.matmul(x.T, gy)
if self.incorrect_backward_gx:
gx /= 2
if self.incorrect_backward_gp:
gp += 1000
return gx, gp
def _check_contiguousness(self, arr):
assert isinstance(arr, chainer.get_array_types())
_check_contiguousness(arr, self.contiguous)
class DotLink(chainer.Link):
"""correctly implemented dot."""
def __init__(
self, in_size, out_size, initial_p=None, contiguous=None,
check_on=None):
super(DotLink, self).__init__()
with self.init_scope():
if initial_p is None:
initial_p = initializers.Constant(1)
self.p = chainer.Parameter(initial_p, shape=(in_size, out_size))
self.contiguous = contiguous
self.check_on = check_on
def forward(self, inputs):
x = inputs
p = self.p
contiguous = self.contiguous
check_on = self.check_on
y, = Dot(contiguous=contiguous, check_on=check_on).apply((x, p))
return y
class DotLinkIncorrectForward(DotLink):
"""Incorrectly implemented dot (forward)."""
def __init__(self, *args, **kwargs):
super(DotLinkIncorrectForward, self).__init__(*args, **kwargs)
def forward(self, inputs):
x = inputs
p = self.p
y, = Dot(incorrect_forward=True).apply((x, p))
return y
class DotLinkIncorrectBackward(DotLink):
"""Incorrect implementation of dot (backward)."""
def __init__(self, incorrect_gx, incorrect_gp, *args, **kwargs):
super(DotLinkIncorrectBackward, self).__init__(*args, **kwargs)
self.incorrect_gx = incorrect_gx
self.incorrect_gp = incorrect_gp
def forward(self, inputs):
x = inputs
p = self.p
y, = Dot(
incorrect_backward_gx=self.incorrect_gx,
incorrect_backward_gp=self.incorrect_gp).apply((x, p))
return y
class DotLinkIncorrectInitialization(DotLink):
"""Incorrect implementation of dot (parameter initialization)."""
def __init__(self, in_size, out_size, initial_p=None):
# Ignores given initializer here.
super(DotLinkIncorrectInitialization, self).__init__(
in_size, out_size, initializers.Constant(0))
class DotLinkTestBase(object):
param_names = ('p',)
def setUp(self):
self.n = 1
self.in_size = 2
self.out_size = 3
self.dtype = numpy.float32
def generate_params(self):
in_size = self.in_size
out_size = self.out_size
return numpy.random.uniform(
-1, 1, (in_size, out_size)).astype(self.dtype),
def create_link(self, initializers):
initial_p, = initializers
in_size = self.in_size
out_size = self.out_size
return DotLink(in_size, out_size, initial_p)
def generate_inputs(self):
return numpy.random.rand(self.n, self.in_size).astype(self.dtype),
# Required for forward backward tests.
def forward_expected(self, link, inputs):
p = link.p.array
x, = inputs
return numpy.dot(x, p),
    # Required for initializers test.
def get_initializers(self):
return [
initializers.Constant(0), 2,
testing.InitializerArgument(None, initializers.Constant(1))],
@_inject_backend_tests
class TestLinkCorrect(DotLinkTestBase, testing.LinkTestCase):
pass
@_inject_backend_tests
class TestLinkInitializersCorrect(
DotLinkTestBase, testing.LinkInitializersTestCase):
pass
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.LinkTestError)
class TestLinkIncorrectForward(DotLinkTestBase, testing.LinkTestCase):
skip_backward_test = True
def create_link(self, initializers):
initial_p, = initializers
in_size = self.in_size
out_size = self.out_size
link = DotLinkIncorrectForward(in_size, out_size, initial_p)
return link
@testing.fix_random()
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.LinkTestError)
class TestLinkIncorrectBackwardInput(DotLinkTestBase, testing.LinkTestCase):
skip_forward_test = True
def create_link(self, initializers):
initial_p, = initializers
in_size = self.in_size
out_size = self.out_size
link = DotLinkIncorrectBackward(
True, False, in_size, out_size, initial_p)
return link
@testing.fix_random()
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.LinkTestError)
class TestLinkIncorrectBackwardParam(DotLinkTestBase, testing.LinkTestCase):
skip_forward_test = True
def create_link(self, initializers):
initial_p, = initializers
in_size = self.in_size
out_size = self.out_size
link = DotLinkIncorrectBackward(
False, True, in_size, out_size, initial_p)
return link
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=TypeError)
class TestLinkIncorrectCreateLink(DotLinkTestBase, testing.LinkTestCase):
def create_link(self, initializers):
# Invalid return type (that is not an instance of chainer.Link).
return numpy.array([1])
@testing.parameterize(*testing.product({
'invalid_forward_backward_initializer': [
chainer.Variable(numpy.array([1])),
chainer.Parameter(numpy.array([1])),
]}))
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=TypeError)
class TestLinkIncorrectForwardBackwardInitializers(
DotLinkTestBase, testing.LinkTestCase):
def generate_params(self):
return self.invalid_forward_backward_initializer,
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=testing.LinkTestError)
class TestLinkIncorrectBackwardInitializers(
DotLinkTestBase, testing.LinkInitializersTestCase):
def create_link(self, initializers):
initial_p, = initializers
in_size = self.in_size
out_size = self.out_size
link = DotLinkIncorrectInitialization(in_size, out_size, initial_p)
return link
@testing.parameterize(*testing.product({
'invalid_initializer': [
chainer.Variable(numpy.array([1])),
chainer.Parameter(numpy.array([1])),
]}))
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=TypeError)
class TestLinkIncorrectInitializers(
DotLinkTestBase, testing.LinkInitializersTestCase):
def get_initializers(self):
return [self.invalid_initializer],
@testing.parameterize(*testing.product({
'contiguous': [None, 'C'],
'check_on': [ # Check points in which contiguousness is probed.
'forward_input',
# TODO(hvy): As gradient_check.check_backward currently copies the
# grads without preserving strides, they cannot be non-contiguous.
        # Enable this check once check_backward is fixed.
        # 'backward_grad_output',
        'backward_retained_input',
        # TODO(hvy): Enable this check once check_backward is fixed.
# 'double_backward_grad_grad_input',
]}))
@_inject_backend_tests
@pytest.mark.xfail(strict=True, raises=_ContiguousnessMatched)
class TestLinkContiguousness(DotLinkTestBase, testing.LinkTestCase):
def before_test(self, test_name):
# Some combinations of test methods and check points are irrelevant.
# Skip such combinations.
# For example, `test_forward` method does not generate grad_outputs.
if test_name == 'test_forward':
if self.check_on != 'forward_input':
raise unittest.SkipTest()
def create_link(self, initializers):
initial_p, = initializers
in_size = self.in_size
out_size = self.out_size
contiguous = self.contiguous
check_on = self.check_on
link = DotLink(
in_size, out_size, initial_p, contiguous=contiguous,
check_on=check_on)
return link
testing.run_module(__name__, __file__)
|
{
"content_hash": "aca93f74da7943cdd2c7cd2e59d719cf",
"timestamp": "",
"source": "github",
"line_count": 920,
"max_line_length": 79,
"avg_line_length": 33.071739130434786,
"alnum_prop": 0.6350489712745678,
"repo_name": "chainer/chainer",
"id": "d4d318296b429d8ee8bd8bb9d15084d015faa09d",
"size": "30426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/chainer_tests/testing_tests/test_function_link.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3805"
},
{
"name": "C",
"bytes": "1099"
},
{
"name": "C++",
"bytes": "1688016"
},
{
"name": "CMake",
"bytes": "51351"
},
{
"name": "Cuda",
"bytes": "191633"
},
{
"name": "Dockerfile",
"bytes": "6102"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "6431941"
},
{
"name": "Shell",
"bytes": "50151"
}
],
"symlink_target": ""
}
|